bug 681884 - faster slow path of GC allocations. r=wmccloskey
authorIgor Bukanov <igor@mir2.org>
Thu, 18 Aug 2011 09:16:08 +0200
changeset 76466 60df75bc14281f6f474db428155e3bcc6adc7441
parent 76465 30ffa45f9a63c5e770691bc26c66494c74009773
child 76467 d6f8a08a4c85a062f28efe121652a87fab971977
push id3
push userfelipc@gmail.com
push dateFri, 30 Sep 2011 20:09:13 +0000
reviewerswmccloskey
bugs681884
milestone9.0a1
bug 681884 - faster slow path of GC allocations. r=wmccloskey
js/src/jsarray.cpp
js/src/jscell.h
js/src/jscntxt.h
js/src/jscompartment.cpp
js/src/jscompartment.h
js/src/jsemit.cpp
js/src/jsfun.cpp
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsgcinlines.h
js/src/jsgcstats.cpp
js/src/jsgcstats.h
js/src/jsinfer.cpp
js/src/jsinfer.h
js/src/jsinterp.cpp
js/src/jsobj.cpp
js/src/jsobj.h
js/src/jsobjinlines.h
js/src/jsproxy.cpp
js/src/jsscopeinlines.h
js/src/methodjit/BaseAssembler.h
js/src/methodjit/StubCalls.cpp
js/src/vm/String-inl.h
js/src/vm/String.cpp
js/src/xpconnect/src/xpcjsruntime.cpp
--- a/js/src/jsarray.cpp
+++ b/js/src/jsarray.cpp
@@ -1063,17 +1063,17 @@ JSObject::makeDenseArraySlow(JSContext *
     /*
      * Save old map now, before calling InitScopeForObject. We'll have to undo
      * on error. This is gross, but a better way is not obvious. Note: the
      * exact contents of the array are not preserved on error.
      */
     js::Shape *oldMap = lastProp;
 
     /* Create a native scope. */
-    js::gc::FinalizeKind kind = js::gc::FinalizeKind(arenaHeader()->getThingKind());
+    gc::AllocKind kind = getAllocKind();
     if (!InitScopeForObject(cx, this, &js_SlowArrayClass, getProto()->getNewType(cx), kind))
         return false;
 
     backfillDenseArrayHoles(cx);
 
     uint32 arrayCapacity = getDenseArrayCapacity();
     uint32 arrayInitialized = getDenseArrayInitializedLength();
 
@@ -3251,17 +3251,17 @@ js_InitArrayClass(JSContext *cx, JSObjec
 namespace js {
 
 template<bool allocateCapacity>
 static JS_ALWAYS_INLINE JSObject *
 NewArray(JSContext *cx, jsuint length, JSObject *proto)
 {
     JS_ASSERT_IF(proto, proto->isArray());
 
-    gc::FinalizeKind kind = GuessObjectGCKind(length, true);
+    gc::AllocKind kind = GuessObjectGCKind(length, true);
     JSObject *obj = detail::NewObject<WithProto::Class, false>(cx, &js_ArrayClass, proto, NULL, kind);
     if (!obj)
         return NULL;
 
     obj->setArrayLength(cx, length);
 
     if (!cx->typeInferenceEnabled()) {
         obj->markDenseArrayNotPacked(cx);
--- a/js/src/jscell.h
+++ b/js/src/jscell.h
@@ -45,16 +45,47 @@
 struct JSCompartment;
 
 namespace js {
 namespace gc {
 
 struct ArenaHeader;
 struct Chunk;
 
+/* The GC allocation kinds. */
+enum AllocKind {
+    FINALIZE_OBJECT0,
+    FINALIZE_OBJECT0_BACKGROUND,
+    FINALIZE_OBJECT2,
+    FINALIZE_OBJECT2_BACKGROUND,
+    FINALIZE_OBJECT4,
+    FINALIZE_OBJECT4_BACKGROUND,
+    FINALIZE_OBJECT8,
+    FINALIZE_OBJECT8_BACKGROUND,
+    FINALIZE_OBJECT12,
+    FINALIZE_OBJECT12_BACKGROUND,
+    FINALIZE_OBJECT16,
+    FINALIZE_OBJECT16_BACKGROUND,
+    FINALIZE_OBJECT_LAST = FINALIZE_OBJECT16_BACKGROUND,
+    FINALIZE_FUNCTION,
+    FINALIZE_FUNCTION_AND_OBJECT_LAST = FINALIZE_FUNCTION,
+    FINALIZE_SCRIPT,
+    FINALIZE_SHAPE,
+    FINALIZE_TYPE_OBJECT,
+#if JS_HAS_XML_SUPPORT
+    FINALIZE_XML,
+#endif
+    FINALIZE_SHORT_STRING,
+    FINALIZE_STRING,
+    FINALIZE_EXTERNAL_STRING,
+    FINALIZE_LAST = FINALIZE_EXTERNAL_STRING
+};
+
+const size_t FINALIZE_LIMIT = FINALIZE_LAST + 1;
+
 /*
  * Live objects are marked black. How many other additional colors are available
  * depends on the size of the GCThing.
  */
 static const uint32 BLACK = 0;
 
 /*
  * A GC cell is the base class for all GC things.
@@ -62,16 +93,17 @@ static const uint32 BLACK = 0;
 struct Cell {
     static const size_t CellShift = 3;
     static const size_t CellSize = size_t(1) << CellShift;
     static const size_t CellMask = CellSize - 1;
 
     inline uintptr_t address() const;
     inline ArenaHeader *arenaHeader() const;
     inline Chunk *chunk() const;
+    inline AllocKind getAllocKind() const;
 
     JS_ALWAYS_INLINE bool isMarked(uint32 color = BLACK) const;
     JS_ALWAYS_INLINE bool markIfUnmarked(uint32 color = BLACK) const;
     JS_ALWAYS_INLINE void unmark(uint32 color) const;
 
     inline JSCompartment *compartment() const;
 
 #ifdef DEBUG
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -422,17 +422,17 @@ struct JSRuntime {
     size_t              gcMaxMallocBytes;
     uint32              gcEmptyArenaPoolLifespan;
     uint32              gcNumber;
     js::GCMarker        *gcMarkingTracer;
     bool                gcChunkAllocationSinceLastGC;
     int64               gcNextFullGCTime;
     int64               gcJitReleaseTime;
     JSGCMode            gcMode;
-    volatile bool       gcIsNeeded;
+    volatile jsuword    gcIsNeeded;
     js::WeakMapBase     *gcWeakMapList;
 
     /* Pre-allocated space for the GC mark stacks. Pointer type ensures alignment. */
     void                *gcMarkStackObjs[js::OBJECT_MARK_STACK_SIZE / sizeof(void *)];
     void                *gcMarkStackRopes[js::ROPES_MARK_STACK_SIZE / sizeof(void *)];
     void                *gcMarkStackTypes[js::TYPE_MARK_STACK_SIZE / sizeof(void *)];
     void                *gcMarkStackXMLs[js::XML_MARK_STACK_SIZE / sizeof(void *)];
     void                *gcMarkStackLarges[js::LARGE_MARK_STACK_SIZE / sizeof(void *)];
@@ -1789,27 +1789,43 @@ class AutoXMLRooter : private AutoGCRoot
   private:
     JSXML * const xml;
     JS_DECL_USE_GUARD_OBJECT_NOTIFIER
 };
 #endif /* JS_HAS_XML_SUPPORT */
 
 class AutoLockGC {
   public:
-    explicit AutoLockGC(JSRuntime *rt
+    explicit AutoLockGC(JSRuntime *rt = NULL
                         JS_GUARD_OBJECT_NOTIFIER_PARAM)
-      : rt(rt)
+      : runtime(rt)
     {
         JS_GUARD_OBJECT_NOTIFIER_INIT;
+        if (rt)
+            JS_LOCK_GC(rt);
+    }
+
+    bool locked() const {
+        return !!runtime;
+    }
+
+    void lock(JSRuntime *rt) {
+        JS_ASSERT(rt);
+        JS_ASSERT(!runtime);
+        runtime = rt;
         JS_LOCK_GC(rt);
     }
-    ~AutoLockGC() { JS_UNLOCK_GC(rt); }
+
+    ~AutoLockGC() {
+        if (runtime)
+            JS_UNLOCK_GC(runtime);
+    }
 
   private:
-    JSRuntime *rt;
+    JSRuntime *runtime;
     JS_DECL_USE_GUARD_OBJECT_NOTIFIER
 };
 
 class AutoUnlockGC {
   private:
     JSRuntime *rt;
     JS_DECL_USE_GUARD_OBJECT_NOTIFIER
 
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -124,28 +124,24 @@ JSCompartment::~JSCompartment()
     for (size_t i = 0; i != JS_ARRAY_LENGTH(evalCache); ++i)
         JS_ASSERT(!evalCache[i]);
 #endif
 }
 
 bool
 JSCompartment::init(JSContext *cx)
 {
-    for (unsigned i = 0; i < FINALIZE_LIMIT; i++)
-        arenas[i].init();
-
     activeAnalysis = activeInference = false;
     types.init(cx);
 
     /* Duplicated from jscntxt.cpp. :XXX: bug 675150 fix hack. */
     static const size_t ARENA_HEADER_SIZE_HACK = 40;
 
     JS_InitArenaPool(&pool, "analysis", 4096 - ARENA_HEADER_SIZE_HACK, 8);
 
-    freeLists.init();
     if (!crossCompartmentWrappers.init())
         return false;
 
     if (!scriptFilenameTable.init())
         return false;
 
     regExpAllocator = rt->new_<WTF::BumpPointerAllocator>();
     if (!regExpAllocator)
@@ -183,26 +179,16 @@ JSCompartment::getMjitCodeStats(size_t& 
     } else {
         method = 0;
         regexp = 0;
         unused = 0;
     }
 }
 #endif
 
-bool
-JSCompartment::arenaListsAreEmpty()
-{
-  for (unsigned i = 0; i < FINALIZE_LIMIT; i++) {
-       if (!arenas[i].isEmpty())
-           return false;
-  }
-  return true;
-}
-
 static bool
 IsCrossCompartmentWrapper(JSObject *wrapper)
 {
     return wrapper->isWrapper() &&
            !!(JSWrapper::wrapperHandler(wrapper)->flags() & JSWrapper::CROSS_COMPARTMENT);
 }
 
 bool
@@ -500,20 +486,20 @@ JSCompartment::markTypes(JSTracer *trc)
      */
     JS_ASSERT(activeAnalysis);
 
     for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
         JSScript *script = i.get<JSScript>();
         MarkScript(trc, script, "mark_types_script");
     }
 
-    for (unsigned thingKind = FINALIZE_OBJECT0;
+    for (size_t thingKind = FINALIZE_OBJECT0;
          thingKind <= FINALIZE_FUNCTION_AND_OBJECT_LAST;
          thingKind++) {
-        for (CellIterUnderGC i(this, FinalizeKind(thingKind)); !i.done(); i.next()) {
+        for (CellIterUnderGC i(this, AllocKind(thingKind)); !i.done(); i.next()) {
             JSObject *object = i.get<JSObject>();
             if (!object->isNewborn() && object->hasSingletonType())
                 MarkObject(trc, *object, "mark_types_singleton");
         }
     }
 
     for (CellIterUnderGC i(this, FINALIZE_TYPE_OBJECT); !i.done(); i.next())
         MarkTypeObject(trc, i.get<types::TypeObject>(), "mark_types_scan");
@@ -647,17 +633,17 @@ JSCompartment::sweep(JSContext *cx, uint
     }
 
     active = false;
 }
 
 void
 JSCompartment::purge(JSContext *cx)
 {
-    freeLists.purge();
+    arenas.purge();
     dtoaCache.purge();
 
     /*
      * Clear the hash and reset all evalHashLink to null before the GC. This
      * way MarkChildren(trc, JSScript *) can assume that JSScript::u.object is
      * not null when we have script owned by an object and not from the eval
      * cache.
      */
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -389,18 +389,17 @@ typedef HashSet<ScriptFilenameEntry *,
                 SystemAllocPolicy> ScriptFilenameTable;
 
 } /* namespace js */
 
 struct JS_FRIEND_API(JSCompartment) {
     JSRuntime                    *rt;
     JSPrincipals                 *principals;
 
-    js::gc::ArenaList            arenas[js::gc::FINALIZE_LIMIT];
-    js::gc::FreeLists            freeLists;
+    js::gc::ArenaLists           arenas;
 
     uint32                       gcBytes;
     uint32                       gcTriggerBytes;
     size_t                       gcLastBytes;
 
     bool                         hold;
     bool                         isSystemCompartment;
 
@@ -530,22 +529,16 @@ struct JS_FRIEND_API(JSCompartment) {
     bool wrap(JSContext *cx, js::PropertyOp *op);
     bool wrap(JSContext *cx, js::StrictPropertyOp *op);
     bool wrap(JSContext *cx, js::PropertyDescriptor *desc);
     bool wrap(JSContext *cx, js::AutoIdVector &props);
 
     void markTypes(JSTracer *trc);
     void sweep(JSContext *cx, uint32 releaseInterval);
     void purge(JSContext *cx);
-    void finishArenaLists();
-    void finalizeObjectArenaLists(JSContext *cx);
-    void finalizeStringArenaLists(JSContext *cx);
-    void finalizeShapeArenaLists(JSContext *cx);
-    void finalizeScriptArenaLists(JSContext *cx);
-    bool arenaListsAreEmpty();
 
     void setGCLastBytes(size_t lastBytes, JSGCInvocationKind gckind);
     void reduceGCTriggerBytes(uint32 amount);
 
     js::DtoaCache dtoaCache;
 
   private:
     js::MathCache                *mathCache;
--- a/js/src/jsemit.cpp
+++ b/js/src/jsemit.cpp
@@ -4886,17 +4886,17 @@ JSParseNode::getConstantValue(JSContext 
 
         types::FixArrayType(cx, obj);
         vp->setObject(*obj);
         return true;
       }
       case TOK_RC: {
         JS_ASSERT((pn_op == JSOP_NEWINIT) && !(pn_xflags & PNX_NONCONST));
 
-        gc::FinalizeKind kind = GuessObjectGCKind(pn_count, false);
+        gc::AllocKind kind = GuessObjectGCKind(pn_count, false);
         JSObject *obj = NewBuiltinClassInstance(cx, &js_ObjectClass, kind);
         if (!obj)
             return false;
 
         for (JSParseNode *pn = pn_head; pn; pn = pn->pn_next) {
             Value value;
             if (!pn->pn_right->getConstantValue(cx, strictChecks, &value))
                 return false;
@@ -7079,17 +7079,17 @@ js_EmitTree(JSContext *cx, JSCodeGenerat
             return JS_FALSE;
 
         /*
          * Try to construct the shape of the object as we go, so we can emit a
          * JSOP_NEWOBJECT with the final shape instead.
          */
         JSObject *obj = NULL;
         if (!cg->hasSharps() && cg->compileAndGo()) {
-            gc::FinalizeKind kind = GuessObjectGCKind(pn->pn_count, false);
+            gc::AllocKind kind = GuessObjectGCKind(pn->pn_count, false);
             obj = NewBuiltinClassInstance(cx, &js_ObjectClass, kind);
             if (!obj)
                 return JS_FALSE;
         }
 
         uintN methodInits = 0, slowMethodInits = 0;
         for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
             /* Emit an index for t[2] for later consumption by JSOP_INITELEM. */
--- a/js/src/jsfun.cpp
+++ b/js/src/jsfun.cpp
@@ -768,17 +768,17 @@ Class js_DeclEnvClass = {
  * must be null.
  */
 static JSObject *
 NewCallObject(JSContext *cx, JSScript *script, JSObject &scopeChain, JSObject *callee)
 {
     Bindings &bindings = script->bindings;
     size_t argsVars = bindings.countArgsAndVars();
     size_t slots = JSObject::CALL_RESERVED_SLOTS + argsVars;
-    gc::FinalizeKind kind = gc::GetGCObjectKind(slots);
+    gc::AllocKind kind = gc::GetGCObjectKind(slots);
 
     JSObject *callobj = js_NewGCObject(cx, kind);
     if (!callobj)
         return NULL;
 
     /* Init immediately to avoid GC seeing a half-init'ed object. */
     callobj->initCall(cx, bindings, &scopeChain);
     callobj->makeVarObj();
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -105,27 +105,27 @@
 
 using namespace js;
 using namespace js::gc;
 
 namespace js {
 namespace gc {
 
 /* This array should be const, but that doesn't link right under GCC. */
-FinalizeKind slotsToThingKind[] = {
+AllocKind slotsToThingKind[] = {
     /* 0 */  FINALIZE_OBJECT0,  FINALIZE_OBJECT2,  FINALIZE_OBJECT2,  FINALIZE_OBJECT4,
     /* 4 */  FINALIZE_OBJECT4,  FINALIZE_OBJECT8,  FINALIZE_OBJECT8,  FINALIZE_OBJECT8,
     /* 8 */  FINALIZE_OBJECT8,  FINALIZE_OBJECT12, FINALIZE_OBJECT12, FINALIZE_OBJECT12,
     /* 12 */ FINALIZE_OBJECT12, FINALIZE_OBJECT16, FINALIZE_OBJECT16, FINALIZE_OBJECT16,
     /* 16 */ FINALIZE_OBJECT16
 };
 
 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(slotsToThingKind) == SLOTS_TO_THING_KIND_LIMIT);
 
-const uint8 GCThingSizeMap[] = {
+const uint32 Arena::ThingSizes[] = {
     sizeof(JSObject),           /* FINALIZE_OBJECT0             */
     sizeof(JSObject),           /* FINALIZE_OBJECT0_BACKGROUND  */
     sizeof(JSObject_Slots2),    /* FINALIZE_OBJECT2             */
     sizeof(JSObject_Slots2),    /* FINALIZE_OBJECT2_BACKGROUND  */
     sizeof(JSObject_Slots4),    /* FINALIZE_OBJECT4             */
     sizeof(JSObject_Slots4),    /* FINALIZE_OBJECT4_BACKGROUND  */
     sizeof(JSObject_Slots8),    /* FINALIZE_OBJECT8             */
     sizeof(JSObject_Slots8),    /* FINALIZE_OBJECT8_BACKGROUND  */
@@ -140,17 +140,44 @@ const uint8 GCThingSizeMap[] = {
 #if JS_HAS_XML_SUPPORT
     sizeof(JSXML),              /* FINALIZE_XML                 */
 #endif
     sizeof(JSShortString),      /* FINALIZE_SHORT_STRING        */
     sizeof(JSString),           /* FINALIZE_STRING              */
     sizeof(JSExternalString),   /* FINALIZE_EXTERNAL_STRING     */
 };
 
-JS_STATIC_ASSERT(JS_ARRAY_LENGTH(GCThingSizeMap) == FINALIZE_LIMIT);
+#define OFFSET(type) uint32(sizeof(ArenaHeader) + (ArenaSize - sizeof(ArenaHeader)) % sizeof(type))
+
+const uint32 Arena::FirstThingOffsets[] = {
+    OFFSET(JSObject),           /* FINALIZE_OBJECT0             */
+    OFFSET(JSObject),           /* FINALIZE_OBJECT0_BACKGROUND  */
+    OFFSET(JSObject_Slots2),    /* FINALIZE_OBJECT2             */
+    OFFSET(JSObject_Slots2),    /* FINALIZE_OBJECT2_BACKGROUND  */
+    OFFSET(JSObject_Slots4),    /* FINALIZE_OBJECT4             */
+    OFFSET(JSObject_Slots4),    /* FINALIZE_OBJECT4_BACKGROUND  */
+    OFFSET(JSObject_Slots8),    /* FINALIZE_OBJECT8             */
+    OFFSET(JSObject_Slots8),    /* FINALIZE_OBJECT8_BACKGROUND  */
+    OFFSET(JSObject_Slots12),   /* FINALIZE_OBJECT12            */
+    OFFSET(JSObject_Slots12),   /* FINALIZE_OBJECT12_BACKGROUND */
+    OFFSET(JSObject_Slots16),   /* FINALIZE_OBJECT16            */
+    OFFSET(JSObject_Slots16),   /* FINALIZE_OBJECT16_BACKGROUND */
+    OFFSET(JSFunction),         /* FINALIZE_FUNCTION            */
+    OFFSET(JSScript),           /* FINALIZE_SCRIPT              */
+    OFFSET(Shape),              /* FINALIZE_SHAPE               */
+    OFFSET(types::TypeObject),  /* FINALIZE_TYPE_OBJECT         */
+#if JS_HAS_XML_SUPPORT
+    OFFSET(JSXML),              /* FINALIZE_XML                 */
+#endif
+    OFFSET(JSShortString),      /* FINALIZE_SHORT_STRING        */
+    OFFSET(JSString),           /* FINALIZE_STRING              */
+    OFFSET(JSExternalString),   /* FINALIZE_EXTERNAL_STRING     */
+};
+
+#undef OFFSET
 
 #ifdef DEBUG
 void
 ArenaHeader::checkSynchronizedWithFreeList() const
 {
     /*
      * Do not allow to access the free list when its real head is still stored
      * in FreeLists and is not synchronized with this one.
@@ -163,157 +190,211 @@ ArenaHeader::checkSynchronizedWithFreeLi
      * checks in this case.
      */
     if (!compartment->rt->gcRunning)
         return;
 
     FreeSpan firstSpan = FreeSpan::decodeOffsets(arenaAddress(), firstFreeSpanOffsets);
     if (firstSpan.isEmpty())
         return;
-    FreeSpan *list = &compartment->freeLists.lists[getThingKind()];
+    const FreeSpan *list = compartment->arenas.getFreeList(getAllocKind());
     if (list->isEmpty() || firstSpan.arenaAddress() != list->arenaAddress())
         return;
 
     /*
      * Here this arena has free things, FreeList::lists[thingKind] is not
      * empty and also points to this arena. Thus they must the same.
      */
     JS_ASSERT(firstSpan.isSameNonEmptySpan(list));
 }
 #endif
 
+/* static */ void
+Arena::staticAsserts()
+{
+    JS_STATIC_ASSERT(sizeof(Arena) == ArenaSize);
+    JS_STATIC_ASSERT(JS_ARRAY_LENGTH(ThingSizes) == FINALIZE_LIMIT);
+    JS_STATIC_ASSERT(JS_ARRAY_LENGTH(FirstThingOffsets) == FINALIZE_LIMIT);
+}
+
 template<typename T>
 inline bool
-Arena::finalize(JSContext *cx)
+Arena::finalize(JSContext *cx, AllocKind thingKind, size_t thingSize)
 {
     /* Enforce requirements on size of T. */
-    JS_STATIC_ASSERT(sizeof(T) % Cell::CellSize == 0);
-    JS_STATIC_ASSERT(sizeof(T) <= 255);
+    JS_ASSERT(thingSize % Cell::CellSize == 0);
+    JS_ASSERT(thingSize <= 255);
 
     JS_ASSERT(aheader.allocated());
+    JS_ASSERT(thingKind == aheader.getAllocKind());
+    JS_ASSERT(thingSize == aheader.getThingSize());
     JS_ASSERT(!aheader.getMarkingDelay()->link);
 
-    uintptr_t thing = thingsStart(sizeof(T));
+    uintptr_t thing = thingsStart(thingKind);
     uintptr_t lastByte = thingsEnd() - 1;
 
     FreeSpan nextFree(aheader.getFirstFreeSpan());
     nextFree.checkSpan();
 
     FreeSpan newListHead;
     FreeSpan *newListTail = &newListHead;
     uintptr_t newFreeSpanStart = 0;
     bool allClear = true;
 #ifdef DEBUG
     size_t nmarked = 0;
 #endif
-    for (;; thing += sizeof(T)) {
+    for (;; thing += thingSize) {
         JS_ASSERT(thing <= lastByte + 1);
         if (thing == nextFree.first) {
             JS_ASSERT(nextFree.last <= lastByte);
             if (nextFree.last == lastByte)
                 break;
-            JS_ASSERT(Arena::isAligned(nextFree.last, sizeof(T)));
+            JS_ASSERT(Arena::isAligned(nextFree.last, thingSize));
             if (!newFreeSpanStart)
                 newFreeSpanStart = thing;
             thing = nextFree.last;
             nextFree = *nextFree.nextSpan();
             nextFree.checkSpan();
         } else {
             T *t = reinterpret_cast<T *>(thing);
             if (t->isMarked()) {
                 allClear = false;
 #ifdef DEBUG
                 nmarked++;
 #endif
                 if (newFreeSpanStart) {
-                    JS_ASSERT(thing >= thingsStart(sizeof(T)) + sizeof(T));
+                    JS_ASSERT(thing >= thingsStart(thingKind) + thingSize);
                     newListTail->first = newFreeSpanStart;
-                    newListTail->last = thing - sizeof(T);
-                    newListTail = newListTail->nextSpanUnchecked(sizeof(T));
+                    newListTail->last = thing - thingSize;
+                    newListTail = newListTail->nextSpanUnchecked(thingSize);
                     newFreeSpanStart = 0;
                 }
             } else {
                 if (!newFreeSpanStart)
                     newFreeSpanStart = thing;
                 t->finalize(cx);
-                JS_POISON(t, JS_FREE_PATTERN, sizeof(T));
+                JS_POISON(t, JS_FREE_PATTERN, thingSize);
             }
         }
     }
 
     if (allClear) {
         JS_ASSERT(newListTail == &newListHead);
-        JS_ASSERT(newFreeSpanStart == thingsStart(sizeof(T)));
+        JS_ASSERT(newFreeSpanStart == thingsStart(thingKind));
         return true;
     }
 
     newListTail->first = newFreeSpanStart ? newFreeSpanStart : nextFree.first;
-    JS_ASSERT(Arena::isAligned(newListTail->first, sizeof(T)));
+    JS_ASSERT(Arena::isAligned(newListTail->first, thingSize));
     newListTail->last = lastByte;
 
 #ifdef DEBUG
     size_t nfree = 0;
     for (const FreeSpan *span = &newListHead; span != newListTail; span = span->nextSpan()) {
         span->checkSpan();
-        JS_ASSERT(Arena::isAligned(span->first, sizeof(T)));
-        JS_ASSERT(Arena::isAligned(span->last, sizeof(T)));
-        nfree += (span->last - span->first) / sizeof(T) + 1;
-        JS_ASSERT(nfree + nmarked <= thingsPerArena(sizeof(T)));
+        JS_ASSERT(Arena::isAligned(span->first, thingSize));
+        JS_ASSERT(Arena::isAligned(span->last, thingSize));
+        nfree += (span->last - span->first) / thingSize + 1;
+        JS_ASSERT(nfree + nmarked <= thingsPerArena(thingSize));
     }
-    nfree += (newListTail->last + 1 - newListTail->first) / sizeof(T);
-    JS_ASSERT(nfree + nmarked == thingsPerArena(sizeof(T)));
+    nfree += (newListTail->last + 1 - newListTail->first) / thingSize;
+    JS_ASSERT(nfree + nmarked == thingsPerArena(thingSize));
 #endif
     aheader.setFirstFreeSpan(&newListHead);
 
     return false;
 }
 
-/*
- * Finalize arenas from the list. On return listHeadp points to the list of
- * non-empty arenas.
- */
 template<typename T>
-static void
-FinalizeArenas(JSContext *cx, ArenaHeader **listHeadp)
+inline void
+FinalizeTypedArenas(JSContext *cx, ArenaLists::ArenaList *al, AllocKind thingKind)
 {
-    ArenaHeader **ap = listHeadp;
+    /*
+     * Release empty arenas and move non-full arenas with some free things into
+     * a separated list that we append to al after the loop to ensure that any
+     * arena before al->cursor is full.
+     */
+    JS_ASSERT_IF(!al->head, al->cursor == &al->head);
+    ArenaLists::ArenaList available;
+    ArenaHeader **ap = &al->head;
+    size_t thingSize = Arena::thingSize(thingKind);
     while (ArenaHeader *aheader = *ap) {
-        bool allClear = aheader->getArena()->finalize<T>(cx);
+        bool allClear = aheader->getArena()->finalize<T>(cx, thingKind, thingSize);
         if (allClear) {
             *ap = aheader->next;
             aheader->chunk()->releaseArena(aheader);
+        } else if (aheader->hasFreeThings()) {
+            *ap = aheader->next;
+            *available.cursor = aheader;
+            available.cursor = &aheader->next;
         } else {
             ap = &aheader->next;
         }
     }
+
+    /* Terminate the available list and append it to al. */
+    *available.cursor = NULL;
+    *ap = available.head;
+    al->cursor = ap;
+    JS_ASSERT_IF(!al->head, al->cursor == &al->head);
 }
 
-#ifdef DEBUG
-bool
-checkArenaListAllUnmarked(JSCompartment *comp)
+/*
+ * Finalize the list. On return al->cursor points to the first non-empty arena
+ * after al->head.
+ */
+static void
+FinalizeArenas(JSContext *cx, ArenaLists::ArenaList *al, AllocKind thingKind)
 {
-    for (unsigned i = 0; i < FINALIZE_LIMIT; i++) {
-        if (comp->arenas[i].markedThingsInArenaList())
-            return false;
+    switch(thingKind) {
+      case FINALIZE_OBJECT0:
+      case FINALIZE_OBJECT0_BACKGROUND:
+      case FINALIZE_OBJECT2:
+      case FINALIZE_OBJECT2_BACKGROUND:
+      case FINALIZE_OBJECT4:
+      case FINALIZE_OBJECT4_BACKGROUND:
+      case FINALIZE_OBJECT8:
+      case FINALIZE_OBJECT8_BACKGROUND:
+      case FINALIZE_OBJECT12:
+      case FINALIZE_OBJECT12_BACKGROUND:
+      case FINALIZE_OBJECT16:
+      case FINALIZE_OBJECT16_BACKGROUND:
+      case FINALIZE_FUNCTION:
+	FinalizeTypedArenas<JSObject>(cx, al, thingKind);
+        break;
+      case FINALIZE_SCRIPT:
+	FinalizeTypedArenas<JSScript>(cx, al, thingKind);
+        break;
+      case FINALIZE_SHAPE:
+	FinalizeTypedArenas<Shape>(cx, al, thingKind);
+        break;
+      case FINALIZE_TYPE_OBJECT:
+	FinalizeTypedArenas<types::TypeObject>(cx, al, thingKind);
+        break;
+#if JS_HAS_XML_SUPPORT
+      case FINALIZE_XML:
+	FinalizeTypedArenas<JSXML>(cx, al, thingKind);
+        break;
+#endif
+      case FINALIZE_STRING:
+	FinalizeTypedArenas<JSString>(cx, al, thingKind);
+        break;
+      case FINALIZE_SHORT_STRING:
+	FinalizeTypedArenas<JSShortString>(cx, al, thingKind);
+        break;
+      case FINALIZE_EXTERNAL_STRING:
+	FinalizeTypedArenas<JSExternalString>(cx, al, thingKind);
+        break;
     }
-    return true;
 }
-#endif
 
 } /* namespace gc */
 } /* namespace js */
 
 void
-JSCompartment::finishArenaLists()
-{
-    for (unsigned i = 0; i < FINALIZE_LIMIT; i++)
-        arenas[i].releaseAll(i);
-}
-
-void
 Chunk::init(JSRuntime *rt)
 {
     info.runtime = rt;
     info.age = 0;
     info.numFree = ArenasPerChunk;
 
     /* Assemble all arenas into a linked list and mark them as not allocated. */
     ArenaHeader **prevp = &info.emptyArenaListHead;
@@ -371,25 +452,24 @@ Chunk::removeFromAvailableList()
     if (info.next) {
         JS_ASSERT(info.next->info.prevp == &info.next);
         info.next->info.prevp = info.prevp;
     }
     info.prevp = NULL;
     info.next = NULL;
 }
 
-template <size_t thingSize>
 ArenaHeader *
-Chunk::allocateArena(JSContext *cx, unsigned thingKind)
+Chunk::allocateArena(JSContext *cx, AllocKind thingKind)
 {
     JSCompartment *comp = cx->compartment;
     JS_ASSERT(hasAvailableArenas());
     ArenaHeader *aheader = info.emptyArenaListHead;
     info.emptyArenaListHead = aheader->next;
-    aheader->init(comp, thingKind, thingSize);
+    aheader->init(comp, thingKind);
     --info.numFree;
 
     if (!hasAvailableArenas())
         removeFromAvailableList();
 
     JSRuntime *rt = info.runtime;
     Probes::resizeHeap(comp, rt->gcBytes, rt->gcBytes + ArenaSize);
     JS_ATOMIC_ADD(&rt->gcBytes, ArenaSize);
@@ -401,19 +481,19 @@ Chunk::allocateArena(JSContext *cx, unsi
 }
 
 void
 Chunk::releaseArena(ArenaHeader *aheader)
 {
     JS_ASSERT(aheader->allocated());
     JSRuntime *rt = info.runtime;
 #ifdef JS_THREADSAFE
-    Maybe<AutoLockGC> maybeLock;
+    AutoLockGC maybeLock;
     if (rt->gcHelperThread.sweeping)
-        maybeLock.construct(info.runtime);
+        maybeLock.lock(info.runtime);
 #endif
     JSCompartment *comp = aheader->compartment;
 
     Probes::resizeHeap(comp, rt->gcBytes, rt->gcBytes - ArenaSize);
     JS_ASSERT(size_t(rt->gcBytes) >= ArenaSize);
     JS_ASSERT(size_t(comp->gcBytes) >= ArenaSize);
 #ifdef JS_THREADSAFE
     if (rt->gcHelperThread.sweeping) {
@@ -467,17 +547,18 @@ ReleaseGCChunk(JSRuntime *rt, Chunk *p)
 {
     JS_ASSERT(p);
 #ifdef MOZ_GCTIMER
     JS_ATOMIC_INCREMENT(&destroyChunkCount);
 #endif
     rt->gcChunkAllocator->free_(p);
 }
 
-inline Chunk *
+/* The caller must hold the GC lock. */
+static Chunk *
 PickChunk(JSContext *cx)
 {
     JSCompartment *comp = cx->compartment;
     JSRuntime *rt = comp->rt;
     Chunk **listHeadp = GetAvailableChunkList(comp);
     Chunk *chunk = *listHeadp;
     if (chunk)
         return chunk;
@@ -646,55 +727,16 @@ InFreeList(ArenaHeader *aheader, uintptr
         /*
          * The last possible empty span is an the end of the arena. Here
          * span->end < thing < thingsEnd and so we must have more spans.
          */
         span = span->nextSpan();
     }
 }
 
-template <typename T>
-inline ConservativeGCTest
-MarkArenaPtrConservatively(JSTracer *trc, ArenaHeader *aheader, uintptr_t addr)
-{
-    JS_ASSERT(aheader->allocated());
-    JS_ASSERT(sizeof(T) == aheader->getThingSize());
-
-    uintptr_t offset = addr & ArenaMask;
-    uintptr_t minOffset = Arena::thingsStartOffset(sizeof(T));
-    if (offset < minOffset)
-        return CGCT_NOTARENA;
-
-    /* addr can point inside the thing so we must align the address. */
-    uintptr_t shift = (offset - minOffset) % sizeof(T);
-    addr -= shift;
-
-    /*
-     * Check if the thing is free. We must use the list of free spans as at
-     * this point we no longer have the mark bits from the previous GC run and
-     * we must account for newly allocated things.
-     */
-    if (InFreeList(aheader, addr))
-        return CGCT_NOTLIVE;
-
-    T *thing = reinterpret_cast<T *>(addr);
-    MarkRoot(trc, thing, "machine stack");
-
-#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
-    if (IS_GC_MARKING_TRACER(trc)) {
-        GCMarker *marker = static_cast<GCMarker *>(trc);
-        if (marker->conservativeDumpFileName)
-            marker->conservativeRoots.append(thing);
-        if (shift)
-            marker->conservativeStats.unaligned++;
-    }
-#endif
-    return CGCT_VALID;
-}
-
 /*
  * Returns CGCT_VALID and mark it if the w can be a  live GC thing and sets
  * thingKind accordingly. Otherwise returns the reason for rejection.
  */
 inline ConservativeGCTest
 MarkIfGCThingWord(JSTracer *trc, jsuword w)
 {
     /*
@@ -732,76 +774,54 @@ MarkIfGCThingWord(JSTracer *trc, jsuword
     if (!Chunk::withinArenasRange(addr))
         return CGCT_NOTARENA;
 
     ArenaHeader *aheader = &chunk->arenas[Chunk::arenaIndex(addr)].aheader;
 
     if (!aheader->allocated())
         return CGCT_FREEARENA;
 
-    ConservativeGCTest test;
-    unsigned thingKind = aheader->getThingKind();
-
-    switch (thingKind) {
-      case FINALIZE_OBJECT0:
-      case FINALIZE_OBJECT0_BACKGROUND:
-        test = MarkArenaPtrConservatively<JSObject>(trc, aheader, addr);
-        break;
-      case FINALIZE_OBJECT2:
-      case FINALIZE_OBJECT2_BACKGROUND:
-        test = MarkArenaPtrConservatively<JSObject_Slots2>(trc, aheader, addr);
-        break;
-      case FINALIZE_OBJECT4:
-      case FINALIZE_OBJECT4_BACKGROUND:
-        test = MarkArenaPtrConservatively<JSObject_Slots4>(trc, aheader, addr);
-        break;
-      case FINALIZE_OBJECT8:
-      case FINALIZE_OBJECT8_BACKGROUND:
-        test = MarkArenaPtrConservatively<JSObject_Slots8>(trc, aheader, addr);
-        break;
-      case FINALIZE_OBJECT12:
-      case FINALIZE_OBJECT12_BACKGROUND:
-        test = MarkArenaPtrConservatively<JSObject_Slots12>(trc, aheader, addr);
-        break;
-      case FINALIZE_OBJECT16:
-      case FINALIZE_OBJECT16_BACKGROUND:
-        test = MarkArenaPtrConservatively<JSObject_Slots16>(trc, aheader, addr);
-        break;
-      case FINALIZE_STRING:
-        test = MarkArenaPtrConservatively<JSString>(trc, aheader, addr);
-        break;
-      case FINALIZE_EXTERNAL_STRING:
-        test = MarkArenaPtrConservatively<JSExternalString>(trc, aheader, addr);
-        break;
-      case FINALIZE_SHORT_STRING:
-        test = MarkArenaPtrConservatively<JSShortString>(trc, aheader, addr);
-        break;
-      case FINALIZE_FUNCTION:
-        test = MarkArenaPtrConservatively<JSFunction>(trc, aheader, addr);
-        break;
-      case FINALIZE_SCRIPT:
-        test = MarkArenaPtrConservatively<JSScript>(trc, aheader, addr);
-        break;
-      case FINALIZE_SHAPE:
-        test = MarkArenaPtrConservatively<Shape>(trc, aheader, addr);
-        break;
-      case FINALIZE_TYPE_OBJECT:
-        test = MarkArenaPtrConservatively<types::TypeObject>(trc, aheader, addr);
-        break;
-#if JS_HAS_XML_SUPPORT
-      case FINALIZE_XML:
-        test = MarkArenaPtrConservatively<JSXML>(trc, aheader, addr);
-        break;
+    AllocKind thingKind = aheader->getAllocKind();
+    uintptr_t offset = addr & ArenaMask;
+    uintptr_t minOffset = Arena::firstThingOffset(thingKind);
+    if (offset < minOffset)
+        return CGCT_NOTARENA;
+
+    /* addr can point inside the thing so we must align the address. */
+    uintptr_t shift = (offset - minOffset) % Arena::thingSize(thingKind);
+    addr -= shift;
+
+    /*
+     * Check if the thing is free. We must use the list of free spans as at
+     * this point we no longer have the mark bits from the previous GC run and
+     * we must account for newly allocated things.
+     */
+    if (InFreeList(aheader, addr))
+        return CGCT_NOTLIVE;
+
+    void *thing = reinterpret_cast<void *>(addr);
+
+#ifdef DEBUG
+    const char pattern[] = "machine_stack %lx";
+    char nameBuf[sizeof(pattern) - 3 + sizeof(addr) * 2];
+    JS_snprintf(nameBuf, sizeof(nameBuf), "machine_stack %lx", (unsigned long) addr);
+    JS_SET_TRACING_NAME(trc, nameBuf);
 #endif
-      default:
-        test = CGCT_WRONGTAG;
-        JS_NOT_REACHED("wrong tag");
+    MarkKind(trc, thing, MapAllocToTraceKind(thingKind));
+
+#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
+    if (IS_GC_MARKING_TRACER(trc)) {
+        GCMarker *marker = static_cast<GCMarker *>(trc);
+        if (marker->conservativeDumpFileName)
+            marker->conservativeRoots.append(thing);
+        if (shift)
+            marker->conservativeStats.unaligned++;
     }
-
-    return test;
+#endif
+    return CGCT_VALID;
 }
 
 static void
 MarkWordConservatively(JSTracer *trc, jsuword w)
 {
     /*
      * The conservative scanner may access words that valgrind considers as
      * undefined. To avoid false positives and not to alter valgrind view of
@@ -921,21 +941,18 @@ RecordNativeStackTopForGC(JSContext *cx)
 static void
 CheckLeakedRoots(JSRuntime *rt);
 #endif
 
 void
 js_FinishGC(JSRuntime *rt)
 {
     /* Delete all remaining Compartments. */
-    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c) {
-        JSCompartment *comp = *c;
-        comp->finishArenaLists();
-        Foreground::delete_(comp);
-    }
+    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
+        Foreground::delete_(*c);
     rt->compartments.clear();
     rt->atomsCompartment = NULL;
 
     rt->gcSystemAvailableChunkListHead = NULL;
     rt->gcUserAvailableChunkListHead = NULL;
     for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
         ReleaseGCChunk(rt, r.front());
     rt->gcChunkSet.clear();
@@ -1132,237 +1149,273 @@ JSCompartment::reduceGCTriggerBytes(uint
     if (gcTriggerBytes - amount < GC_ALLOCATION_THRESHOLD * GC_HEAP_GROWTH_FACTOR)
         return;
     gcTriggerBytes -= amount;
 }
 
 namespace js {
 namespace gc {
 
-inline ArenaHeader *
-ArenaList::searchForFreeArena()
+inline void *
+ArenaLists::allocateFromArena(JSContext *cx, AllocKind thingKind)
 {
-    while (ArenaHeader *aheader = *cursor) {
-        cursor = &aheader->next;
-        if (aheader->hasFreeThings())
-            return aheader;
-    }
-    return NULL;
-}
-
-template <size_t thingSize>
-inline ArenaHeader *
-ArenaList::getArenaWithFreeList(JSContext *cx, unsigned thingKind)
-{
-    Chunk *chunk;
+    Chunk *chunk = NULL;
+
+    ArenaList *al = &arenaLists[thingKind];
+    AutoLockGC maybeLock;
 
 #ifdef JS_THREADSAFE
-    /*
-     * We cannot search the arena list for free things while the
-     * background finalization runs and can modify head or cursor at any
-     * moment.
-     */
-    if (backgroundFinalizeState == BFS_DONE) {
-      check_arena_list:
-        if (ArenaHeader *aheader = searchForFreeArena())
-            return aheader;
-    }
-
-    AutoLockGC lock(cx->runtime);
-
-    for (;;) {
-        if (backgroundFinalizeState == BFS_JUST_FINISHED) {
+    volatile uintptr_t *bfs = &backgroundFinalizeState[thingKind];
+    if (*bfs != BFS_DONE) {
+        /*
+         * We cannot search the arena list for free things while the
+         * background finalization runs and can modify head or cursor at any
+         * moment. So we always allocate a new arena in that case.
+         */
+        maybeLock.lock(cx->runtime);
+        for (;;) {
+            if (*bfs == BFS_DONE)
+                break;
+
+            if (*bfs == BFS_JUST_FINISHED) {
+                /*
+                 * Before we took the GC lock or while waiting for the
+                 * background finalization to finish the latter added new
+                 * arenas to the list.
+                 */
+                *bfs = BFS_DONE;
+                break;
+            }
+
+            JS_ASSERT(!*al->cursor);
+            chunk = PickChunk(cx);
+            if (chunk)
+                break;
+
             /*
-             * Before we took the GC lock or while waiting for the background
-             * finalization to finish the latter added new arenas to the list.
-             * Check the list again for free things outside the GC lock.
+             * If the background finalization still runs, wait for it to
+             * finish and retry to check if it populated the arena list or
+             * added new empty arenas.
              */
-            JS_ASSERT(*cursor);
-            backgroundFinalizeState = BFS_DONE;
-            goto check_arena_list;
+            JS_ASSERT(*bfs == BFS_RUN);
+            cx->runtime->gcHelperThread.waitBackgroundSweepEnd(cx->runtime, false);
+            JS_ASSERT(*bfs == BFS_JUST_FINISHED || *bfs == BFS_DONE);
         }
-
-        JS_ASSERT(!*cursor);
-        chunk = PickChunk(cx);
-        if (chunk || backgroundFinalizeState == BFS_DONE)
-            break;
-
-        /*
-         * If the background finalization still runs, wait for it to
-         * finish and retry to check if it populated the arena list or
-         * added new empty arenas.
-         */
-        JS_ASSERT(backgroundFinalizeState == BFS_RUN);
-        cx->runtime->gcHelperThread.waitBackgroundSweepEnd(cx->runtime, false);
-        JS_ASSERT(backgroundFinalizeState == BFS_JUST_FINISHED ||
-                  backgroundFinalizeState == BFS_DONE);
     }
-
-#else /* !JS_THREADSAFE */
-
-    if (ArenaHeader *aheader = searchForFreeArena())
-        return aheader;
-    chunk = PickChunk(cx);
-
-#endif /* !JS_THREADSAFE */
+#endif /* JS_THREADSAFE */
 
     if (!chunk) {
-        GCREASON(CHUNK);
-        TriggerGC(cx->runtime);
-        return NULL;
+        if (ArenaHeader *aheader = *al->cursor) {
+            JS_ASSERT(aheader->hasFreeThings());
+
+            /*
+             * The empty arenas are returned to the chunk and should not be
+             * present on the list.
+             */
+            JS_ASSERT(!aheader->isEmpty());
+            al->cursor = &aheader->next;
+
+            /*
+             * Move the free span stored in the arena to the free list and
+             * allocate from it.
+             */
+            freeLists[thingKind] = aheader->getFirstFreeSpan();
+            aheader->setAsFullyUsed();
+            return freeLists[thingKind].infallibleAllocate(Arena::thingSize(thingKind));
+        }
+
+        /* Make sure we hold the GC lock before we call PickChunk. */
+        if (!maybeLock.locked())
+            maybeLock.lock(cx->runtime);
+        chunk = PickChunk(cx);
+        if (!chunk)
+            return NULL;
     }
 
     /*
-     * While we still hold the GC lock get the arena from the chunk and add it
-     * to the head of the list before the cursor to prevent checking the arena
-     * for the free things.
+     * While we still hold the GC lock get an arena from some chunk, mark it
+     * as full as its single free span is moved to the free lists, and insert
+     * it into the list as a fully allocated arena.
+     *
+     * We add the arena before the head, not after the tail pointed to by
+     * the cursor, so after the GC the most recently added arena will be used
+     * first for allocations, improving cache locality.
      */
-    ArenaHeader *aheader = chunk->allocateArena<thingSize>(cx, thingKind);
-    aheader->next = head;
-    if (cursor == &head)
-        cursor = &aheader->next;
-    head = aheader;
-    return aheader;
+    JS_ASSERT(!*al->cursor);
+    ArenaHeader *aheader = chunk->allocateArena(cx, thingKind);
+    aheader->next = al->head;
+    if (!al->head) {
+        JS_ASSERT(al->cursor == &al->head);
+        al->cursor = &aheader->next;
+    }
+    al->head = aheader;
+
+    /* See comments before allocateFromNewArena about this assert. */
+    JS_ASSERT(!aheader->hasFreeThings());
+    uintptr_t arenaAddr = aheader->arenaAddress();
+    return freeLists[thingKind].allocateFromNewArena(arenaAddr,
+                                                     Arena::firstThingOffset(thingKind),
+                                                     Arena::thingSize(thingKind));
 }
 
-template<typename T>
 void
-ArenaList::finalizeNow(JSContext *cx)
+ArenaLists::finalizeNow(JSContext *cx, AllocKind thingKind)
 {
 #ifdef JS_THREADSAFE
-    JS_ASSERT(backgroundFinalizeState == BFS_DONE);
+    JS_ASSERT(backgroundFinalizeState[thingKind] == BFS_DONE);
 #endif
-    FinalizeArenas<T>(cx, &head);
-    cursor = &head;
+    FinalizeArenas(cx, &arenaLists[thingKind], thingKind);
 }
 
+inline void
+ArenaLists::finalizeLater(JSContext *cx, AllocKind thingKind)
+{
+    JS_ASSERT(thingKind == FINALIZE_OBJECT0_BACKGROUND  ||
+              thingKind == FINALIZE_OBJECT2_BACKGROUND  ||
+              thingKind == FINALIZE_OBJECT4_BACKGROUND  ||
+              thingKind == FINALIZE_OBJECT8_BACKGROUND  ||
+              thingKind == FINALIZE_OBJECT12_BACKGROUND ||
+              thingKind == FINALIZE_OBJECT16_BACKGROUND ||
+              thingKind == FINALIZE_FUNCTION            ||
+              thingKind == FINALIZE_SHORT_STRING        ||
+              thingKind == FINALIZE_STRING);
+
 #ifdef JS_THREADSAFE
-template<typename T>
-inline void
-ArenaList::finalizeLater(JSContext *cx)
-{
-    JS_ASSERT_IF(head,
-                 head->getThingKind() == FINALIZE_OBJECT0_BACKGROUND  ||
-                 head->getThingKind() == FINALIZE_OBJECT2_BACKGROUND  ||
-                 head->getThingKind() == FINALIZE_OBJECT4_BACKGROUND  ||
-                 head->getThingKind() == FINALIZE_OBJECT8_BACKGROUND  ||
-                 head->getThingKind() == FINALIZE_OBJECT12_BACKGROUND ||
-                 head->getThingKind() == FINALIZE_OBJECT16_BACKGROUND ||
-                 head->getThingKind() == FINALIZE_FUNCTION            ||
-                 head->getThingKind() == FINALIZE_SHORT_STRING        ||
-                 head->getThingKind() == FINALIZE_STRING);
     JS_ASSERT(!cx->runtime->gcHelperThread.sweeping);
 
+    ArenaList *al = &arenaLists[thingKind];
+    if (!al->head) {
+        JS_ASSERT(backgroundFinalizeState[thingKind] == BFS_DONE);
+        JS_ASSERT(al->cursor == &al->head);
+        return;
+    }
+
     /*
      * The state can be just-finished if we have not allocated any GC things
      * from the arena list after the previous background finalization.
      */
-    JS_ASSERT(backgroundFinalizeState == BFS_DONE ||
-              backgroundFinalizeState == BFS_JUST_FINISHED);
-
-    if (head && cx->gcBackgroundFree) {
+    JS_ASSERT(backgroundFinalizeState[thingKind] == BFS_DONE ||
+              backgroundFinalizeState[thingKind] == BFS_JUST_FINISHED);
+
+    if (cx->gcBackgroundFree) {
         /*
          * To ensure the finalization order even during the background GC we
          * must use infallibleAppend so arenas scheduled for background
          * finalization would not be finalized now if the append fails.
          */
-        cx->gcBackgroundFree->finalizeVector.infallibleAppend(head);
-        head = NULL;
-        cursor = &head;
-        backgroundFinalizeState = BFS_RUN;
+        cx->gcBackgroundFree->finalizeVector.infallibleAppend(al->head);
+        al->clear();
+        backgroundFinalizeState[thingKind] = BFS_RUN;
     } else {
-        JS_ASSERT_IF(!head, cursor == &head);
-        backgroundFinalizeState = BFS_DONE;
-        finalizeNow<T>(cx);
+        FinalizeArenas(cx, al, thingKind);
+        backgroundFinalizeState[thingKind] = BFS_DONE;
     }
+
+#else /* !JS_THREADSAFE */
+
+    finalizeNow(cx, thingKind);
+
+#endif
 }
 
 /*static*/ void
-ArenaList::backgroundFinalize(JSContext *cx, ArenaHeader *listHead)
+ArenaLists::backgroundFinalize(JSContext *cx, ArenaHeader *listHead)
 {
     JS_ASSERT(listHead);
-    unsigned thingKind = listHead->getThingKind();
+    AllocKind thingKind = listHead->getAllocKind();
     JSCompartment *comp = listHead->compartment;
-    ArenaList *al = &comp->arenas[thingKind];
-
-    switch (thingKind) {
-      default:
-        JS_NOT_REACHED("wrong kind");
-        break;
-      case FINALIZE_OBJECT0_BACKGROUND:
-        FinalizeArenas<JSObject>(cx, &listHead);
-        break;
-      case FINALIZE_OBJECT2_BACKGROUND:
-        FinalizeArenas<JSObject_Slots2>(cx, &listHead);
-        break;
-      case FINALIZE_OBJECT4_BACKGROUND:
-        FinalizeArenas<JSObject_Slots4>(cx, &listHead);
-        break;
-      case FINALIZE_OBJECT8_BACKGROUND:
-        FinalizeArenas<JSObject_Slots8>(cx, &listHead);
-        break;
-      case FINALIZE_OBJECT12_BACKGROUND:
-        FinalizeArenas<JSObject_Slots12>(cx, &listHead);
-        break;
-      case FINALIZE_OBJECT16_BACKGROUND:
-        FinalizeArenas<JSObject_Slots16>(cx, &listHead);
-        break;
-      case FINALIZE_FUNCTION:
-        FinalizeArenas<JSFunction>(cx, &listHead);
-        break;
-      case FINALIZE_STRING:
-        FinalizeArenas<JSString>(cx, &listHead);
-        break;
-      case FINALIZE_SHORT_STRING:
-        FinalizeArenas<JSShortString>(cx, &listHead);
-        break;
-    }
+    ArenaList finalized;
+    finalized.head = listHead;
+    FinalizeArenas(cx, &finalized, thingKind);
 
     /*
      * After we finish the finalization al->cursor must point to the end of
      * the head list as we emptied the list before the background finalization
      * and the allocation adds new arenas before the cursor.
      */
+    ArenaLists *lists = &comp->arenas;
+    ArenaList *al = &lists->arenaLists[thingKind];
+
     AutoLockGC lock(cx->runtime);
-    JS_ASSERT(al->backgroundFinalizeState == BFS_RUN);
+    JS_ASSERT(lists->backgroundFinalizeState[thingKind] == BFS_RUN);
     JS_ASSERT(!*al->cursor);
-    if (listHead) {
-        *al->cursor = listHead;
-        al->backgroundFinalizeState = BFS_JUST_FINISHED;
+
+    /*
+     * We must set the state to BFS_JUST_FINISHED if we touch the arena list,
+     * even if we add to the list only fully allocated arenas without any free
+     * things. It ensures that the allocation thread takes the GC lock and all
+     * writes to the free list elements are propagated. As we always take the
+     * GC lock when allocating new arenas from the chunks we can set the state
+     * to BFS_DONE if we have released all finalized arenas back to their
+     * chunks.
+     */
+    if (finalized.head) {
+        *al->cursor = finalized.head;
+        if (finalized.cursor != &finalized.head)
+            al->cursor = finalized.cursor;
+        lists->backgroundFinalizeState[thingKind] = BFS_JUST_FINISHED;
     } else {
-        al->backgroundFinalizeState = BFS_DONE;
+        lists->backgroundFinalizeState[thingKind] = BFS_DONE;
     }
 }
 
-#endif /* JS_THREADSAFE */
-
-#ifdef DEBUG
-bool
-CheckAllocation(JSContext *cx)
+void
+ArenaLists::finalizeObjects(JSContext *cx)
 {
+    finalizeNow(cx, FINALIZE_OBJECT0);
+    finalizeNow(cx, FINALIZE_OBJECT2);
+    finalizeNow(cx, FINALIZE_OBJECT4);
+    finalizeNow(cx, FINALIZE_OBJECT8);
+    finalizeNow(cx, FINALIZE_OBJECT12);
+    finalizeNow(cx, FINALIZE_OBJECT16);
+
 #ifdef JS_THREADSAFE
-    JS_ASSERT(cx->thread());
-#endif
-    JS_ASSERT(!cx->runtime->gcRunning);
-    return true;
-}
+    finalizeLater(cx, FINALIZE_OBJECT0_BACKGROUND);
+    finalizeLater(cx, FINALIZE_OBJECT2_BACKGROUND);
+    finalizeLater(cx, FINALIZE_OBJECT4_BACKGROUND);
+    finalizeLater(cx, FINALIZE_OBJECT8_BACKGROUND);
+    finalizeLater(cx, FINALIZE_OBJECT12_BACKGROUND);
+    finalizeLater(cx, FINALIZE_OBJECT16_BACKGROUND);
 #endif
 
-inline bool
-NeedLastDitchGC(JSContext *cx)
-{
-    JSRuntime *rt = cx->runtime;
-    return rt->gcIsNeeded;
+    /*
+     * We must finalize Function instances after finalizing any other objects
+     * even if we use the background finalization for the latter. See comments
+     * in JSObject::finalizeUpvarsIfFlatClosure.
+     */
+    finalizeLater(cx, FINALIZE_FUNCTION);
+
+#if JS_HAS_XML_SUPPORT
+    finalizeNow(cx, FINALIZE_XML);
+#endif
 }
 
-/*
- * Return false only if the GC run but could not bring its memory usage under
- * JSRuntime::gcMaxBytes.
- */
-static bool
+void
+ArenaLists::finalizeStrings(JSContext *cx)
+{
+    finalizeLater(cx, FINALIZE_SHORT_STRING);
+    finalizeLater(cx, FINALIZE_STRING);
+
+    finalizeNow(cx, FINALIZE_EXTERNAL_STRING);
+}
+
+void
+ArenaLists::finalizeShapes(JSContext *cx)
+{
+    finalizeNow(cx, FINALIZE_SHAPE);
+    finalizeNow(cx, FINALIZE_TYPE_OBJECT);
+}
+
+void
+ArenaLists::finalizeScripts(JSContext *cx)
+{
+    finalizeNow(cx, FINALIZE_SCRIPT);
+}
+
+static void
 RunLastDitchGC(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime;
 #ifdef JS_THREADSAFE
     Maybe<AutoUnlockAtomsCompartment> maybeUnlockAtomsCompartment;
     if (cx->compartment == rt->atomsCompartment && rt->atomsCompartmentIsLocked)
         maybeUnlockAtomsCompartment.construct(cx);
 #endif
@@ -1370,123 +1423,77 @@ RunLastDitchGC(JSContext *cx)
     AutoKeepAtoms keep(rt);
     GCREASON(LASTDITCH);
     js_GC(cx, rt->gcTriggerCompartment, GC_NORMAL);
 
 #ifdef JS_THREADSAFE
     if (rt->gcBytes >= rt->gcMaxBytes)
         cx->runtime->gcHelperThread.waitBackgroundSweepEnd(cx->runtime);
 #endif
-
-    return rt->gcBytes < rt->gcMaxBytes;
 }
 
-static inline bool
+inline bool
 IsGCAllowed(JSContext *cx)
 {
     return !JS_ON_TRACE(cx) && !JS_THREAD_DATA(cx)->waiveGCQuota;
 }
 
-template <typename T>
-inline void *
-RefillTypedFreeList(JSContext *cx, unsigned thingKind)
+/* static */ void *
+ArenaLists::refillFreeList(JSContext *cx, AllocKind thingKind)
 {
-    JS_ASSERT(!cx->runtime->gcRunning);
+    JS_ASSERT(cx->compartment->arenas.freeLists[thingKind].isEmpty());
 
     /*
      * For compatibility with older code we tolerate calling the allocator
      * during the GC in optimized builds.
      */
-    if (cx->runtime->gcRunning)
+    JSRuntime *rt = cx->runtime;
+    JS_ASSERT(!rt->gcRunning);
+    if (rt->gcRunning)
         return NULL;
 
-    JSCompartment *compartment = cx->compartment;
-    JS_ASSERT(compartment->freeLists.lists[thingKind].isEmpty());
-
-    bool canGC = IsGCAllowed(cx);
-    bool runGC = canGC && JS_UNLIKELY(NeedLastDitchGC(cx));
+    bool runGC = !!rt->gcIsNeeded;
     for (;;) {
-        if (runGC) {
-            if (!RunLastDitchGC(cx))
+        if (JS_UNLIKELY(runGC) && IsGCAllowed(cx)) {
+            RunLastDitchGC(cx);
+
+            /* Report OOM if the GC failed to free enough memory. */
+            if (rt->gcBytes > rt->gcMaxBytes)
                 break;
 
             /*
              * The JSGC_END callback can legitimately allocate new GC
              * things and populate the free list. If that happens, just
              * return that list head.
              */
-            if (void *thing = compartment->freeLists.getNext(thingKind, sizeof(T)))
+            size_t thingSize = Arena::thingSize(thingKind);
+            if (void *thing = cx->compartment->arenas.allocateFromFreeList(thingKind, thingSize))
                 return thing;
         }
-        ArenaHeader *aheader =
-            compartment->arenas[thingKind].getArenaWithFreeList<sizeof(T)>(cx, thingKind);
-        if (aheader) {
-            JS_ASSERT(sizeof(T) == aheader->getThingSize());
-            return compartment->freeLists.populate(aheader, thingKind, sizeof(T));
-        }
+        void *thing = cx->compartment->arenas.allocateFromArena(cx, thingKind);
+        if (JS_LIKELY(!!thing))
+            return thing;
 
         /*
-         * We failed to allocate any arena. Run the GC if we can unless we
-         * have done it already.
+         * We failed to allocate. Run the GC if we can unless we have done it
+         * already. Otherwise report OOM but first schedule a new GC soon.
          */
-        if (!canGC || runGC)
+        if (runGC || !IsGCAllowed(cx)) {
+            AutoLockGC lock(rt);
+            GCREASON(REFILL);
+            TriggerGC(rt);
             break;
+        }
         runGC = true;
     }
 
     js_ReportOutOfMemory(cx);
     return NULL;
 }
 
-void *
-RefillFinalizableFreeList(JSContext *cx, unsigned thingKind)
-{
-    switch (thingKind) {
-      case FINALIZE_OBJECT0:
-      case FINALIZE_OBJECT0_BACKGROUND:
-        return RefillTypedFreeList<JSObject>(cx, thingKind);
-      case FINALIZE_OBJECT2:
-      case FINALIZE_OBJECT2_BACKGROUND:
-        return RefillTypedFreeList<JSObject_Slots2>(cx, thingKind);
-      case FINALIZE_OBJECT4:
-      case FINALIZE_OBJECT4_BACKGROUND:
-        return RefillTypedFreeList<JSObject_Slots4>(cx, thingKind);
-      case FINALIZE_OBJECT8:
-      case FINALIZE_OBJECT8_BACKGROUND:
-        return RefillTypedFreeList<JSObject_Slots8>(cx, thingKind);
-      case FINALIZE_OBJECT12:
-      case FINALIZE_OBJECT12_BACKGROUND:
-        return RefillTypedFreeList<JSObject_Slots12>(cx, thingKind);
-      case FINALIZE_OBJECT16:
-      case FINALIZE_OBJECT16_BACKGROUND:
-        return RefillTypedFreeList<JSObject_Slots16>(cx, thingKind);
-      case FINALIZE_STRING:
-        return RefillTypedFreeList<JSString>(cx, thingKind);
-      case FINALIZE_EXTERNAL_STRING:
-        return RefillTypedFreeList<JSExternalString>(cx, thingKind);
-      case FINALIZE_SHORT_STRING:
-        return RefillTypedFreeList<JSShortString>(cx, thingKind);
-      case FINALIZE_FUNCTION:
-        return RefillTypedFreeList<JSFunction>(cx, thingKind);
-      case FINALIZE_SCRIPT:
-        return RefillTypedFreeList<JSScript>(cx, thingKind);
-      case FINALIZE_SHAPE:
-        return RefillTypedFreeList<Shape>(cx, thingKind);
-      case FINALIZE_TYPE_OBJECT:
-        return RefillTypedFreeList<types::TypeObject>(cx, thingKind);
-#if JS_HAS_XML_SUPPORT
-      case FINALIZE_XML:
-        return RefillTypedFreeList<JSXML>(cx, thingKind);
-#endif
-      default:
-        JS_NOT_REACHED("bad finalize kind");
-        return 0;
-    }
-}
-
 } /* namespace gc */
 } /* namespace js */
 
 JSGCTraceKind
 js_GetGCThingTraceKind(void *thing)
 {
     return GetGCThingTraceKind(thing);
 }
@@ -1578,21 +1585,22 @@ GCMarker::delayMarkingChildren(const voi
 #ifdef DEBUG
     markLaterArenas++;
 #endif
 }
 
 static void
 MarkDelayedChildren(JSTracer *trc, ArenaHeader *aheader)
 {
-    JSGCTraceKind traceKind = GetFinalizableTraceKind(aheader->getThingKind());
+    AllocKind thingKind = aheader->getAllocKind();
+    JSGCTraceKind traceKind = MapAllocToTraceKind(thingKind);
     size_t thingSize = aheader->getThingSize();
     Arena *a = aheader->getArena();
     uintptr_t end = a->thingsEnd();
-    for (uintptr_t thing = a->thingsStart(thingSize); thing != end; thing += thingSize) {
+    for (uintptr_t thing = a->thingsStart(thingKind); thing != end; thing += thingSize) {
         Cell *t = reinterpret_cast<Cell *>(thing);
         if (t->isMarked())
             JS_TraceChildren(trc, t, traceKind);
     }
 }
 
 void
 GCMarker::markDelayedChildren()
@@ -1968,77 +1976,16 @@ MaybeGC(JSContext *cx)
         } else {
             rt->gcNextFullGCTime = now + GC_IDLE_FULL_SPAN;
         }
     }
 }
 
 } /* namespace js */
 
-void
-JSCompartment::finalizeObjectArenaLists(JSContext *cx)
-{
-    arenas[FINALIZE_OBJECT0]. finalizeNow<JSObject>(cx);
-    arenas[FINALIZE_OBJECT2]. finalizeNow<JSObject_Slots2>(cx);
-    arenas[FINALIZE_OBJECT4]. finalizeNow<JSObject_Slots4>(cx);
-    arenas[FINALIZE_OBJECT8]. finalizeNow<JSObject_Slots8>(cx);
-    arenas[FINALIZE_OBJECT12].finalizeNow<JSObject_Slots12>(cx);
-    arenas[FINALIZE_OBJECT16].finalizeNow<JSObject_Slots16>(cx);
-
-#ifdef JS_THREADSAFE
-    arenas[FINALIZE_OBJECT0_BACKGROUND]. finalizeLater<JSObject>(cx);
-    arenas[FINALIZE_OBJECT2_BACKGROUND]. finalizeLater<JSObject_Slots2>(cx);
-    arenas[FINALIZE_OBJECT4_BACKGROUND]. finalizeLater<JSObject_Slots4>(cx);
-    arenas[FINALIZE_OBJECT8_BACKGROUND]. finalizeLater<JSObject_Slots8>(cx);
-    arenas[FINALIZE_OBJECT12_BACKGROUND].finalizeLater<JSObject_Slots12>(cx);
-    arenas[FINALIZE_OBJECT16_BACKGROUND].finalizeLater<JSObject_Slots16>(cx);
-#endif
-
-    /*
-     * We must finalize Function instances after finalizing any other objects
-     * even if we use the background finalization for the latter. See comments
-     * in JSObject::finalizeUpvarsIfFlatClosure.
-     */
-#ifdef JS_THREADSAFE
-    arenas[FINALIZE_FUNCTION].finalizeLater<JSFunction>(cx);
-#else
-    arenas[FINALIZE_FUNCTION].finalizeNow<JSFunction>(cx);
-#endif
-
-#if JS_HAS_XML_SUPPORT
-    arenas[FINALIZE_XML].finalizeNow<JSXML>(cx);
-#endif
-}
-
-void
-JSCompartment::finalizeStringArenaLists(JSContext *cx)
-{
-#ifdef JS_THREADSAFE
-    arenas[FINALIZE_SHORT_STRING].finalizeLater<JSShortString>(cx);
-    arenas[FINALIZE_STRING].finalizeLater<JSString>(cx);
-#else
-    arenas[FINALIZE_SHORT_STRING].finalizeNow<JSShortString>(cx);
-    arenas[FINALIZE_STRING].finalizeNow<JSString>(cx);
-#endif
-    arenas[FINALIZE_EXTERNAL_STRING].finalizeNow<JSExternalString>(cx);
-}
-
-void
-JSCompartment::finalizeShapeArenaLists(JSContext *cx)
-{
-    arenas[FINALIZE_TYPE_OBJECT].finalizeNow<types::TypeObject>(cx);
-    arenas[FINALIZE_SHAPE].finalizeNow<Shape>(cx);
-}
-
-void
-JSCompartment::finalizeScriptArenaLists(JSContext *cx)
-{
-    arenas[FINALIZE_SCRIPT].finalizeNow<JSScript>(cx);
-}
-
 #ifdef JS_THREADSAFE
 
 namespace js {
 
 bool
 GCHelperThread::init(JSRuntime *rt)
 {
     if (!(wakeup = PR_NewCondVar(rt->gcLock)))
@@ -2120,19 +2067,19 @@ GCHelperThread::startBackgroundSweep(JSR
     lastGCKind = gckind;
     sweeping = true;
     PR_NotifyCondVar(wakeup);
 }
 
 void
 GCHelperThread::waitBackgroundSweepEnd(JSRuntime *rt, bool gcUnlocked)
 {
-    Maybe<AutoLockGC> lock;
+    AutoLockGC maybeLock;
     if (gcUnlocked)
-        lock.construct(rt);
+        maybeLock.lock(rt);
     while (sweeping)
         PR_WaitCondVar(sweepingDone, PR_INTERVAL_NO_TIMEOUT);
 }
 
 JS_FRIEND_API(void)
 GCHelperThread::replenishAndFreeLater(void *ptr)
 {
     JS_ASSERT(freeCursor == freeCursorEnd);
@@ -2153,20 +2100,20 @@ GCHelperThread::replenishAndFreeLater(vo
 
 void
 GCHelperThread::doSweep()
 {
     JS_ASSERT(cx);
 
     /*
      * We must finalize in the insert order, see comments in
-     * finalizeObjectArenaLists.
+     * finalizeObjects.
      */
     for (ArenaHeader **i = finalizeVector.begin(); i != finalizeVector.end(); ++i)
-        ArenaList::backgroundFinalize(cx, *i);
+        ArenaLists::backgroundFinalize(cx, *i);
     finalizeVector.resize(0);
     ExpireGCChunks(cx->runtime, lastGCKind);
     cx = NULL;
 
     if (freeCursor) {
         void **array = freeCursorEnd - FREE_ARRAY_LENGTH;
         freeElementsAndArray(array, freeCursor);
         freeCursor = freeCursorEnd = NULL;
@@ -2227,19 +2174,19 @@ SweepCompartments(JSContext *cx, JSGCInv
     JSCompartment **write = read;
     JS_ASSERT(rt->compartments.length() >= 1);
     JS_ASSERT(*rt->compartments.begin() == rt->atomsCompartment);
 
     while (read < end) {
         JSCompartment *compartment = *read++;
 
         if (!compartment->hold &&
-            (compartment->arenaListsAreEmpty() || gckind == GC_LAST_CONTEXT))
+            (compartment->arenas.arenaListsAreEmpty() || gckind == GC_LAST_CONTEXT))
         {
-            compartment->freeLists.checkEmpty();
+            compartment->arenas.checkEmptyFreeLists();
             if (callback)
                 JS_ALWAYS_TRUE(callback(cx, compartment, JSCOMPARTMENT_DESTROY));
             if (compartment->principals)
                 JSPRINCIPALS_DROP(cx, compartment->principals);
             cx->delete_(compartment);
             continue;
         }
         *write++ = compartment;
@@ -2329,17 +2276,18 @@ MarkAndSweep(JSContext *cx, JSCompartmen
 
     if (rt->gcCallback)
         (void) rt->gcCallback(cx, JSGC_MARK_END);
 
 #ifdef DEBUG
     /* Make sure that we didn't mark an object in another compartment */
     if (comp) {
         for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
-            JS_ASSERT_IF(*c != comp && *c != rt->atomsCompartment, checkArenaListAllUnmarked(*c));
+            JS_ASSERT_IF(*c != comp && *c != rt->atomsCompartment,
+                         (*c)->arenas.checkArenaListAllUnmarked());
     }
 #endif
 
     /*
      * Sweep phase.
      *
      * Finalize as we sweep, outside of rt->gcLock but with rt->gcRunning set
      * so that any attempt to allocate a GC-thing from a finalizer will fail,
@@ -2367,55 +2315,55 @@ MarkAndSweep(JSContext *cx, JSCompartmen
      * can access them even if they will be freed. Sweep the runtime's property trees
      * after finalizing objects, in case any had watchpoints referencing tree nodes.
      * Do this before sweeping compartments, so that we sweep all shapes in
      * unreachable compartments.
      */
     if (comp) {
         Probes::GCStartSweepPhase(comp);
         comp->sweep(cx, 0);
-        comp->finalizeObjectArenaLists(cx);
+        comp->arenas.finalizeObjects(cx);
         GCTIMESTAMP(sweepObjectEnd);
-        comp->finalizeStringArenaLists(cx);
+        comp->arenas.finalizeStrings(cx);
         GCTIMESTAMP(sweepStringEnd);
-        comp->finalizeScriptArenaLists(cx);
+        comp->arenas.finalizeScripts(cx);
         GCTIMESTAMP(sweepScriptEnd);
-        comp->finalizeShapeArenaLists(cx);
+        comp->arenas.finalizeShapes(cx);
         GCTIMESTAMP(sweepShapeEnd);
         Probes::GCEndSweepPhase(comp);
     } else {
         /*
          * Some sweeping is not compartment-specific. Start a NULL-compartment
          * phase to demarcate all of that. (The compartment sweeps will nest
          * within.)
          */
         Probes::GCStartSweepPhase(NULL);
 
         Debugger::sweepAll(cx);
         SweepCrossCompartmentWrappers(cx);
         for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); c++) {
             Probes::GCStartSweepPhase(*c);
-            (*c)->finalizeObjectArenaLists(cx);
+            (*c)->arenas.finalizeObjects(cx);
         }
 
         GCTIMESTAMP(sweepObjectEnd);
 
         for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); c++)
-            (*c)->finalizeStringArenaLists(cx);
+            (*c)->arenas.finalizeStrings(cx);
 
         GCTIMESTAMP(sweepStringEnd);
 
         for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); c++) {
-            (*c)->finalizeScriptArenaLists(cx);
+            (*c)->arenas.finalizeScripts(cx);
         }
 
         GCTIMESTAMP(sweepScriptEnd);
 
         for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); c++) {
-            (*c)->finalizeShapeArenaLists(cx);
+            (*c)->arenas.finalizeShapes(cx);
             Probes::GCEndSweepPhase(*c);
         }
 
         GCTIMESTAMP(sweepShapeEnd);
     }
 
 #ifdef DEBUG
      PropertyTree::dumpShapes(cx);
@@ -2803,22 +2751,22 @@ namespace js {
 
 class AutoCopyFreeListToArenas {
     JSRuntime *rt;
 
   public:
     AutoCopyFreeListToArenas(JSRuntime *rt)
       : rt(rt) {
         for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
-            (*c)->freeLists.copyToArenas();
+            (*c)->arenas.copyFreeListsToArenas();
     }
 
     ~AutoCopyFreeListToArenas() {
         for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
-            (*c)->freeLists.clearInArenas();
+            (*c)->arenas.clearFreeListsInArenas();
     }
 };
 
 void
 TraceRuntime(JSTracer *trc)
 {
     JS_ASSERT(!IS_GC_MARKING_TRACER(trc));
     LeaveTrace(trc->context);
@@ -2899,29 +2847,28 @@ IterateCompartmentsArenasCells(JSContext
 #endif
     AutoUnlockGC unlock(rt);
 
     AutoCopyFreeListToArenas copy(rt);
     for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c) {
         JSCompartment *compartment = *c;
         (*compartmentCallback)(cx, data, compartment);
 
-        for (unsigned thingKind = 0; thingKind < FINALIZE_LIMIT; thingKind++) {
-            JSGCTraceKind traceKind = GetFinalizableTraceKind(thingKind);
-            size_t thingSize = GCThingSizeMap[thingKind];
+        for (size_t thingKind = 0; thingKind != FINALIZE_LIMIT; thingKind++) {
+            JSGCTraceKind traceKind = MapAllocToTraceKind(AllocKind(thingKind));
+            size_t thingSize = Arena::thingSize(AllocKind(thingKind));
             IterateArenaCallbackOp arenaOp(cx, data, arenaCallback, traceKind, thingSize);
             IterateCellCallbackOp cellOp(cx, data, cellCallback, traceKind, thingSize);
-
-            ForEachArenaAndCell(compartment, (FinalizeKind) thingKind, arenaOp, cellOp);
+            ForEachArenaAndCell(compartment, AllocKind(thingKind), arenaOp, cellOp);
         }
     }
 }
 
 void
-IterateCells(JSContext *cx, JSCompartment *compartment, FinalizeKind thingKind,
+IterateCells(JSContext *cx, JSCompartment *compartment, AllocKind thingKind,
              void *data, IterateCellCallback cellCallback)
 {
     /* :XXX: Any way to common this preamble with IterateCompartmentsArenasCells? */
     CHECK_REQUEST(cx);
 
     LeaveTrace(cx);
 
     JSRuntime *rt = cx->runtime;
@@ -2931,18 +2878,18 @@ IterateCells(JSContext *cx, JSCompartmen
     AutoGCSession gcsession(cx);
 #ifdef JS_THREADSAFE
     rt->gcHelperThread.waitBackgroundSweepEnd(rt, false);
 #endif
     AutoUnlockGC unlock(rt);
 
     AutoCopyFreeListToArenas copy(rt);
 
-    JSGCTraceKind traceKind = GetFinalizableTraceKind(thingKind);
-    size_t thingSize = GCThingSizeMap[thingKind];
+    JSGCTraceKind traceKind = MapAllocToTraceKind(thingKind);
+    size_t thingSize = Arena::thingSize(thingKind);
 
     if (compartment) {
         for (CellIterUnderGC i(compartment, thingKind); !i.done(); i.next())
             cellCallback(cx, data, i.getCell(), traceKind, thingSize);
     } else {
         for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c) {
             for (CellIterUnderGC i(*c, thingKind); !i.done(); i.next())
                 cellCallback(cx, data, i.getCell(), traceKind, thingSize);
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -75,53 +75,22 @@ namespace js {
 class GCHelperThread;
 struct Shape;
 
 namespace gc {
 
 struct Arena;
 struct MarkingDelay;
 
-/* The kind of GC thing with a finalizer. */
-enum FinalizeKind {
-    FINALIZE_OBJECT0,
-    FINALIZE_OBJECT0_BACKGROUND,
-    FINALIZE_OBJECT2,
-    FINALIZE_OBJECT2_BACKGROUND,
-    FINALIZE_OBJECT4,
-    FINALIZE_OBJECT4_BACKGROUND,
-    FINALIZE_OBJECT8,
-    FINALIZE_OBJECT8_BACKGROUND,
-    FINALIZE_OBJECT12,
-    FINALIZE_OBJECT12_BACKGROUND,
-    FINALIZE_OBJECT16,
-    FINALIZE_OBJECT16_BACKGROUND,
-    FINALIZE_OBJECT_LAST = FINALIZE_OBJECT16_BACKGROUND,
-    FINALIZE_FUNCTION,
-    FINALIZE_FUNCTION_AND_OBJECT_LAST = FINALIZE_FUNCTION,
-    FINALIZE_SCRIPT,
-    FINALIZE_SHAPE,
-    FINALIZE_TYPE_OBJECT,
-#if JS_HAS_XML_SUPPORT
-    FINALIZE_XML,
-#endif
-    FINALIZE_SHORT_STRING,
-    FINALIZE_STRING,
-    FINALIZE_EXTERNAL_STRING,
-    FINALIZE_LIMIT
-};
-
 /*
  * This must be an upper bound, but we do not need the least upper bound, so
  * we just exclude non-background objects.
  */
 const size_t MAX_BACKGROUND_FINALIZE_KINDS = FINALIZE_LIMIT - (FINALIZE_OBJECT_LAST + 1) / 2;
 
-extern JS_FRIEND_DATA(const uint8) GCThingSizeMap[];
-
 const size_t ArenaShift = 12;
 const size_t ArenaSize = size_t(1) << ArenaShift;
 const size_t ArenaMask = ArenaSize - 1;
 
 /*
  * The mark bitmap has one bit per each GC cell. For multi-cell GC things this
  * wastes space but allows to avoid expensive devisions by thing's size when
  * accessing the bitmap. In addition this allows to use some bits for colored
@@ -169,26 +138,30 @@ struct FreeSpan {
       : first(first), last(last) {
         checkSpan();
     }
 
     /*
      * To minimize the size of the arena header the first span is encoded
      * there as offsets from the arena start.
      */
-    static size_t encodeOffsets(size_t firstOffset, size_t lastOffset = ArenaSize - 1) {
+    static size_t encodeOffsets(size_t firstOffset, size_t lastOffset) {
         /* Check that we can pack the offsets into uint16. */
         JS_STATIC_ASSERT(ArenaShift < 16);
         JS_ASSERT(firstOffset <= ArenaSize);
         JS_ASSERT(lastOffset < ArenaSize);
         JS_ASSERT(firstOffset <= ((lastOffset + 1) & ~size_t(1)));
         return firstOffset | (lastOffset << 16);
     }
 
-    static const size_t EmptyOffsets = ArenaSize | ((ArenaSize - 1) << 16);
+    /*
+     * Encoded offsets for a full arena when its first span is the last one
+     * and empty.
+     */
+    static const size_t FullArenaOffsets = ArenaSize | ((ArenaSize - 1) << 16);
 
     static FreeSpan decodeOffsets(uintptr_t arenaAddr, size_t offsets) {
         JS_ASSERT(!(arenaAddr & ArenaMask));
 
         size_t firstOffset = offsets & 0xFFFF;
         size_t lastOffset = offsets >> 16;
         JS_ASSERT(firstOffset <= ArenaSize);
         JS_ASSERT(lastOffset < ArenaSize);
@@ -282,16 +255,47 @@ struct FreeSpan {
             *this = *reinterpret_cast<FreeSpan *>(thing);
         } else {
             return NULL;
         }
         checkSpan();
         return reinterpret_cast<void *>(thing);
     }
 
+    /* A version of allocate when we know that the span is not empty. */
+    JS_ALWAYS_INLINE void *infallibleAllocate(size_t thingSize) {
+        JS_ASSERT(thingSize % Cell::CellSize == 0);
+        checkSpan();
+        uintptr_t thing = first;
+        if (thing < last) {
+            first = thing + thingSize;
+        } else {
+            JS_ASSERT(thing == last);
+            *this = *reinterpret_cast<FreeSpan *>(thing);
+        }
+        checkSpan();
+        return reinterpret_cast<void *>(thing);
+    }
+
+    /*
+     * Allocate from a newly allocated arena. We do not move the free list
+     * from the arena. Rather we set the arena up as fully used during the
+     * initialization so to allocate we simply return the first thing in the
+     * arena and set the free list to point to the second.
+     */
+    JS_ALWAYS_INLINE void *allocateFromNewArena(uintptr_t arenaAddr, size_t firstThingOffset,
+                                                size_t thingSize) {
+        JS_ASSERT(!(arenaAddr & ArenaMask));
+        uintptr_t thing = arenaAddr | firstThingOffset;
+        first = thing + thingSize;
+        last = arenaAddr | ArenaMask;
+        checkSpan();
+        return reinterpret_cast<void *>(thing);
+    }
+
     void checkSpan() const {
 #ifdef DEBUG
         /* We do not allow spans at the end of the address space. */
         JS_ASSERT(last != uintptr_t(-1));
         JS_ASSERT(first);
         JS_ASSERT(last);
         JS_ASSERT(first - 1 <= last);
         uintptr_t arenaAddr = arenaAddressUnchecked();
@@ -360,79 +364,80 @@ struct ArenaHeader {
     /*
      * The first span of free things in the arena. We encode it as the start
      * and end offsets within the arena, not as FreeSpan structure, to
      * minimize the header size.
      */
     size_t          firstFreeSpanOffsets;
 
     /*
-     * One of FinalizeKind constants or FINALIZE_LIMIT when the arena does not
+     * One of the AllocKind constants or FINALIZE_LIMIT when the arena does not
      * contain any GC things and is on the list of empty arenas in the GC
-     * chunk. The later allows to quickly check if the arena is allocated
+     * chunk. The latter allows us to quickly check if the arena is allocated
      * during the conservative GC scanning without searching the arena in the
      * list.
      */
-    unsigned        thingKind;
+    unsigned        allocKind;
 
     friend struct FreeLists;
 
   public:
     inline uintptr_t address() const;
     inline Chunk *chunk() const;
 
     void setAsNotAllocated() {
-        thingKind = FINALIZE_LIMIT;
+        allocKind = FINALIZE_LIMIT;
     }
 
     bool allocated() const {
-        return thingKind < FINALIZE_LIMIT;
+        JS_ASSERT(allocKind <= FINALIZE_LIMIT);
+        return allocKind < FINALIZE_LIMIT;
     }
 
-    inline void init(JSCompartment *comp, unsigned thingKind, size_t thingSize);
+    inline void init(JSCompartment *comp, AllocKind kind);
 
     uintptr_t arenaAddress() const {
         return address();
     }
 
     Arena *getArena() {
         return reinterpret_cast<Arena *>(arenaAddress());
     }
 
-    unsigned getThingKind() const {
+    AllocKind getAllocKind() const {
         JS_ASSERT(allocated());
-        return thingKind;
+        return AllocKind(allocKind);
     }
 
+    inline size_t getThingSize() const;
+
     bool hasFreeThings() const {
-        return firstFreeSpanOffsets != FreeSpan::EmptyOffsets;
+        return firstFreeSpanOffsets != FreeSpan::FullArenaOffsets;
     }
 
+    inline bool isEmpty() const;
+
     void setAsFullyUsed() {
-        firstFreeSpanOffsets = FreeSpan::EmptyOffsets;
+        firstFreeSpanOffsets = FreeSpan::FullArenaOffsets;
     }
 
     FreeSpan getFirstFreeSpan() const {
 #ifdef DEBUG
         checkSynchronizedWithFreeList();
 #endif
         return FreeSpan::decodeOffsets(arenaAddress(), firstFreeSpanOffsets);
     }
 
     void setFirstFreeSpan(const FreeSpan *span) {
         JS_ASSERT(span->isWithinArena(arenaAddress()));
         firstFreeSpanOffsets = span->encodeAsOffsets();
     }
 
     inline MarkingDelay *getMarkingDelay() const;
 
-    size_t getThingSize() const {
-        return GCThingSizeMap[getThingKind()];
-    }
-
 #ifdef DEBUG
     void checkSynchronizedWithFreeList() const;
 #endif
 };
 
 struct Arena {
     /*
      * Layout of an arena:
@@ -441,65 +446,69 @@ struct Arena {
      * filled with the array of T things. The pad bytes ensure that the thing
      * array ends exactly at the end of the arena.
      *
      * +-------------+-----+----+----+-----+----+
      * | ArenaHeader | pad | T0 | T1 | ... | Tn |
      * +-------------+-----+----+----+-----+----+
      *
      * <----------------------------------------> = ArenaSize bytes
-     * <-------------------> = thingsStartOffset
+     * <-------------------> = first thing offset
      */
     ArenaHeader aheader;
     uint8_t     data[ArenaSize - sizeof(ArenaHeader)];
 
-    static void staticAsserts() {
-        JS_STATIC_ASSERT(sizeof(Arena) == ArenaSize);
+  private:
+    static JS_FRIEND_DATA(const uint32) ThingSizes[];
+    static JS_FRIEND_DATA(const uint32) FirstThingOffsets[];
+
+  public:
+    static void staticAsserts();
+
+    static size_t thingSize(AllocKind kind) {
+        return ThingSizes[kind];
+    }
+
+    static size_t firstThingOffset(AllocKind kind) {
+        return FirstThingOffsets[kind];
     }
 
     static size_t thingsPerArena(size_t thingSize) {
         JS_ASSERT(thingSize % Cell::CellSize == 0);
 
         /* We should be able to fit FreeSpan in any GC thing. */
         JS_ASSERT(thingSize >= sizeof(FreeSpan));
 
-        /* GCThingSizeMap assumes that any thing fits uint8. */
-        JS_ASSERT(thingSize < 256);
-
         return (ArenaSize - sizeof(ArenaHeader)) / thingSize;
     }
 
     static size_t thingsSpan(size_t thingSize) {
         return thingsPerArena(thingSize) * thingSize;
     }
 
-    static size_t thingsStartOffset(size_t thingSize) {
-        return ArenaSize - thingsSpan(thingSize);
-    }
-
     static bool isAligned(uintptr_t thing, size_t thingSize) {
         /* Things ends at the arena end. */
         uintptr_t tailOffset = (ArenaSize - thing) & ArenaMask;
         return tailOffset % thingSize == 0;
     }
 
     uintptr_t address() const {
         return aheader.address();
     }
 
-    uintptr_t thingsStart(size_t thingSize) {
-        return address() | thingsStartOffset(thingSize);
+    uintptr_t thingsStart(AllocKind thingKind) {
+        return address() | firstThingOffset(thingKind);
     }
 
     uintptr_t thingsEnd() {
         return address() + ArenaSize;
     }
 
     template <typename T>
-    bool finalize(JSContext *cx);
+    bool finalize(JSContext *cx, AllocKind thingKind, size_t thingSize);
 };
 
 /*
  * When recursive marking uses too much stack the marking is delayed and
  * the corresponding arenas are put into a stack using a linked via the
  * following per arena structure.
  */
 struct MarkingDelay {
@@ -636,18 +645,17 @@ struct Chunk {
 
     bool hasAvailableArenas() const {
         return info.numFree > 0;
     }
 
     inline void addToAvailableList(JSCompartment *compartment);
     inline void removeFromAvailableList();
 
-    template <size_t thingSize>
-    ArenaHeader *allocateArena(JSContext *cx, unsigned thingKind);
+    ArenaHeader *allocateArena(JSContext *cx, AllocKind kind);
 
     void releaseArena(ArenaHeader *aheader);
 };
 
 JS_STATIC_ASSERT(sizeof(Chunk) <= GC_CHUNK_SIZE);
 JS_STATIC_ASSERT(sizeof(Chunk) + BytesPerArena > GC_CHUNK_SIZE);
 
 inline uintptr_t
@@ -671,32 +679,40 @@ Chunk *
 Cell::chunk() const
 {
     uintptr_t addr = uintptr_t(this);
     JS_ASSERT(addr % Cell::CellSize == 0);
     addr &= ~(GC_CHUNK_SIZE - 1);
     return reinterpret_cast<Chunk *>(addr);
 }
 
+AllocKind
+Cell::getAllocKind() const
+{
+    return arenaHeader()->getAllocKind();
+}
+
 #ifdef DEBUG
 inline bool
 Cell::isAligned() const
 {
     return Arena::isAligned(address(), arenaHeader()->getThingSize());
 }
 #endif
 
 inline void
-ArenaHeader::init(JSCompartment *comp, unsigned kind, size_t thingSize)
+ArenaHeader::init(JSCompartment *comp, AllocKind kind)
 {
     JS_ASSERT(!allocated());
     JS_ASSERT(!getMarkingDelay()->link);
     compartment = comp;
-    thingKind = kind;
-    firstFreeSpanOffsets = FreeSpan::encodeOffsets(Arena::thingsStartOffset(thingSize));
+    allocKind = kind;
+
+    /* See comments in FreeSpan::allocateFromNewArena. */
+    firstFreeSpanOffsets = FreeSpan::FullArenaOffsets;
 }
 
 inline uintptr_t
 ArenaHeader::address() const
 {
     uintptr_t addr = reinterpret_cast<uintptr_t>(this);
     JS_ASSERT(!(addr & ArenaMask));
     JS_ASSERT(Chunk::withinArenasRange(addr));
@@ -704,16 +720,32 @@ ArenaHeader::address() const
 }
 
 inline Chunk *
 ArenaHeader::chunk() const
 {
     return Chunk::fromAddress(address());
 }
 
+inline bool
+ArenaHeader::isEmpty() const
+{
+    /* Arena is empty if its first span covers the whole arena. */
+    JS_ASSERT(allocated());
+    size_t firstThingOffset = Arena::firstThingOffset(getAllocKind());
+    return firstFreeSpanOffsets == FreeSpan::encodeOffsets(firstThingOffset, ArenaMask);
+}
+
+inline size_t
+ArenaHeader::getThingSize() const
+{
+    JS_ASSERT(allocated());
+    return Arena::thingSize(getAllocKind());
+}
+
 JS_ALWAYS_INLINE void
 ChunkBitmap::getMarkWordAndMask(const Cell *cell, uint32 color,
                                 uintptr_t **wordp, uintptr_t *maskp)
 {
     JS_ASSERT(cell->chunk() == Chunk::fromAddress(reinterpret_cast<uintptr_t>(this)));
     size_t bit = (cell->address() & GC_CHUNK_MASK) / Cell::CellSize + color;
     JS_ASSERT(bit < ArenaBitmapBits * ArenasPerChunk);
     *maskp = uintptr_t(1) << (bit % JS_BITS_PER_WORD);
@@ -774,17 +806,17 @@ const size_t GC_ALLOCATION_THRESHOLD = 3
  * starting after the lower limit of GC_ALLOCATION_THRESHOLD.
  */
 const float GC_HEAP_GROWTH_FACTOR = 3.0f;
 
 /* Perform a Full GC every 20 seconds if MaybeGC is called */
 static const int64 GC_IDLE_FULL_SPAN = 20 * 1000 * 1000;
 
 static inline JSGCTraceKind
-GetFinalizableTraceKind(size_t thingKind)
+MapAllocToTraceKind(AllocKind thingKind)
 {
     static const JSGCTraceKind map[FINALIZE_LIMIT] = {
         JSTRACE_OBJECT,     /* FINALIZE_OBJECT0 */
         JSTRACE_OBJECT,     /* FINALIZE_OBJECT0_BACKGROUND */
         JSTRACE_OBJECT,     /* FINALIZE_OBJECT2 */
         JSTRACE_OBJECT,     /* FINALIZE_OBJECT2_BACKGROUND */
         JSTRACE_OBJECT,     /* FINALIZE_OBJECT4 */
         JSTRACE_OBJECT,     /* FINALIZE_OBJECT4_BACKGROUND */
@@ -800,35 +832,68 @@ GetFinalizableTraceKind(size_t thingKind
         JSTRACE_TYPE_OBJECT,/* FINALIZE_TYPE_OBJECT */
 #if JS_HAS_XML_SUPPORT      /* FINALIZE_XML */
         JSTRACE_XML,
 #endif
         JSTRACE_STRING,     /* FINALIZE_SHORT_STRING */
         JSTRACE_STRING,     /* FINALIZE_STRING */
         JSTRACE_STRING,     /* FINALIZE_EXTERNAL_STRING */
     };
-
-    JS_ASSERT(thingKind < FINALIZE_LIMIT);
     return map[thingKind];
 }
 
 inline JSGCTraceKind
 GetGCThingTraceKind(const void *thing);
 
 static inline JSRuntime *
 GetGCThingRuntime(void *thing)
 {
     return reinterpret_cast<Cell *>(thing)->chunk()->info.runtime;
 }
 
-/* The arenas in a list have uniform kind. */
-class ArenaList {
+struct ArenaLists {
+
+    /*
+     * ArenaList::head points to the start of the list. Normally cursor points
+     * to the first arena in the list with some free things and all arenas
+     * before cursor are fully allocated. However, as the arena currently being
+     * allocated from is considered full while its list of free spans is moved
+     * into the freeList, during the GC or cell enumeration, when an
+     * unallocated freeList is moved back to the arena, we can see an arena
+     * with some free cells before the cursor. The cursor is an indirect
+     * pointer to allow for efficient list insertion at the cursor point and
+     * other list manipulations.
+     */
+    struct ArenaList {
+        ArenaHeader     *head;
+        ArenaHeader     **cursor;
+
+        ArenaList() {
+            clear();
+        }
+
+        void clear() {
+            head = NULL;
+            cursor = &head;
+        }
+    };
+
   private:
-    ArenaHeader     *head;      /* list start */
-    ArenaHeader     **cursor;   /* arena with free things */
+    /*
+     * For each arena kind its free list is represented as the first span with
+     * free things. Initially all the spans are initialized as empty. After we
+     * find a new arena with available things we move its first free span into
+     * the list and set the arena as fully allocated. This way we do not need to
+     * update the arena header after the initial allocation. When starting the
+     * GC we move the head of the list of spans back to the arena, but only
+     * for the arena that was not fully allocated.
+     */
+    FreeSpan       freeLists[FINALIZE_LIMIT];
+
+    ArenaList      arenaLists[FINALIZE_LIMIT];
 
 #ifdef JS_THREADSAFE
     /*
      * The background finalization adds the finalized arenas to the list at
      * the *cursor position. backgroundFinalizeState controls the interaction
      * between the GC lock and the access to the list from the allocation
      * thread.
      *
@@ -843,212 +908,200 @@ class ArenaList {
      * the allocation thread see all the writes done during finalization.
      */
     enum BackgroundFinalizeState {
         BFS_DONE,
         BFS_RUN,
         BFS_JUST_FINISHED
     };
 
-    volatile BackgroundFinalizeState backgroundFinalizeState;
+    volatile uintptr_t backgroundFinalizeState[FINALIZE_LIMIT];
 #endif
 
   public:
-    void init() {
-        head = NULL;
-        cursor = &head;
+    ArenaLists() {
+        for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
+            freeLists[i].initAsEmpty();
 #ifdef JS_THREADSAFE
-        backgroundFinalizeState = BFS_DONE;
+        for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
+            backgroundFinalizeState[i] = BFS_DONE;
 #endif
     }
 
-    ArenaHeader *getHead() { return head; }
-
-    inline ArenaHeader *searchForFreeArena();
-
-    template <size_t thingSize>
-    inline ArenaHeader *getArenaWithFreeList(JSContext *cx, unsigned thingKind);
+    ~ArenaLists() {
+        for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
+#ifdef JS_THREADSAFE
+            /*
+             * We can only call this during the shutdown after the last GC when
+             * the background finalization is disabled.
+             */
+            JS_ASSERT(backgroundFinalizeState[i] == BFS_DONE);
+#endif
+            ArenaHeader **headp = &arenaLists[i].head;
+            while (ArenaHeader *aheader = *headp) {
+                *headp = aheader->next;
+                aheader->chunk()->releaseArena(aheader);
+            }
+        }
+    }
 
-    template<typename T>
-    void finalizeNow(JSContext *cx);
+    const FreeSpan *getFreeList(AllocKind thingKind) const {
+        return &freeLists[thingKind];
+    }
 
-#ifdef JS_THREADSAFE
-    template<typename T>
-    inline void finalizeLater(JSContext *cx);
-
-    static void backgroundFinalize(JSContext *cx, ArenaHeader *listHead);
-
-    bool willBeFinalizedLater() const {
-        return backgroundFinalizeState == BFS_RUN;
+    ArenaHeader *getFirstArena(AllocKind thingKind) const {
+        return arenaLists[thingKind].head;
     }
 
-    bool doneBackgroundFinalize() const {
-        return backgroundFinalizeState == BFS_DONE;
+    bool arenaListsAreEmpty() const {
+        for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
+#ifdef JS_THREADSAFE
+            /*
+             * The arena list cannot be empty if the background finalization is not yet
+             * done.
+             */
+            if (backgroundFinalizeState[i] != BFS_DONE)
+                return false;
+#endif
+            if (arenaLists[i].head)
+                return false;
+        }
+        return true;
+    }
+
+#ifdef DEBUG
+    bool checkArenaListAllUnmarked() const {
+        for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
+# ifdef JS_THREADSAFE
+            /* The background finalization must have stopped at this point. */
+            JS_ASSERT(backgroundFinalizeState[i] == BFS_DONE ||
+                      backgroundFinalizeState[i] == BFS_JUST_FINISHED);
+# endif
+            for (ArenaHeader *aheader = arenaLists[i].head; aheader; aheader = aheader->next) {
+                if (!aheader->chunk()->bitmap.noBitsSet(aheader))
+                    return false;
+            }
+        }
+        return true;
     }
 #endif
 
-#ifdef DEBUG
-    bool markedThingsInArenaList() {
-# ifdef JS_THREADSAFE
-        /* The background finalization must have stopped at this point. */
-        JS_ASSERT(backgroundFinalizeState == BFS_DONE ||
-                  backgroundFinalizeState == BFS_JUST_FINISHED);
-# endif
-        for (ArenaHeader *aheader = head; aheader; aheader = aheader->next) {
-            if (!aheader->chunk()->bitmap.noBitsSet(aheader))
-                return true;
-        }
-        return false;
-    }
-#endif /* DEBUG */
-
-    void releaseAll(unsigned thingKind) {
-# ifdef JS_THREADSAFE
-        /*
-         * We can only call this during the shutdown after the last GC when
-         * the background finalization is disabled.
-         */
-        JS_ASSERT(backgroundFinalizeState == BFS_DONE);
-# endif
-        while (ArenaHeader *aheader = head) {
-            head = aheader->next;
-            aheader->chunk()->releaseArena(aheader);
-        }
-        cursor = &head;
+#ifdef JS_THREADSAFE
+    bool doneBackgroundFinalize(AllocKind kind) const {
+        return backgroundFinalizeState[kind] == BFS_DONE;
     }
-
-    bool isEmpty() const {
-#ifdef JS_THREADSAFE
-        /*
-         * The arena cannot be empty if the background finalization is not yet
-         * done.
-         */
-        if (backgroundFinalizeState != BFS_DONE)
-            return false;
 #endif
-        return !head;
-    }
-};
-
-struct FreeLists {
-    /*
-     * For each arena kind its free list is represented as the first span with
-     * free things. Initially all the spans are zeroed to be treated as empty
-     * spans by the allocation code. After we find a new arena with available
-     * things we copy its first free span into the list and set the arena as
-     * if it has no free things. This way we do not need to update the arena
-     * header after the initial allocation. When starting the GC We only move
-     * the head of the of the list of spans back to the arena only for the
-     * arena that was not fully allocated.
-     */
-    FreeSpan       lists[FINALIZE_LIMIT];
-
-    void init() {
-        for (size_t i = 0; i != JS_ARRAY_LENGTH(lists); ++i)
-            lists[i].initAsEmpty();
-    }
 
     /*
      * Return the free list back to the arena so the GC finalization will not
      * run the finalizers over unitialized bytes from free things.
      */
     void purge() {
-        for (size_t i = 0; i != size_t(FINALIZE_LIMIT); ++i) {
-            FreeSpan *list = &lists[i];
-            if (!list->isEmpty()) {
-                ArenaHeader *aheader = list->arenaHeader();
+        for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
+            FreeSpan *headSpan = &freeLists[i];
+            if (!headSpan->isEmpty()) {
+                ArenaHeader *aheader = headSpan->arenaHeader();
                 JS_ASSERT(!aheader->hasFreeThings());
-                aheader->setFirstFreeSpan(list);
-                list->initAsEmpty();
+                aheader->setFirstFreeSpan(headSpan);
+                headSpan->initAsEmpty();
             }
         }
     }
 
     /*
      * Temporarily copy the free list heads to the arenas so the code can see
      * the proper value in ArenaHeader::freeList when accessing the latter
      * outside the GC.
      */
-    void copyToArenas() {
-        for (size_t i = 0; i != size_t(FINALIZE_LIMIT); ++i)
-            copyToArena(FinalizeKind(i));
+    void copyFreeListsToArenas() {
+        for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
+            copyFreeListToArena(AllocKind(i));
     }
 
-    void copyToArena(FinalizeKind thingKind) {
-        FreeSpan *list = &lists[thingKind];
-        if (!list->isEmpty()) {
-            ArenaHeader *aheader = list->arenaHeader();
+    void copyFreeListToArena(AllocKind thingKind) {
+        FreeSpan *headSpan = &freeLists[thingKind];
+        if (!headSpan->isEmpty()) {
+            ArenaHeader *aheader = headSpan->arenaHeader();
             JS_ASSERT(!aheader->hasFreeThings());
-            aheader->setFirstFreeSpan(list);
+            aheader->setFirstFreeSpan(headSpan);
         }
     }
 
     /*
      * Clear the free lists in arenas that were temporarily set there using
      * copyToArenas.
      */
-    void clearInArenas() {
-        for (size_t i = 0; i != size_t(FINALIZE_LIMIT); ++i) 
-            clearInArena(FinalizeKind(i));
+    void clearFreeListsInArenas() {
+        for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
+            clearFreeListInArena(AllocKind(i));
     }
 
 
-    void clearInArena(FinalizeKind thingKind) {
-        FreeSpan *list = &lists[thingKind];
-        if (!list->isEmpty()) {
-            ArenaHeader *aheader = list->arenaHeader();
-            JS_ASSERT(aheader->getFirstFreeSpan().isSameNonEmptySpan(list));
+    void clearFreeListInArena(AllocKind kind) {
+        FreeSpan *headSpan = &freeLists[kind];
+        if (!headSpan->isEmpty()) {
+            ArenaHeader *aheader = headSpan->arenaHeader();
+            JS_ASSERT(aheader->getFirstFreeSpan().isSameNonEmptySpan(headSpan));
             aheader->setAsFullyUsed();
         }
     }
 
     /*
      * Check that the free list is either empty or were synchronized with the
      * arena using copyToArena().
      */
-    bool isSynchronizedWithArena(FinalizeKind thingKind) {
-        FreeSpan *list = &lists[thingKind];
-        if (list->isEmpty())
+    bool isSynchronizedFreeList(AllocKind kind) {
+        FreeSpan *headSpan = &freeLists[kind];
+        if (headSpan->isEmpty())
             return true;
-        ArenaHeader *aheader = list->arenaHeader();
+        ArenaHeader *aheader = headSpan->arenaHeader();
         if (aheader->hasFreeThings()) {
             /*
              * If the arena has a free list, it must be the same as one in
              * lists.
-             */ 
-            JS_ASSERT(aheader->getFirstFreeSpan().isSameNonEmptySpan(list));
+             */
+            JS_ASSERT(aheader->getFirstFreeSpan().isSameNonEmptySpan(headSpan));
             return true;
         }
         return false;
     }
 
-    JS_ALWAYS_INLINE void *getNext(unsigned thingKind, size_t thingSize) {
-        return lists[thingKind].allocate(thingSize);
+    JS_ALWAYS_INLINE void *allocateFromFreeList(AllocKind thingKind, size_t thingSize) {
+        return freeLists[thingKind].allocate(thingSize);
+    }
+
+    static void *refillFreeList(JSContext *cx, AllocKind thingKind);
+
+    void checkEmptyFreeLists() {
+#ifdef DEBUG
+        for (size_t i = 0; i != JS_ARRAY_LENGTH(freeLists); ++i)
+            JS_ASSERT(freeLists[i].isEmpty());
+#endif
     }
 
-    void *populate(ArenaHeader *aheader, unsigned thingKind, size_t thingSize) {
-        FreeSpan *list = &lists[thingKind];
-        *list = aheader->getFirstFreeSpan();
-        aheader->setAsFullyUsed();
-        void *t = list->allocate(thingSize);
-        JS_ASSERT(t);
-        return t;
+    void checkEmptyFreeList(AllocKind kind) {
+        JS_ASSERT(freeLists[kind].isEmpty());
     }
 
-    void checkEmpty() {
-#ifdef DEBUG
-        for (size_t i = 0; i != JS_ARRAY_LENGTH(lists); ++i)
-            JS_ASSERT(lists[i].isEmpty());
+    void finalizeObjects(JSContext *cx);
+    void finalizeStrings(JSContext *cx);
+    void finalizeShapes(JSContext *cx);
+    void finalizeScripts(JSContext *cx);
+
+#ifdef JS_THREADSAFE
+    static void backgroundFinalize(JSContext *cx, ArenaHeader *listHead);
+
+  private:
+    inline void finalizeNow(JSContext *cx, AllocKind thingKind);
+    inline void finalizeLater(JSContext *cx, AllocKind thingKind);
+
+    inline void *allocateFromArena(JSContext *cx, AllocKind thingKind);
 #endif
-    }
 };
 
-extern void *
-RefillFinalizableFreeList(JSContext *cx, unsigned thingKind);
-
 /*
  * Initial allocation size for data structures holding chunks is set to hold
  * chunks with total capacity of 16MB to avoid buffer resizes during browser
  * startup.
  */
 const size_t INITIAL_CHUNK_CAPACITY = 16 * 1024 * 1024 / GC_CHUNK_SIZE;
 
 /* The number of GC cycles an empty chunk can survive before been released. */
@@ -1249,17 +1302,17 @@ class GCHelperThread {
     JSGCInvocationKind lastGCKind;
 
     Vector<void **, 16, js::SystemAllocPolicy> freeVector;
     void            **freeCursor;
     void            **freeCursorEnd;
 
     Vector<js::gc::ArenaHeader *, 64, js::SystemAllocPolicy> finalizeVector;
 
-    friend class js::gc::ArenaList;
+    friend struct js::gc::ArenaLists;
 
     JS_FRIEND_API(void)
     replenishAndFreeLater(void *ptr);
 
     static void freeElementsAndArray(void **array, void **end) {
         JS_ASSERT(array <= end);
         for (void **p = array; p != end; ++p)
             js::Foreground::free_(*p);
@@ -1515,17 +1568,17 @@ IterateCompartmentsArenasCells(JSContext
                                IterateArenaCallback arenaCallback,
                                IterateCellCallback cellCallback);
 
 /*
  * Invoke cellCallback on every in-use object of the specified thing kind for
  * the given compartment or for all compartments if it is null.
  */
 extern JS_FRIEND_API(void)
-IterateCells(JSContext *cx, JSCompartment *compartment, gc::FinalizeKind thingKind,
+IterateCells(JSContext *cx, JSCompartment *compartment, gc::AllocKind thingKind,
              void *data, IterateCellCallback cellCallback);
 
 } /* namespace js */
 
 extern void
 js_FinalizeStringRT(JSRuntime *rt, JSString *str);
 
 /*
--- a/js/src/jsgcinlines.h
+++ b/js/src/jsgcinlines.h
@@ -114,72 +114,71 @@ namespace gc {
 
 inline JSGCTraceKind
 GetGCThingTraceKind(const void *thing)
 {
     JS_ASSERT(thing);
     if (JSAtom::isStatic(thing))
         return JSTRACE_STRING;
     const Cell *cell = reinterpret_cast<const Cell *>(thing);
-    return GetFinalizableTraceKind(cell->arenaHeader()->getThingKind());
+    return MapAllocToTraceKind(cell->getAllocKind());
 }
 
 /* Capacity for slotsToThingKind */
 const size_t SLOTS_TO_THING_KIND_LIMIT = 17;
 
 /* Get the best kind to use when making an object with the given slot count. */
-static inline FinalizeKind
+static inline AllocKind
 GetGCObjectKind(size_t numSlots, bool isArray = false)
 {
-    extern FinalizeKind slotsToThingKind[];
+    extern AllocKind slotsToThingKind[];
 
     if (numSlots >= SLOTS_TO_THING_KIND_LIMIT) {
         /*
          * If the object will definitely want more than the maximum number of
          * fixed slots, use zero fixed slots for arrays and the maximum for
          * other objects. Arrays do not use their fixed slots anymore when
          * they have a slots array, while other objects will continue to do so.
          */
         return isArray ? FINALIZE_OBJECT0 : FINALIZE_OBJECT16;
     }
     return slotsToThingKind[numSlots];
 }
 
 static inline bool
-IsBackgroundFinalizeKind(FinalizeKind kind)
+IsBackgroundAllocKind(AllocKind kind)
 {
     JS_ASSERT(kind <= FINALIZE_OBJECT_LAST);
     return kind % 2 == 1;
 }
 
-static inline FinalizeKind
-GetBackgroundFinalizeKind(FinalizeKind kind)
+static inline AllocKind
+GetBackgroundAllocKind(AllocKind kind)
 {
-    JS_ASSERT(!IsBackgroundFinalizeKind(kind));
-    return (FinalizeKind) (kind + 1);
+    JS_ASSERT(!IsBackgroundAllocKind(kind));
+    return (AllocKind) (kind + 1);
 }
 
+/*
+ * Try to get the next larger size for an object, keeping BACKGROUND
+ * consistent.
+ */
 static inline bool
-CanBumpFinalizeKind(FinalizeKind kind)
+TryIncrementAllocKind(AllocKind *kindp)
 {
-    JS_ASSERT(kind <= FINALIZE_OBJECT_LAST);
-    return (kind + 2) <= FINALIZE_OBJECT_LAST;
-}
-
-/* Get the next larger size for an object, keeping BACKGROUND consistent. */
-static inline FinalizeKind
-BumpFinalizeKind(FinalizeKind kind)
-{
-    JS_ASSERT(CanBumpFinalizeKind(kind));
-    return (FinalizeKind) (kind + 2);
+    size_t next = size_t(*kindp) + 2;
+    if (next > size_t(FINALIZE_OBJECT_LAST))
+        return false;
+    *kindp = AllocKind(next);
+    return true;
 }
 
 /* Get the number of fixed slots and initial capacity associated with a kind. */
 static inline size_t
-GetGCKindSlots(FinalizeKind thingKind)
+GetGCKindSlots(AllocKind thingKind)
 {
     /* Using a switch in hopes that thingKind will usually be a compile-time constant. */
     switch (thingKind) {
       case FINALIZE_OBJECT0:
       case FINALIZE_OBJECT0_BACKGROUND:
         return 0;
       case FINALIZE_OBJECT2:
       case FINALIZE_OBJECT2_BACKGROUND:
@@ -224,59 +223,61 @@ GCPoke(JSContext *cx, Value oldval)
 }
 
 /*
  * Invoke ArenaOp and CellOp on every arena and cell in a compartment which
  * have the specified thing kind.
  */
 template <class ArenaOp, class CellOp>
 void
-ForEachArenaAndCell(JSCompartment *compartment, FinalizeKind thingKind,
+ForEachArenaAndCell(JSCompartment *compartment, AllocKind thingKind,
                     ArenaOp arenaOp, CellOp cellOp)
 {
-    size_t thingSize = GCThingSizeMap[thingKind];
-    ArenaHeader *aheader = compartment->arenas[thingKind].getHead();
+    size_t thingSize = Arena::thingSize(thingKind);
+    ArenaHeader *aheader = compartment->arenas.getFirstArena(thingKind);
 
     for (; aheader; aheader = aheader->next) {
         Arena *arena = aheader->getArena();
         arenaOp(arena);
         FreeSpan firstSpan(aheader->getFirstFreeSpan());
         const FreeSpan *span = &firstSpan;
 
-        for (uintptr_t thing = arena->thingsStart(thingSize); ; thing += thingSize) {
+        for (uintptr_t thing = arena->thingsStart(thingKind); ; thing += thingSize) {
             JS_ASSERT(thing <= arena->thingsEnd());
             if (thing == span->first) {
                 if (!span->hasNext())
                     break;
                 thing = span->last;
                 span = span->nextSpan();
             } else {
                 Cell *t = reinterpret_cast<Cell *>(thing);
                 cellOp(t);
             }
         }
     }
 }
 
 class CellIterImpl
 {
+    size_t firstThingOffset;
     size_t thingSize;
     ArenaHeader *aheader;
     FreeSpan firstSpan;
     const FreeSpan *span;
     uintptr_t thing;
     Cell *cell;
 
   protected:
     CellIterImpl() {
     }
 
-    void init(JSCompartment *comp, FinalizeKind thingKind) {
-        thingSize = GCThingSizeMap[thingKind];
-        aheader = comp->arenas[thingKind].getHead();
+    void init(JSCompartment *comp, AllocKind kind) {
+        firstThingOffset = Arena::firstThingOffset(kind);
+        thingSize = Arena::thingSize(kind);
+        aheader = comp->arenas.getFirstArena(kind);
         firstSpan.initAsEmpty();
         span = &firstSpan;
         thing = span->first;
         next();
     }
 
   public:
     bool done() const {
@@ -303,73 +304,73 @@ class CellIterImpl
                 break;
             }
             if (!aheader) {
                 cell = NULL;
                 return;
             }
             firstSpan = aheader->getFirstFreeSpan();
             span = &firstSpan;
-            thing = aheader->getArena()->thingsStart(thingSize);
+            thing = aheader->arenaAddress() | firstThingOffset;
             aheader = aheader->next;
         }
         cell = reinterpret_cast<Cell *>(thing);
         thing += thingSize;
     }
 };
 
 class CellIterUnderGC : public CellIterImpl {
 
   public:
-    CellIterUnderGC(JSCompartment *comp, FinalizeKind thingKind) {
+    CellIterUnderGC(JSCompartment *comp, AllocKind kind) {
         JS_ASSERT(comp->rt->gcRunning);
-        JS_ASSERT(comp->freeLists.lists[thingKind].isEmpty());
-        init(comp, thingKind);
+        comp->arenas.checkEmptyFreeList(kind);
+        init(comp, kind);
     }
 };
 
 /*
  * When using the iterator outside the GC the caller must ensure that no GC or
  * allocations of GC things are possible and that the background finalization
  * for the given thing kind is not enabled or is done.
  */
 class CellIter: public CellIterImpl
 {
-    FreeLists *lists;
-    FinalizeKind thingKind;
+    ArenaLists *lists;
+    AllocKind kind;
 #ifdef DEBUG
     size_t *counter;
 #endif
   public:
-    CellIter(JSContext *cx, JSCompartment *comp, FinalizeKind thingKind)
-      : lists(&comp->freeLists),
-        thingKind(thingKind) {
+    CellIter(JSContext *cx, JSCompartment *comp, AllocKind kind)
+      : lists(&comp->arenas),
+        kind(kind) {
 #ifdef JS_THREADSAFE
-        JS_ASSERT(comp->arenas[thingKind].doneBackgroundFinalize());
+        JS_ASSERT(comp->arenas.doneBackgroundFinalize(kind));
 #endif
-        if (lists->isSynchronizedWithArena(thingKind)) {
+        if (lists->isSynchronizedFreeList(kind)) {
             lists = NULL;
         } else {
             JS_ASSERT(!comp->rt->gcRunning);
-            lists->copyToArena(thingKind);
+            lists->copyFreeListToArena(kind);
         }
 #ifdef DEBUG
         counter = &JS_THREAD_DATA(cx)->noGCOrAllocationCheck;
         ++*counter;
 #endif
-        init(comp, thingKind);
+        init(comp, kind);
     }
 
     ~CellIter() {
 #ifdef DEBUG
         JS_ASSERT(*counter > 0);
         --*counter;
 #endif
         if (lists)
-            lists->clearInArena(thingKind);
+            lists->clearFreeListInArena(kind);
     }
 };
 
 /* Signatures for ArenaOp and CellOp above. */
 
 inline void EmptyArenaOp(Arena *arena) {}
 inline void EmptyCellOp(Cell *t) {}
 
@@ -380,42 +381,40 @@ inline void EmptyCellOp(Cell *t) {}
  * Allocates a new GC thing. After a successful allocation the caller must
  * fully initialize the thing before calling any function that can potentially
  * trigger GC. This will ensure that GC tracing never sees junk values stored
  * in the partially initialized thing.
  */
 
 template <typename T>
 inline T *
-NewGCThing(JSContext *cx, unsigned thingKind, size_t thingSize)
+NewGCThing(JSContext *cx, js::gc::AllocKind kind, size_t thingSize)
 {
-    JS_ASSERT(thingKind < js::gc::FINALIZE_LIMIT);
-    JS_ASSERT(thingSize == js::gc::GCThingSizeMap[thingKind]);
+    JS_ASSERT(thingSize == js::gc::Arena::thingSize(kind));
 #ifdef JS_THREADSAFE
     JS_ASSERT_IF((cx->compartment == cx->runtime->atomsCompartment),
-                 (thingKind == js::gc::FINALIZE_STRING) ||
-                 (thingKind == js::gc::FINALIZE_SHORT_STRING));
+                 kind == js::gc::FINALIZE_STRING || kind == js::gc::FINALIZE_SHORT_STRING);
 #endif
     JS_ASSERT(!cx->runtime->gcRunning);
     JS_ASSERT(!JS_THREAD_DATA(cx)->noGCOrAllocationCheck);
 
 #ifdef JS_GC_ZEAL
     if (cx->runtime->needZealousGC())
         js::gc::RunDebugGC(cx);
 #endif
 
-    void *t = cx->compartment->freeLists.getNext(thingKind, thingSize);
-    return static_cast<T *>(t ? t : js::gc::RefillFinalizableFreeList(cx, thingKind));
+    void *t = cx->compartment->arenas.allocateFromFreeList(kind, thingSize);
+    return static_cast<T *>(t ? t : js::gc::ArenaLists::refillFreeList(cx, kind));
 }
 
 inline JSObject *
-js_NewGCObject(JSContext *cx, js::gc::FinalizeKind kind)
+js_NewGCObject(JSContext *cx, js::gc::AllocKind kind)
 {
     JS_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind <= js::gc::FINALIZE_OBJECT_LAST);
-    JSObject *obj = NewGCThing<JSObject>(cx, kind, js::gc::GCThingSizeMap[kind]);
+    JSObject *obj = NewGCThing<JSObject>(cx, kind, js::gc::Arena::thingSize(kind));
     if (obj)
         obj->earlyInit(js::gc::GetGCKindSlots(kind));
     return obj;
 }
 
 inline JSString *
 js_NewGCString(JSContext *cx)
 {
--- a/js/src/jsgcstats.cpp
+++ b/js/src/jsgcstats.cpp
@@ -66,17 +66,16 @@ ConservativeGCStats::dump(FILE *fp)
    
 #define ULSTAT(x)       ((unsigned long)(x))
     fprintf(fp, "CONSERVATIVE STACK SCANNING:\n");
     fprintf(fp, "      number of stack words: %lu\n", ULSTAT(words));
     fprintf(fp, "      excluded, low bit set: %lu\n", ULSTAT(counter[CGCT_LOWBITSET]));
     fprintf(fp, "        not withing a chunk: %lu\n", ULSTAT(counter[CGCT_NOTCHUNK]));
     fprintf(fp, "     not within arena range: %lu\n", ULSTAT(counter[CGCT_NOTARENA]));
     fprintf(fp, "       points to free arena: %lu\n", ULSTAT(counter[CGCT_FREEARENA]));
-    fprintf(fp, "        excluded, wrong tag: %lu\n", ULSTAT(counter[CGCT_WRONGTAG]));
     fprintf(fp, "         excluded, not live: %lu\n", ULSTAT(counter[CGCT_NOTLIVE]));
     fprintf(fp, "            valid GC things: %lu\n", ULSTAT(counter[CGCT_VALID]));
     fprintf(fp, "      valid but not aligned: %lu\n", ULSTAT(unaligned));
 #undef ULSTAT
 }
 #endif
 
 } //gc
@@ -199,17 +198,17 @@ GCMarker::dumpConservativeRoots()
         fclose(fp);
 }
 #endif /* JS_DUMP_CONSERVATIVE_GC_ROOTS */
 
 #if defined(MOZ_GCTIMER) || defined(JSGC_TESTPILOT)
 
 volatile GCTimer::JSGCReason gcReason = GCTimer::NOREASON;
 const char *gcReasons[] = {"  API", "Maybe", "LastC", "DestC", "Compa", "LastD",
-                          "Malloc", "Alloc", "Chunk", "Shape", "  None"};
+                           "Malloc", "Refill", "Chunk", "Shape", "  None"};
 
 jsrefcount newChunkCount = 0;
 jsrefcount destroyChunkCount = 0;
 
 #ifdef MOZ_GCTIMER
 static const char *gcTimerStatPath = NULL;
 #endif
 
--- a/js/src/jsgcstats.h
+++ b/js/src/jsgcstats.h
@@ -94,17 +94,16 @@ namespace gc {
  */
 enum ConservativeGCTest
 {
     CGCT_VALID,
     CGCT_LOWBITSET, /* excluded because one of the low bits was set */
     CGCT_NOTARENA,  /* not within arena range in a chunk */
     CGCT_NOTCHUNK,  /* not within a valid chunk */
     CGCT_FREEARENA, /* within arena containing only free things */
-    CGCT_WRONGTAG,  /* tagged pointer but wrong type */
     CGCT_NOTLIVE,   /* gcthing is not allocated */
     CGCT_END
 };
 
 struct ConservativeGCStats
 {
     uint32  counter[gc::CGCT_END];  /* ConservativeGCTest classification
                                        counters */
@@ -157,17 +156,17 @@ struct GCTimer
         PUBLIC_API,
         MAYBEGC,
         LASTCONTEXT,
         DESTROYCONTEXT,
         COMPARTMENT,
         LASTDITCH,
         TOOMUCHMALLOC,
         ALLOCTRIGGER,
-        CHUNK,
+        REFILL,
         SHAPE,
         NOREASON
     };
 };
 
 /* We accept the possiblility of races for this variable. */
 extern volatile GCTimer::JSGCReason gcReason;
 
--- a/js/src/jsinfer.cpp
+++ b/js/src/jsinfer.cpp
@@ -2037,19 +2037,19 @@ TypeCompartment::nukeTypes(JSContext *cx
     }
 
     /*
      * We may or may not be under the GC. In either case don't allocate, and
      * acquire the GC lock so we can update inferenceEnabled for all contexts.
      */
 
 #ifdef JS_THREADSAFE
-    Maybe<AutoLockGC> maybeLock;
+    AutoLockGC maybeLock;
     if (!cx->runtime->gcMarkAndSweep)
-        maybeLock.construct(cx->runtime);
+        maybeLock.lock(cx->runtime);
 #endif
 
     inferenceEnabled = false;
 
     /* Update the cached inferenceEnabled bit in all contexts. */
     for (JSCList *cl = cx->runtime->contextList.next;
          cl != &cx->runtime->contextList;
          cl = cl->next) {
@@ -4406,17 +4406,17 @@ CheckNewScriptProperties(JSContext *cx, 
      * the properties added to baseobj match the type's definite properties.
      */
     if (type->newScript) {
         if (!type->matchDefiniteProperties(baseobj))
             type->clearNewScript(cx);
         return;
     }
 
-    gc::FinalizeKind kind = gc::GetGCObjectKind(baseobj->slotSpan());
+    gc::AllocKind kind = gc::GetGCObjectKind(baseobj->slotSpan());
 
     /* We should not have overflowed the maximum number of fixed slots for an object. */
     JS_ASSERT(gc::GetGCKindSlots(kind) >= baseobj->slotSpan());
 
     TypeNewScript::Initializer done(TypeNewScript::Initializer::DONE, 0);
 
     /*
      * The base object may have been created with a different finalize kind
@@ -4436,17 +4436,17 @@ CheckNewScriptProperties(JSContext *cx, 
                     + (initializerList.length() * sizeof(TypeNewScript::Initializer));
     type->newScript = (TypeNewScript *) cx->calloc_(numBytes);
     if (!type->newScript) {
         cx->compartment->types.setPendingNukeTypes(cx);
         return;
     }
 
     type->newScript->script = script;
-    type->newScript->finalizeKind = unsigned(kind);
+    type->newScript->allocKind = kind;
     type->newScript->shape = baseobj->lastProperty();
 
     type->newScript->initializerList = (TypeNewScript::Initializer *)
         ((char *) type->newScript + sizeof(TypeNewScript));
     PodCopy(type->newScript->initializerList, initializerList.begin(), initializerList.length());
 }
 
 /////////////////////////////////////////////////////////////////////
--- a/js/src/jsinfer.h
+++ b/js/src/jsinfer.h
@@ -641,18 +641,18 @@ struct Property
  * appropriate type constraints are in place when necessary, and that we can
  * remove the definite property information and repair the JS stack if the
  * constraints are violated.
  */
 struct TypeNewScript
 {
     JSScript *script;
 
-    /* Finalize kind to use for newly constructed objects. */
-    /* gc::FinalizeKind */ unsigned finalizeKind;
+    /* Allocation kind to use for newly constructed objects. */
+    gc::AllocKind allocKind;
 
     /*
      * Shape to use for newly constructed objects. Reflects all definite
      * properties the object will have.
      */
     const Shape *shape;
 
     /*
@@ -801,18 +801,17 @@ struct TypeObject : gc::Cell
     /*
      * Return an immutable, shareable, empty shape with the same clasp as this
      * and the same slotSpan as this had when empty.
      *
      * If |this| is the scope of an object |proto|, the resulting scope can be
      * used as the scope of a new object whose prototype is |proto|.
      */
     inline bool canProvideEmptyShape(js::Class *clasp);
-    inline js::EmptyShape *getEmptyShape(JSContext *cx, js::Class *aclasp,
-                                         /* gc::FinalizeKind */ unsigned kind);
+    inline js::EmptyShape *getEmptyShape(JSContext *cx, js::Class *aclasp, gc::AllocKind kind);
 
     /*
      * Get or create a property of this object. Only call this for properties which
      * a script accesses explicitly. 'assign' indicates whether this is for an
      * assignment, and the own types of the property will be used instead of
      * aggregate types.
      */
     inline TypeSet *getProperty(JSContext *cx, jsid id, bool assign);
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -5233,17 +5233,17 @@ BEGIN_CASE(JSOP_NEWINIT)
     jsint i = regs.pc[1];
 
     JS_ASSERT(i == JSProto_Array || i == JSProto_Object);
     JSObject *obj;
 
     if (i == JSProto_Array) {
         obj = NewDenseEmptyArray(cx);
     } else {
-        gc::FinalizeKind kind = GuessObjectGCKind(0, false);
+        gc::AllocKind kind = GuessObjectGCKind(0, false);
         obj = NewBuiltinClassInstance(cx, &js_ObjectClass, kind);
     }
 
     if (!obj)
         goto error;
 
     TypeObject *type = TypeScript::InitObject(cx, script, regs.pc, (JSProtoKey) i);
     if (!type)
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -2908,32 +2908,32 @@ js_Object(JSContext *cx, uintN argc, Val
     } else {
         /* If argv[0] is null or undefined, obj comes back null. */
         if (!js_ValueToObjectOrNull(cx, vp[2], &obj))
             return JS_FALSE;
     }
     if (!obj) {
         /* Make an object whether this was called with 'new' or not. */
         JS_ASSERT(!argc || vp[2].isNull() || vp[2].isUndefined());
-        gc::FinalizeKind kind = NewObjectGCKind(cx, &js_ObjectClass);
+        gc::AllocKind kind = NewObjectGCKind(cx, &js_ObjectClass);
         obj = NewBuiltinClassInstance(cx, &js_ObjectClass, kind);
         if (!obj)
             return JS_FALSE;
         TypeObject *type = GetTypeCallerInitObject(cx, JSProto_Object);
         if (!type)
             return JS_FALSE;
         obj->setType(type);
     }
     vp->setObject(*obj);
     return JS_TRUE;
 }
 
 JSObject *
 js::NewReshapedObject(JSContext *cx, TypeObject *type, JSObject *parent,
-                      gc::FinalizeKind kind, const Shape *shape)
+                      gc::AllocKind kind, const Shape *shape)
 {
     JSObject *res = NewObjectWithType(cx, type, parent, kind);
     if (!res)
         return NULL;
 
     if (JSID_IS_EMPTY(shape->propid))
         return res;
 
@@ -2974,56 +2974,56 @@ js_CreateThis(JSContext *cx, JSObject *c
     }
 
     Value protov;
     if (!callee->getProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom), &protov))
         return NULL;
 
     JSObject *proto = protov.isObjectOrNull() ? protov.toObjectOrNull() : NULL;
     JSObject *parent = callee->getParent();
-    gc::FinalizeKind kind = NewObjectGCKind(cx, newclasp);
+    gc::AllocKind kind = NewObjectGCKind(cx, newclasp);
     JSObject *obj = NewObject<WithProto::Class>(cx, newclasp, proto, parent, kind);
     if (obj)
         obj->syncSpecialEquality();
     return obj;
 }
 
 static inline JSObject *
 CreateThisForFunctionWithType(JSContext *cx, types::TypeObject *type, JSObject *parent)
 {
     if (type->newScript) {
         /*
          * Make an object with the type's associated finalize kind and shape,
          * which reflects any properties that will definitely be added to the
          * object before it is read from.
          */
-        gc::FinalizeKind kind = gc::FinalizeKind(type->newScript->finalizeKind);
+        gc::AllocKind kind = type->newScript->allocKind;
         JSObject *res = NewObjectWithType(cx, type, parent, kind);
         if (res)
             res->setMap((Shape *) type->newScript->shape);
         return res;
     }
 
-    gc::FinalizeKind kind = NewObjectGCKind(cx, &js_ObjectClass);
+    gc::AllocKind kind = NewObjectGCKind(cx, &js_ObjectClass);
     return NewObjectWithType(cx, type, parent, kind);
 }
 
 JSObject *
 js_CreateThisForFunctionWithProto(JSContext *cx, JSObject *callee, JSObject *proto)
 {
     JSScript *calleeScript = callee->getFunctionPrivate()->script();
     JSObject *res;
 
     if (proto) {
         types::TypeObject *type = proto->getNewType(cx, calleeScript);
         if (!type)
             return NULL;
         res = CreateThisForFunctionWithType(cx, type, callee->getParent());
     } else {
-        gc::FinalizeKind kind = NewObjectGCKind(cx, &js_ObjectClass);
+        gc::AllocKind kind = NewObjectGCKind(cx, &js_ObjectClass);
         res = NewNonFunction<WithProto::Class>(cx, &js_ObjectClass, proto, callee->getParent(), kind);
     }
 
     if (res && cx->typeInferenceEnabled())
         TypeScript::SetThis(cx, calleeScript, types::Type::ObjectType(res));
 
     return res;
 }
@@ -3072,17 +3072,17 @@ js_Object_tn(JSContext* cx, JSObject* pr
 JS_DEFINE_TRCINFO_1(js_Object,
     (2, (extern, CONSTRUCTOR_RETRY, js_Object_tn, CONTEXT, CALLEE_PROTOTYPE, 0,
          nanojit::ACCSET_STORE_ANY)))
 
 JSObject* FASTCALL
 js_InitializerObject(JSContext* cx, JSObject *proto, JSObject *baseobj)
 {
     if (!baseobj) {
-        gc::FinalizeKind kind = GuessObjectGCKind(0, false);
+        gc::AllocKind kind = GuessObjectGCKind(0, false);
         return NewObjectWithClassProto(cx, &js_ObjectClass, proto, kind);
     }
 
     /* :FIXME: bug 637856 new Objects do not have the right type when created on trace. */
     TypeObject *type = proto->getNewType(cx);
     if (!type)
         return NULL;
 
@@ -3124,17 +3124,17 @@ js_CreateThisFromTrace(JSContext *cx, JS
         /*
          * GetInterpretedFunctionPrototype found that ctor.prototype is
          * primitive. Use Object.prototype for proto, per ES5 13.2.2 step 7.
          */
         if (!js_GetClassPrototype(cx, parent, JSProto_Object, &proto))
             return NULL;
     }
 
-    gc::FinalizeKind kind = NewObjectGCKind(cx, &js_ObjectClass);
+    gc::AllocKind kind = NewObjectGCKind(cx, &js_ObjectClass);
     return NewNativeClassInstance(cx, &js_ObjectClass, proto, parent, kind);
 }
 JS_DEFINE_CALLINFO_3(extern, CONSTRUCTOR_RETRY, js_CreateThisFromTrace, CONTEXT, OBJECT, UINTN, 0,
                      nanojit::ACCSET_STORE_ANY)
 
 #else  /* !JS_TRACER */
 
 # define js_Object_trcinfo NULL
@@ -3400,17 +3400,17 @@ js_NewBlockObject(JSContext *cx)
 }
 
 JSObject *
 js_CloneBlockObject(JSContext *cx, JSObject *proto, StackFrame *fp)
 {
     JS_ASSERT(proto->isStaticBlock());
 
     size_t count = OBJ_BLOCK_COUNT(cx, proto);
-    gc::FinalizeKind kind = gc::GetGCObjectKind(count + 1);
+    gc::AllocKind kind = gc::GetGCObjectKind(count + 1);
 
     TypeObject *type = proto->getNewType(cx);
     if (!type)
         return NULL;
 
     JSObject *clone = js_NewGCObject(cx, kind);
     if (!clone)
         return NULL;
@@ -3610,19 +3610,17 @@ JSObject::clone(JSContext *cx, JSObject 
             if (!makeDenseArraySlow(cx))
                 return NULL;
         } else if (!isProxy()) {
             JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
                                  JSMSG_CANT_CLONE_OBJECT);
             return NULL;
         }
     }
-    JSObject *clone = NewObject<WithProto::Given>(cx, getClass(),
-                                                  proto, parent,
-                                                  gc::FinalizeKind(finalizeKind()));
+    JSObject *clone = NewObject<WithProto::Given>(cx, getClass(), proto, parent, getAllocKind());
     if (!clone)
         return NULL;
     if (isNative()) {
         if (clone->isFunction() && (compartment() != clone->compartment())) {
             JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
                                  JSMSG_CANT_CLONE_OBJECT);
             return NULL;
         }
@@ -4359,26 +4357,25 @@ JSObject::allocSlots(JSContext *cx, size
 
     /*
      * If we are allocating slots for an object whose type is always created
      * by calling 'new' on a particular script, bump the GC kind for that
      * type to give these objects a larger number of fixed slots when future
      * objects are constructed.
      */
     if (!hasLazyType() && type()->newScript) {
-        gc::FinalizeKind kind = gc::FinalizeKind(type()->newScript->finalizeKind);
+        gc::AllocKind kind = type()->newScript->allocKind;
         unsigned newScriptSlots = gc::GetGCKindSlots(kind);
-        if (newScriptSlots == numFixedSlots() && gc::CanBumpFinalizeKind(kind)) {
-            kind = gc::BumpFinalizeKind(kind);
+        if (newScriptSlots == numFixedSlots() && gc::TryIncrementAllocKind(&kind)) {
             JSObject *obj = NewReshapedObject(cx, type(), getParent(), kind,
                                               type()->newScript->shape);
             if (!obj)
                 return false;
 
-            type()->newScript->finalizeKind = kind;
+            type()->newScript->allocKind = kind;
             type()->newScript->shape = obj->lastProperty();
             type()->markStateChange(cx);
         }
     }
 
     if (newcap > NSLOTS_LIMIT) {
         if (!JS_ON_TRACE(cx))
             js_ReportAllocationOverflow(cx);
--- a/js/src/jsobj.h
+++ b/js/src/jsobj.h
@@ -659,18 +659,16 @@ struct JSObject : js::gc::Cell {
      * a doubly-linked list.
      */
     inline bool inDictionaryMode() const;
 
     inline uint32 propertyCount() const;
 
     inline bool hasPropertyTable() const;
 
-    /* gc::FinalizeKind */ unsigned finalizeKind() const;
-
     uint32 numSlots() const { return uint32(capacity); }
 
     inline size_t structSize() const;
     inline size_t slotsAndStructSize() const;
 
     /* Slot accessors for JITs. */
 
     static inline size_t getFixedSlotOffset(size_t slot);
@@ -1274,17 +1272,17 @@ struct JSObject : js::gc::Cell {
      * Like init, but also initializes map.  proto must have an empty shape
      * created for it via proto->getEmptyShape.
      */
     inline bool initSharingEmptyShape(JSContext *cx,
                                       js::Class *clasp,
                                       js::types::TypeObject *type,
                                       JSObject *parent,
                                       void *priv,
-                                      /* gc::FinalizeKind */ unsigned kind);
+                                      js::gc::AllocKind kind);
 
     inline bool hasProperty(JSContext *cx, jsid id, bool *foundp, uintN flags = 0);
 
     /*
      * Allocate and free an object slot. Note that freeSlot is infallible: it
      * returns true iff this is a dictionary-mode object and the freed slot was
      * added to the freelist.
      *
--- a/js/src/jsobjinlines.h
+++ b/js/src/jsobjinlines.h
@@ -397,22 +397,16 @@ JSObject::getPrimitiveThis() const
 
 inline void
 JSObject::setPrimitiveThis(const js::Value &pthis)
 {
     JS_ASSERT(isPrimitive());
     setFixedSlot(JSSLOT_PRIMITIVE_THIS, pthis);
 }
 
-inline /* gc::FinalizeKind */ unsigned
-JSObject::finalizeKind() const
-{
-    return js::gc::FinalizeKind(arenaHeader()->getThingKind());
-}
-
 inline bool
 JSObject::hasSlotsArray() const
 {
     JS_ASSERT_IF(!slots, !isDenseArray());
     JS_ASSERT_IF(slots == fixedSlots(), isDenseArray() || isArrayBuffer());
     return slots && slots != fixedSlots();
 }
 
@@ -959,17 +953,17 @@ JSObject::finish(JSContext *cx)
 }
 
 inline bool
 JSObject::initSharingEmptyShape(JSContext *cx,
                                 js::Class *aclasp,
                                 js::types::TypeObject *type,
                                 JSObject *parent,
                                 void *privateValue,
-                                /* js::gc::FinalizeKind */ unsigned kind)
+                                js::gc::AllocKind kind)
 {
     init(cx, aclasp, type, parent, privateValue, false);
 
     JS_ASSERT(!isDenseArray());
 
     js::EmptyShape *empty = type->getEmptyShape(cx, aclasp, kind);
     if (!empty)
         return false;
@@ -1240,17 +1234,17 @@ class AutoPropertyDescriptorRooter : pri
         value = desc->value;
     }
 
     friend void AutoGCRooter::trace(JSTracer *trc);
 };
 
 static inline bool
 InitScopeForObject(JSContext* cx, JSObject* obj, js::Class *clasp, js::types::TypeObject *type,
-                   gc::FinalizeKind kind)
+                   gc::AllocKind kind)
 {
     JS_ASSERT(clasp->isNative());
 
     /* Share proto's emptyShape only if obj is similar to proto. */
     js::EmptyShape *empty = NULL;
 
     uint32 freeslot = JSSLOT_FREE(clasp);
     if (freeslot > obj->numSlots() && !obj->allocSlots(cx, freeslot))
@@ -1268,60 +1262,60 @@ InitScopeForObject(JSContext* cx, JSObje
 
   bad:
     /* The GC nulls map initially. It should still be null on error. */
     JS_ASSERT(obj->isNewborn());
     return false;
 }
 
 static inline bool
-CanBeFinalizedInBackground(gc::FinalizeKind kind, Class *clasp)
+CanBeFinalizedInBackground(gc::AllocKind kind, Class *clasp)
 {
 #ifdef JS_THREADSAFE
     JS_ASSERT(kind <= gc::FINALIZE_OBJECT_LAST);
     /* If the class has no finalizer or a finalizer that is safe to call on
      * a different thread, we change the finalize kind. For example,
      * FINALIZE_OBJECT0 calls the finalizer on the main thread,
      * FINALIZE_OBJECT0_BACKGROUND calls the finalizer on the gcHelperThread.
-     * IsBackgroundFinalizeKind is called to prevent recursively incrementing
+     * IsBackgroundAllocKind is called to prevent recursively incrementing
      * the finalize kind; kind may already be a background finalize kind.
      */
-    if (!gc::IsBackgroundFinalizeKind(kind) &&
+    if (!gc::IsBackgroundAllocKind(kind) &&
         (!clasp->finalize || clasp->flags & JSCLASS_CONCURRENT_FINALIZER)) {
         return true;
     }
 #endif
     return false;
 }
 
 /*
  * Helper optimized for creating a native instance of the given class (not the
  * class's prototype object). Use this in preference to NewObject, but use
  * NewBuiltinClassInstance if you need the default class prototype as proto,
  * and its parent global as parent.
  */
 static inline JSObject *
 NewNativeClassInstance(JSContext *cx, Class *clasp, JSObject *proto,
-                       JSObject *parent, gc::FinalizeKind kind)
+                       JSObject *parent, gc::AllocKind kind)
 {
     JS_ASSERT(proto);
     JS_ASSERT(parent);
     JS_ASSERT(kind <= gc::FINALIZE_OBJECT_LAST);
 
     types::TypeObject *type = proto->getNewType(cx);
     if (!type)
         return NULL;
 
     /*
      * Allocate an object from the GC heap and initialize all its fields before
      * doing any operation that can potentially trigger GC.
      */
 
     if (CanBeFinalizedInBackground(kind, clasp))
-        kind = GetBackgroundFinalizeKind(kind);
+        kind = GetBackgroundAllocKind(kind);
 
     JSObject* obj = js_NewGCObject(cx, kind);
 
     if (obj) {
         /*
          * Default parent to the parent of the prototype, which was set from
          * the parent of the prototype's constructor.
          */
@@ -1338,32 +1332,32 @@ NewNativeClassInstance(JSContext *cx, Cl
     }
 
     return obj;
 }
 
 static inline JSObject *
 NewNativeClassInstance(JSContext *cx, Class *clasp, JSObject *proto, JSObject *parent)
 {
-    gc::FinalizeKind kind = gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(clasp));
+    gc::AllocKind kind = gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(clasp));
     return NewNativeClassInstance(cx, clasp, proto, parent, kind);
 }
 
 bool
 FindClassPrototype(JSContext *cx, JSObject *scope, JSProtoKey protoKey, JSObject **protop,
                    Class *clasp);
 
 /*
  * Helper used to create Boolean, Date, RegExp, etc. instances of built-in
  * classes with class prototypes of the same Class. See, e.g., jsdate.cpp,
  * jsregexp.cpp, and js_PrimitiveToObject in jsobj.cpp. Use this to get the
  * right default proto and parent for clasp in cx.
  */
 static inline JSObject *
-NewBuiltinClassInstance(JSContext *cx, Class *clasp, gc::FinalizeKind kind)
+NewBuiltinClassInstance(JSContext *cx, Class *clasp, gc::AllocKind kind)
 {
     VOUCH_DOES_NOT_REQUIRE_STACK();
 
     JSProtoKey protoKey = JSCLASS_CACHED_PROTO_KEY(clasp);
     JS_ASSERT(protoKey != JSProto_Null);
 
     /* NB: inline-expanded and specialized version of js_GetClassPrototype. */
     JSObject *global;
@@ -1387,17 +1381,17 @@ NewBuiltinClassInstance(JSContext *cx, C
     }
 
     return NewNativeClassInstance(cx, clasp, proto, global, kind);
 }
 
 static inline JSObject *
 NewBuiltinClassInstance(JSContext *cx, Class *clasp)
 {
-    gc::FinalizeKind kind = gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(clasp));
+    gc::AllocKind kind = gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(clasp));
     return NewBuiltinClassInstance(cx, clasp, kind);
 }
 
 static inline JSProtoKey
 GetClassProtoKey(js::Class *clasp)
 {
     JSProtoKey key = JSCLASS_CACHED_PROTO_KEY(clasp);
     if (key != JSProto_Null)
@@ -1452,17 +1446,17 @@ FindProto(JSContext *cx, js::Class *clas
     return true;
 }
 
 namespace detail
 {
 template <bool withProto, bool isFunction>
 static JS_ALWAYS_INLINE JSObject *
 NewObject(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent,
-          gc::FinalizeKind kind)
+          gc::AllocKind kind)
 {
     /* Bootstrap the ur-object, and make it the default prototype object. */
     if (withProto == WithProto::Class && !proto) {
         if (!FindProto(cx, clasp, parent, &proto))
           return NULL;
     }
 
     types::TypeObject *type = proto ? proto->getNewType(cx) : &js::types::emptyTypeObject;
@@ -1473,17 +1467,17 @@ NewObject(JSContext *cx, js::Class *clas
      * Allocate an object from the GC heap and initialize all its fields before
      * doing any operation that can potentially trigger GC. Functions have a
      * larger non-standard allocation size.
      *
      * The should be specialized by the template.
      */
 
     if (!isFunction && CanBeFinalizedInBackground(kind, clasp))
-        kind = GetBackgroundFinalizeKind(kind);
+        kind = GetBackgroundAllocKind(kind);
 
     JSObject* obj = isFunction ? js_NewGCFunction(cx) : js_NewGCObject(cx, kind);
     if (!obj)
         goto out;
 
     /* This needs to match up with the size of JSFunction::data_padding. */
     JS_ASSERT_IF(isFunction, kind == gc::FINALIZE_OBJECT2);
 
@@ -1525,58 +1519,58 @@ NewFunction(JSContext *cx, JSObject *par
 {
     return detail::NewObject<WithProto::Class, true>(cx, &js_FunctionClass, NULL, parent,
                                                      gc::FINALIZE_OBJECT2);
 }
 
 template <WithProto::e withProto>
 static JS_ALWAYS_INLINE JSObject *
 NewNonFunction(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent,
-               gc::FinalizeKind kind)
+               gc::AllocKind kind)
 {
     return detail::NewObject<withProto, false>(cx, clasp, proto, parent, kind);
 }
 
 template <WithProto::e withProto>
 static JS_ALWAYS_INLINE JSObject *
 NewNonFunction(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent)
 {
-    gc::FinalizeKind kind = gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(clasp));
+    gc::AllocKind kind = gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(clasp));
     return detail::NewObject<withProto, false>(cx, clasp, proto, parent, kind);
 }
 
 template <WithProto::e withProto>
 static JS_ALWAYS_INLINE JSObject *
 NewObject(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent,
-          gc::FinalizeKind kind)
+          gc::AllocKind kind)
 {
     if (clasp == &js_FunctionClass)
         return detail::NewObject<withProto, true>(cx, clasp, proto, parent, kind);
     return detail::NewObject<withProto, false>(cx, clasp, proto, parent, kind);
 }
 
 template <WithProto::e withProto>
 static JS_ALWAYS_INLINE JSObject *
 NewObject(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent)
 {
-    gc::FinalizeKind kind = gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(clasp));
+    gc::AllocKind kind = gc::GetGCObjectKind(JSCLASS_RESERVED_SLOTS(clasp));
     return NewObject<withProto>(cx, clasp, proto, parent, kind);
 }
 
 /*
  * Create a plain object with the specified type. This bypasses getNewType to
  * avoid losing creation site information for objects made by scripted 'new'.
  */
 static JS_ALWAYS_INLINE JSObject *
-NewObjectWithType(JSContext *cx, types::TypeObject *type, JSObject *parent, gc::FinalizeKind kind)
+NewObjectWithType(JSContext *cx, types::TypeObject *type, JSObject *parent, gc::AllocKind kind)
 {
     JS_ASSERT(type == type->proto->newType);
 
     if (CanBeFinalizedInBackground(kind, &js_ObjectClass))
-        kind = GetBackgroundFinalizeKind(kind);
+        kind = GetBackgroundAllocKind(kind);
 
     JSObject* obj = js_NewGCObject(cx, kind);
     if (!obj)
         goto out;
 
     /*
      * Default parent to the parent of the prototype, which was set from
      * the parent of the prototype's constructor.
@@ -1592,58 +1586,57 @@ NewObjectWithType(JSContext *cx, types::
 
 out:
     Probes::createObject(cx, obj);
     return obj;
 }
 
 extern JSObject *
 NewReshapedObject(JSContext *cx, js::types::TypeObject *type, JSObject *parent,
-                  gc::FinalizeKind kind, const Shape *shape);
+                  gc::AllocKind kind, const Shape *shape);
 
 /*
  * As for gc::GetGCObjectKind, where numSlots is a guess at the final size of
  * the object, zero if the final size is unknown. This should only be used for
  * objects that do not require any fixed slots.
  */
-static inline gc::FinalizeKind
+static inline gc::AllocKind
 GuessObjectGCKind(size_t numSlots, bool isArray)
 {
     if (numSlots)
         return gc::GetGCObjectKind(numSlots, isArray);
     return isArray ? gc::FINALIZE_OBJECT8 : gc::FINALIZE_OBJECT4;
 }
 
 /*
  * Get the GC kind to use for scripted 'new' on the given class.
  * FIXME bug 547327: estimate the size from the allocation site.
  */
-static inline gc::FinalizeKind
+static inline gc::AllocKind
 NewObjectGCKind(JSContext *cx, js::Class *clasp)
 {
     if (clasp == &js_ArrayClass || clasp == &js_SlowArrayClass)
         return gc::FINALIZE_OBJECT8;
     if (clasp == &js_FunctionClass)
         return gc::FINALIZE_OBJECT2;
     return gc::FINALIZE_OBJECT4;
 }
 
 static JS_ALWAYS_INLINE JSObject*
 NewObjectWithClassProto(JSContext *cx, Class *clasp, JSObject *proto,
-                        /*gc::FinalizeKind*/ unsigned _kind)
+                        gc::AllocKind kind)
 {
     JS_ASSERT(clasp->isNative());
-    gc::FinalizeKind kind = gc::FinalizeKind(_kind);
 
     types::TypeObject *type = proto->getNewType(cx);
     if (!type)
         return NULL;
 
     if (CanBeFinalizedInBackground(kind, clasp))
-        kind = GetBackgroundFinalizeKind(kind);
+        kind = GetBackgroundAllocKind(kind);
 
     JSObject* obj = js_NewGCObject(cx, kind);
     if (!obj)
         return NULL;
 
     if (!obj->initSharingEmptyShape(cx, clasp, type, proto->getParent(), NULL, kind))
         return NULL;
     return obj;
@@ -1651,18 +1644,17 @@ NewObjectWithClassProto(JSContext *cx, C
 
 /* Make an object with pregenerated shape from a NEWOBJECT bytecode. */
 static inline JSObject *
 CopyInitializerObject(JSContext *cx, JSObject *baseobj, types::TypeObject *type)
 {
     JS_ASSERT(baseobj->getClass() == &js_ObjectClass);
     JS_ASSERT(!baseobj->inDictionaryMode());
 
-    gc::FinalizeKind kind = gc::FinalizeKind(baseobj->finalizeKind());
-    JSObject *obj = NewBuiltinClassInstance(cx, &js_ObjectClass, kind);
+    JSObject *obj = NewBuiltinClassInstance(cx, &js_ObjectClass, baseobj->getAllocKind());
 
     if (!obj || !obj->ensureSlots(cx, baseobj->numSlots()))
         return NULL;
 
     obj->setType(type);
     obj->flags = baseobj->flags;
     obj->lastProp = baseobj->lastProp;
     obj->objShape = baseobj->objShape;
--- a/js/src/jsproxy.cpp
+++ b/js/src/jsproxy.cpp
@@ -1416,17 +1416,17 @@ FixProxy(JSContext *cx, JSObject *proxy,
     JSObject *proto = proxy->getProto();
     JSObject *parent = proxy->getParent();
     Class *clasp = proxy->isFunctionProxy() ? &CallableObjectClass : &js_ObjectClass;
 
     /*
      * Make a blank object from the recipe fix provided to us.  This must have
      * number of fixed slots as the proxy so that we can swap their contents.
      */
-    gc::FinalizeKind kind = gc::FinalizeKind(proxy->arenaHeader()->getThingKind());
+    gc::AllocKind kind = proxy->getAllocKind();
     JSObject *newborn = NewNonFunction<WithProto::Given>(cx, clasp, proto, parent, kind);
     if (!newborn)
         return false;
     AutoObjectRooter tvr2(cx, newborn);
 
     if (clasp == &CallableObjectClass) {
         newborn->setSlot(JSSLOT_CALLABLE_CALL, GetCall(proxy));
         newborn->setSlot(JSSLOT_CALLABLE_CONSTRUCT, GetConstruct(proxy));
--- a/js/src/jsscopeinlines.h
+++ b/js/src/jsscopeinlines.h
@@ -63,17 +63,17 @@ js::Shape::freeTable(JSContext *cx)
     if (hasTable()) {
         cx->delete_(getTable());
         setTable(NULL);
     }
 }
 
 inline js::EmptyShape *
 js::types::TypeObject::getEmptyShape(JSContext *cx, js::Class *aclasp,
-                                     /* gc::FinalizeKind */ unsigned kind)
+                                     gc::AllocKind kind)
 {
     JS_ASSERT(!singleton);
 
     /*
      * Empty shapes can only be on the default 'new' type for a prototype.
      * Objects with a common prototype use the same shape lineage, even if
      * their prototypes differ.
      */
--- a/js/src/methodjit/BaseAssembler.h
+++ b/js/src/methodjit/BaseAssembler.h
@@ -1231,34 +1231,35 @@ static const JSC::MacroAssembler::Regist
 
     /*
      * Get a free object for the specified GC kind in compartment, writing it
      * to result and filling it in according to templateObject. Returns a jump
      * taken if a free thing was not retrieved.
      */
     Jump getNewObject(JSContext *cx, RegisterID result, JSObject *templateObject)
     {
-        unsigned thingKind = templateObject->arenaHeader()->getThingKind();
+        gc::AllocKind allocKind = templateObject->getAllocKind();
 
-        JS_ASSERT(thingKind >= gc::FINALIZE_OBJECT0 && thingKind <= gc::FINALIZE_OBJECT_LAST);
-        size_t thingSize = gc::GCThingSizeMap[thingKind];
+        JS_ASSERT(allocKind >= gc::FINALIZE_OBJECT0 && allocKind <= gc::FINALIZE_OBJECT_LAST);
+        size_t thingSize = gc::Arena::thingSize(allocKind);
 
         JS_ASSERT(cx->typeInferenceEnabled());
         JS_ASSERT(!templateObject->hasSlotsArray());
 
 #ifdef JS_GC_ZEAL
         if (cx->runtime->needZealousGC())
             return jump();
 #endif
 
         /*
          * Inline FreeSpan::allocate. Only the case where the current freelist
          * span is not empty is handled.
          */
-        gc::FreeSpan *list = &cx->compartment->freeLists.lists[thingKind];
+        gc::FreeSpan *list = const_cast<gc::FreeSpan *>
+                             (cx->compartment->arenas.getFreeList(allocKind));
         loadPtr(&list->first, result);
 
         Jump jump = branchPtr(Assembler::BelowOrEqual, AbsoluteAddress(&list->last), result);
 
         addPtr(Imm32(thingSize), result);
         storePtr(result, &list->first);
 
         /*
--- a/js/src/methodjit/StubCalls.cpp
+++ b/js/src/methodjit/StubCalls.cpp
@@ -1345,17 +1345,17 @@ stubs::NewInitArray(VMFrame &f, uint32 c
 
 void JS_FASTCALL
 stubs::NewInitObject(VMFrame &f, JSObject *baseobj)
 {
     JSContext *cx = f.cx;
     TypeObject *type = (TypeObject *) f.scratch;
 
     if (!baseobj) {
-        gc::FinalizeKind kind = GuessObjectGCKind(0, false);
+        gc::AllocKind kind = GuessObjectGCKind(0, false);
         JSObject *obj = NewBuiltinClassInstance(cx, &js_ObjectClass, kind);
         if (!obj)
             THROW();
         if (type)
             obj->setType(type);
         f.regs.sp[0].setObject(*obj);
         return;
     }
--- a/js/src/vm/String-inl.h
+++ b/js/src/vm/String-inl.h
@@ -411,20 +411,20 @@ JSShortString::finalize(JSContext *cx)
 {
     JS_ASSERT(isShort());
 }
 
 inline void
 JSAtom::finalize(JSRuntime *rt)
 {
     JS_ASSERT(isAtom());
-    if (arenaHeader()->getThingKind() == js::gc::FINALIZE_STRING)
+    if (getAllocKind() == js::gc::FINALIZE_STRING)
         asFlat().finalize(rt);
     else
-        JS_ASSERT(arenaHeader()->getThingKind() == js::gc::FINALIZE_SHORT_STRING);
+        JS_ASSERT(getAllocKind() == js::gc::FINALIZE_SHORT_STRING);
 }
 
 inline void
 JSExternalString::finalize(JSContext *cx)
 {
     if (JSStringFinalizeOp finalizer = str_finalizers[externalType()])
         finalizer(cx, this);
 }
--- a/js/src/vm/String.cpp
+++ b/js/src/vm/String.cpp
@@ -1,9 +1,9 @@
-/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
  * vim: set ts=4 sw=4 et tw=79 ft=cpp:
  *
  * ***** BEGIN LICENSE BLOCK *****
  * Version: MPL 1.1/GPL 2.0/LGPL 2.1
  *
  * The contents of this file are subject to the Mozilla Public License Version
  * 1.1 (the "License"); you may not use this file except in compliance with
  * the License. You may obtain a copy of the License at
@@ -44,17 +44,17 @@
 #include "String-inl.h"
 
 using namespace mozilla;
 using namespace js;
 
 bool
 JSString::isShort() const
 {
-    bool is_short = arenaHeader()->getThingKind() == gc::FINALIZE_SHORT_STRING;
+    bool is_short = (getAllocKind() == gc::FINALIZE_SHORT_STRING);
     JS_ASSERT_IF(is_short, isFlat());
     return is_short;
 }
 
 bool
 JSString::isFixed() const
 {
     return isFlat() && !isExtensible();
@@ -64,17 +64,17 @@ bool
 JSString::isInline() const
 {
     return isFixed() && (d.u1.chars == d.inlineStorage || isShort());
 }
 
 bool
 JSString::isExternal() const
 {
-    bool is_external = arenaHeader()->getThingKind() == gc::FINALIZE_EXTERNAL_STRING;
+    bool is_external = (getAllocKind() == gc::FINALIZE_EXTERNAL_STRING);
     JS_ASSERT_IF(is_external, isFixed());
     return is_external;
 }
 
 void
 JSLinearString::mark(JSTracer *)
 {
     JSLinearString *str = this;
--- a/js/src/xpconnect/src/xpcjsruntime.cpp
+++ b/js/src/xpconnect/src/xpcjsruntime.cpp
@@ -1319,23 +1319,24 @@ CompartmentCallback(JSContext *cx, void 
 
 void
 ArenaCallback(JSContext *cx, void *vdata, js::gc::Arena *arena,
               JSGCTraceKind traceKind, size_t thingSize)
 {
     IterateData *data = static_cast<IterateData *>(vdata);
     data->currCompartmentStats->gcHeapArenaHeaders +=
         sizeof(js::gc::ArenaHeader);
+    size_t allocationSpace = arena->thingsSpan(thingSize);
     data->currCompartmentStats->gcHeapArenaPadding +=
-        arena->thingsStartOffset(thingSize) - sizeof(js::gc::ArenaHeader);
+        js::gc::ArenaSize - allocationSpace - sizeof(js::gc::ArenaHeader);
     // We don't call the callback on unused things.  So we compute the
     // unused space like this:  arenaUnused = maxArenaUnused - arenaUsed.
     // We do this by setting arenaUnused to maxArenaUnused here, and then
     // subtracting thingSize for every used cell, in CellCallback().
-    data->currCompartmentStats->gcHeapArenaUnused += arena->thingsSpan(thingSize);
+    data->currCompartmentStats->gcHeapArenaUnused += allocationSpace;
 }
 
 void
 CellCallback(JSContext *cx, void *vdata, void *thing, JSGCTraceKind traceKind,
              size_t thingSize)
 {
     IterateData *data = static_cast<IterateData *>(vdata);
     CompartmentStats *curr = data->currCompartmentStats;