Bug 706885 - Implement generational GC for the SpiderMonkey interpreter; r=billm
author Terrence Cole <terrence@mozilla.com>
Thu, 14 Mar 2013 10:26:06 -0700
changeset 140220 3297733a26610400db18d7df2f7a99ce44d79cfa
parent 140219 021bca10985b42fd1a7d9aad4b58f3ed56035316
child 140221 e88f7f550c2b83f0c1cee627b38fab8fb1c7e8cb
push id 2579
push user akeybl@mozilla.com
push date Mon, 24 Jun 2013 18:52:47 +0000
reviewers billm
bugs 706885
milestone 23.0a1
js/public/GCAPI.h
js/public/HashTable.h
js/src/Makefile.in
js/src/gc/Marking.cpp
js/src/gc/Marking.h
js/src/gc/Nursery-inl.h
js/src/gc/Nursery.cpp
js/src/gc/Nursery.h
js/src/gc/StoreBuffer.cpp
js/src/gc/StoreBuffer.h
js/src/gc/Verifier.cpp
js/src/gc/Zone.h
js/src/jsapi.cpp
js/src/jscntxt.h
js/src/jscompartment.cpp
js/src/jscompartment.h
js/src/jsfriendapi.cpp
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsgcinlines.h
js/src/jsobj.cpp
js/src/jsobj.h
js/src/jsobjinlines.h
js/src/jstypedarray.cpp
js/src/jstypedarrayinlines.h
js/src/shell/js.cpp
js/src/vm/ObjectImpl.h
js/src/vm/Shape.cpp
js/src/vm/Shape.h
--- a/js/public/GCAPI.h
+++ b/js/public/GCAPI.h
@@ -19,16 +19,41 @@ namespace JS {
     D(DESTROY_CONTEXT)                          \
     D(LAST_DITCH)                               \
     D(TOO_MUCH_MALLOC)                          \
     D(ALLOC_TRIGGER)                            \
     D(DEBUG_GC)                                 \
     D(DEBUG_MODE_GC)                            \
     D(TRANSPLANT)                               \
     D(RESET)                                    \
+    D(OUT_OF_NURSERY)                           \
+    D(EVICT_NURSERY)                            \
+    D(FULL_STORE_BUFFER)                        \
+                                                \
+    /* These are reserved for future use. */    \
+    D(RESERVED0)                                \
+    D(RESERVED1)                                \
+    D(RESERVED2)                                \
+    D(RESERVED3)                                \
+    D(RESERVED4)                                \
+    D(RESERVED5)                                \
+    D(RESERVED6)                                \
+    D(RESERVED7)                                \
+    D(RESERVED8)                                \
+    D(RESERVED9)                                \
+    D(RESERVED10)                               \
+    D(RESERVED11)                               \
+    D(RESERVED12)                               \
+    D(RESERVED13)                               \
+    D(RESERVED14)                               \
+    D(RESERVED15)                               \
+    D(RESERVED16)                               \
+    D(RESERVED17)                               \
+    D(RESERVED18)                               \
+    D(RESERVED19)                               \
                                                 \
     /* Reasons from Firefox */                  \
     D(DOM_WINDOW_UTILS)                         \
     D(COMPONENT_UTILS)                          \
     D(MEM_PRESSURE)                             \
     D(CC_WAITING)                               \
     D(CC_FORCED)                                \
     D(LOAD_END)                                 \
--- a/js/public/HashTable.h
+++ b/js/public/HashTable.h
@@ -575,16 +575,19 @@ class HashMapEntry
 
   public:
     template<typename KeyInput, typename ValueInput>
     HashMapEntry(const KeyInput &k, const ValueInput &v) : key(k), value(v) {}
 
     HashMapEntry(MoveRef<HashMapEntry> rhs)
       : key(Move(rhs->key)), value(Move(rhs->value)) { }
 
+    typedef Key KeyType;
+    typedef Value ValueType;
+
     const Key key;
     Value value;
 };
 
 } // namespace js
 
 namespace mozilla {
 
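The KeyType/ValueType typedefs added above let code that is templated over a whole Map name the entry's key and value types; the HashKeyRef::mark implementation later in this patch relies on Map::Entry::ValueType. A minimal sketch of the pattern (SketchLookupValueOr is a hypothetical helper, not part of the patch):

template <typename Map>
static typename Map::Entry::ValueType
SketchLookupValueOr(Map &map, const typename Map::Entry::KeyType &key,
                    const typename Map::Entry::ValueType &fallback)
{
    typename Map::Ptr p = map.lookup(key);
    return p ? p->value : fallback;
}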
--- a/js/src/Makefile.in
+++ b/js/src/Makefile.in
@@ -130,16 +130,17 @@ CPPSRCS		= \
 		RegExpObject.cpp \
 		RegExpStatics.cpp \
 		RegExp.cpp \
 		RootMarking.cpp \
 		Marking.cpp \
 		Memory.cpp \
 		Statistics.cpp \
 		StoreBuffer.cpp \
+		Nursery.cpp \
 		Iteration.cpp \
 		Zone.cpp \
 		Verifier.cpp \
 		StringBuffer.cpp \
 		Unicode.cpp \
 		Xdr.cpp \
 		Module.cpp \
 		$(NULL)
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -5,16 +5,17 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "mozilla/DebugOnly.h"
 
 #include "jsprf.h"
 #include "jsstr.h"
 
 #include "gc/Marking.h"
+#include "gc/Nursery-inl.h"
 #include "methodjit/MethodJIT.h"
 #include "vm/Shape.h"
 
 #include "jsobjinlines.h"
 
 #include "ion/IonCode.h"
 #include "vm/Shape-inl.h"
 #include "vm/String-inl.h"
@@ -100,84 +101,99 @@ IsThingPoisoned(T *thing)
     const uint32_t pw = pb | (pb << 8) | (pb << 16) | (pb << 24);
     JS_STATIC_ASSERT(sizeof(T) >= sizeof(FreeSpan) + sizeof(uint32_t));
     uint32_t *p =
         reinterpret_cast<uint32_t *>(reinterpret_cast<FreeSpan *>(thing) + 1);
     return *p == pw;
 }
 #endif
 
+static GCMarker *
+AsGCMarker(JSTracer *trc)
+{
+    JS_ASSERT(IS_GC_MARKING_TRACER(trc));
+    return static_cast<GCMarker *>(trc);
+}
+
 template<typename T>
 static inline void
 CheckMarkedThing(JSTracer *trc, T *thing)
 {
+#ifdef DEBUG
+    /* This function uses data that's not available in the nursery. */
+    if (IsInsideNursery(trc->runtime, thing))
+        return;
+
     JS_ASSERT(trc);
     JS_ASSERT(thing);
     JS_ASSERT(thing->zone());
     JS_ASSERT(thing->zone()->rt == trc->runtime);
     JS_ASSERT(trc->debugPrinter || trc->debugPrintArg);
 
     DebugOnly<JSRuntime *> rt = trc->runtime;
 
     JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && rt->gcManipulatingDeadZones,
                  !thing->zone()->scheduledForDestruction);
 
-#ifdef DEBUG
     rt->assertValidThread();
-#endif
 
-    JS_ASSERT_IF(thing->zone()->requireGCTracer(), IS_GC_MARKING_TRACER(trc));
+    JS_ASSERT_IF(thing->zone()->requireGCTracer(),
+                 IS_GC_MARKING_TRACER(trc));
 
     JS_ASSERT(thing->isAligned());
 
-    JS_ASSERT_IF(thing->isTenured(), MapTypeToTraceKind<T>::kind == GetGCThingTraceKind(thing));
+    JS_ASSERT(MapTypeToTraceKind<T>::kind == GetGCThingTraceKind(thing));
 
     JS_ASSERT_IF(rt->gcStrictCompartmentChecking,
                  thing->zone()->isCollecting() ||
                  thing->zone() == rt->atomsCompartment->zone());
 
-    JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && ((GCMarker *)trc)->getMarkColor() == GRAY,
+    JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && AsGCMarker(trc)->getMarkColor() == GRAY,
                  thing->zone()->isGCMarkingGray() ||
                  thing->zone() == rt->atomsCompartment->zone());
 
     /*
      * Try to assert that the thing is allocated.  This is complicated by the
      * fact that allocated things may still contain the poison pattern if that
      * part has not been overwritten, and that the free span list head in the
      * ArenaHeader may not be synced with the real one in ArenaLists.
      */
     JS_ASSERT_IF(IsThingPoisoned(thing) && rt->isHeapBusy(),
                  !InFreeList(thing->arenaHeader(), thing));
-}
-
-static GCMarker *
-AsGCMarker(JSTracer *trc)
-{
-    JS_ASSERT(IS_GC_MARKING_TRACER(trc));
-    return static_cast<GCMarker *>(trc);
+#endif
 }
 
 template<typename T>
 static void
 MarkInternal(JSTracer *trc, T **thingp)
 {
     JS_ASSERT(thingp);
     T *thing = *thingp;
 
     CheckMarkedThing(trc, thing);
 
-    /*
-     * Don't mark things outside a compartment if we are in a per-compartment
-     * GC.
-     */
     if (!trc->callback) {
-        if (thing->zone()->isGCMarking()) {
-            PushMarkStack(AsGCMarker(trc), thing);
-            thing->zone()->maybeAlive = true;
-        }
+        /*
+         * We may mark a Nursery thing outside the context of the
+         * MinorCollectionTracer because of a pre-barrier. The pre-barrier is
+         * not needed in this case because we perform a minor collection before
+         * each incremental slice.
+         */
+        if (IsInsideNursery(trc->runtime, thing))
+            return;
+
+        /*
+         * Don't mark things outside a compartment if we are in a
+         * per-compartment GC.
+         */
+        if (!thing->zone()->isGCMarking())
+            return;
+
+        PushMarkStack(AsGCMarker(trc), thing);
+        thing->zone()->maybeAlive = true;
     } else {
         trc->callback(trc, (void **)thingp, MapTypeToTraceKind<T>::kind);
         JS_UNSET_TRACING_LOCATION(trc);
     }
 
     trc->debugPrinter = NULL;
     trc->debugPrintArg = NULL;
 }
@@ -247,28 +263,38 @@ namespace js {
 namespace gc {
 
 template <typename T>
 static bool
 IsMarked(T **thingp)
 {
     JS_ASSERT(thingp);
     JS_ASSERT(*thingp);
+#ifdef JSGC_GENERATIONAL
+    Nursery &nursery = (*thingp)->runtime()->gcNursery;
+    if (nursery.isInside(*thingp))
+        return nursery.getForwardedPointer(thingp);
+#endif
     Zone *zone = (*thingp)->tenuredZone();
     if (!zone->isCollecting() || zone->isGCFinished())
         return true;
     return (*thingp)->isMarked();
 }
 
 template <typename T>
 static bool
 IsAboutToBeFinalized(T **thingp)
 {
     JS_ASSERT(thingp);
     JS_ASSERT(*thingp);
+#ifdef JSGC_GENERATIONAL
+    Nursery &nursery = (*thingp)->runtime()->gcNursery;
+    if (nursery.isInside(*thingp))
+        return !nursery.getForwardedPointer(thingp);
+#endif
     if (!(*thingp)->tenuredZone()->isGCSweeping())
         return false;
     return !(*thingp)->isMarked();
 }
 
 #define DeclMarkerImpl(base, type)                                                                \
 void                                                                                              \
 Mark##base(JSTracer *trc, EncapsulatedPtr<type> *thing, const char *name)                         \
@@ -323,22 +349,22 @@ Is##base##AboutToBeFinalized(Encapsulate
 {                                                                                                 \
     return IsAboutToBeFinalized<type>(thingp->unsafeGet());                                       \
 }
 
 DeclMarkerImpl(BaseShape, BaseShape)
 DeclMarkerImpl(BaseShape, UnownedBaseShape)
 DeclMarkerImpl(IonCode, ion::IonCode)
 DeclMarkerImpl(Object, ArgumentsObject)
+DeclMarkerImpl(Object, ArrayBufferObject)
 DeclMarkerImpl(Object, DebugScopeObject)
 DeclMarkerImpl(Object, GlobalObject)
 DeclMarkerImpl(Object, JSObject)
 DeclMarkerImpl(Object, JSFunction)
 DeclMarkerImpl(Object, ScopeObject)
-DeclMarkerImpl(Object, ArrayBufferObject)
 DeclMarkerImpl(Script, JSScript)
 DeclMarkerImpl(Shape, Shape)
 DeclMarkerImpl(String, JSAtom)
 DeclMarkerImpl(String, JSString)
 DeclMarkerImpl(String, JSFlatString)
 DeclMarkerImpl(String, JSLinearString)
 DeclMarkerImpl(String, PropertyName)
 DeclMarkerImpl(TypeObject, js::types::TypeObject)
@@ -348,17 +374,18 @@ DeclMarkerImpl(TypeObject, js::types::Ty
 
 /*** Externally Typed Marking ***/
 
 void
 gc::MarkKind(JSTracer *trc, void **thingp, JSGCTraceKind kind)
 {
     JS_ASSERT(thingp);
     JS_ASSERT(*thingp);
-    JS_ASSERT(kind == GetGCThingTraceKind(*thingp));
+    DebugOnly<Cell *> cell = static_cast<Cell *>(*thingp);
+    JS_ASSERT_IF(cell->isTenured(), kind == MapAllocToTraceKind(cell->tenuredGetAllocKind()));
     switch (kind) {
       case JSTRACE_OBJECT:
         MarkInternal(trc, reinterpret_cast<RawObject *>(thingp));
         break;
       case JSTRACE_STRING:
         MarkInternal(trc, reinterpret_cast<RawString *>(thingp));
         break;
       case JSTRACE_SCRIPT:
@@ -720,43 +747,47 @@ gc::IsCellAboutToBeFinalized(Cell **thin
 #define JS_COMPARTMENT_ASSERT_STR(rt, thing)                            \
     JS_ASSERT((thing)->zone()->isGCMarking() ||                         \
               (thing)->zone() == (rt)->atomsCompartment->zone());
 
 static void
 PushMarkStack(GCMarker *gcmarker, JSObject *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
+    JS_ASSERT(!IsInsideNursery(thing->runtime(), thing));
 
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushObject(thing);
 }
 
 static void
 PushMarkStack(GCMarker *gcmarker, JSFunction *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
+    JS_ASSERT(!IsInsideNursery(thing->runtime(), thing));
 
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushObject(thing);
 }
 
 static void
 PushMarkStack(GCMarker *gcmarker, types::TypeObject *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
+    JS_ASSERT(!IsInsideNursery(thing->runtime(), thing));
 
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushType(thing);
 }
 
 static void
 PushMarkStack(GCMarker *gcmarker, RawScript thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
+    JS_ASSERT(!IsInsideNursery(thing->runtime(), thing));
 
     /*
      * We mark scripts directly rather than pushing on the stack as they can
      * refer to other scripts only indirectly (like via nested functions) and
      * we cannot get to deep recursion.
      */
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         MarkChildren(gcmarker, thing);
@@ -764,38 +795,41 @@ PushMarkStack(GCMarker *gcmarker, RawScr
 
 static void
 ScanShape(GCMarker *gcmarker, RawShape shape);
 
 static void
 PushMarkStack(GCMarker *gcmarker, RawShape thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
+    JS_ASSERT(!IsInsideNursery(thing->runtime(), thing));
 
     /* We mark shapes directly rather than pushing on the stack. */
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         ScanShape(gcmarker, thing);
 }
 
 static void
 PushMarkStack(GCMarker *gcmarker, ion::IonCode *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
+    JS_ASSERT(!IsInsideNursery(thing->runtime(), thing));
 
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         gcmarker->pushIonCode(thing);
 }
 
 static inline void
 ScanBaseShape(GCMarker *gcmarker, RawBaseShape base);
 
 static void
 PushMarkStack(GCMarker *gcmarker, RawBaseShape thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
+    JS_ASSERT(!IsInsideNursery(thing->runtime(), thing));
 
     /* We mark base shapes directly rather than pushing on the stack. */
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         ScanBaseShape(gcmarker, thing);
 }
 
 static void
 ScanShape(GCMarker *gcmarker, RawShape shape)
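The nursery checks added above (the early return in MarkInternal and the IsInsideNursery asserts in each PushMarkStack overload) all rest on the same containment test: the nursery is a single contiguous mapping whose bounds live on the runtime, so membership is two pointer comparisons. A minimal sketch mirroring Nursery::isInside from Nursery.h below (SketchIsInsideNursery is a hypothetical name):

static inline bool
SketchIsInsideNursery(uintptr_t nurseryStart, uintptr_t nurseryEnd, const void *p)
{
    /* The bounds are the runtime's gcNurseryStart_/gcNurseryEnd_. */
    return uintptr_t(p) >= nurseryStart && uintptr_t(p) < nurseryEnd;
}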
--- a/js/src/gc/Marking.h
+++ b/js/src/gc/Marking.h
@@ -7,32 +7,34 @@
 #ifndef gc_marking_h___
 #define gc_marking_h___
 
 #include "jsgc.h"
 #include "jscntxt.h"
 #include "jslock.h"
 
 #include "gc/Barrier.h"
+#include "gc/Nursery.h"
 #include "js/TemplateLib.h"
 #include "ion/IonCode.h"
 
 extern "C" {
 struct JSContext;
 class JSFunction;
 class JSObject;
 class JSScript;
 }
 
 class JSAtom;
 class JSLinearString;
 
 namespace js {
 
 class ArgumentsObject;
+class ArrayBufferObject;
 class BaseShape;
 class GlobalObject;
 class UnownedBaseShape;
 class Shape;
 
 template<class, typename> class HeapPtr;
 
 namespace gc {
@@ -87,22 +89,22 @@ bool Is##base##Marked(type **thingp);   
 bool Is##base##Marked(EncapsulatedPtr<type> *thingp);                                             \
 bool Is##base##AboutToBeFinalized(type **thingp);                                                 \
 bool Is##base##AboutToBeFinalized(EncapsulatedPtr<type> *thingp);
 
 DeclMarker(BaseShape, BaseShape)
 DeclMarker(BaseShape, UnownedBaseShape)
 DeclMarker(IonCode, ion::IonCode)
 DeclMarker(Object, ArgumentsObject)
+DeclMarker(Object, ArrayBufferObject)
 DeclMarker(Object, DebugScopeObject)
 DeclMarker(Object, GlobalObject)
 DeclMarker(Object, JSObject)
 DeclMarker(Object, JSFunction)
 DeclMarker(Object, ScopeObject)
-DeclMarker(Object, ArrayBufferObject)
 DeclMarker(Script, JSScript)
 DeclMarker(Shape, Shape)
 DeclMarker(String, JSAtom)
 DeclMarker(String, JSString)
 DeclMarker(String, JSFlatString)
 DeclMarker(String, JSLinearString)
 DeclMarker(String, PropertyName)
 DeclMarker(TypeObject, types::TypeObject)
@@ -274,22 +276,30 @@ Mark(JSTracer *trc, EncapsulatedPtrScrip
 }
 
 inline void
 Mark(JSTracer *trc, HeapPtr<ion::IonCode> *code, const char *name)
 {
     MarkIonCode(trc, code, name);
 }
 
+/* For use by WeakMap's HashKeyRef instantiation. */
 inline void
 Mark(JSTracer *trc, JSObject **objp, const char *name)
 {
     MarkObjectUnbarriered(trc, objp, name);
 }
 
+/* For use by Debugger::WeakMap's proxiedScopes HashKeyRef instantiation. */
+inline void
+Mark(JSTracer *trc, ScopeObject **obj, const char *name)
+{
+    MarkObjectUnbarriered(trc, obj, name);
+}
+
 bool
 IsCellMarked(Cell **thingp);
 
 bool
 IsCellAboutToBeFinalized(Cell **thing);
 
 inline bool
 IsMarked(EncapsulatedValue *v)
new file mode 100644
--- /dev/null
+++ b/js/src/gc/Nursery-inl.h
@@ -0,0 +1,83 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=4 sw=4 et tw=79 ft=cpp:
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifdef JSGC_GENERATIONAL
+#ifndef gc_Nursery_inl_h__
+#define gc_Nursery_inl_h__
+
+#include "gc/Heap.h"
+#include "gc/Nursery.h"
+
+namespace js {
+namespace gc {
+
+/*
+ * This structure overlays a Cell in the Nursery and re-purposes its memory
+ * for managing the Nursery collection process.
+ */
+class RelocationOverlay
+{
+    friend struct MinorCollectionTracer;
+
+    /* The low bit is set so this should never equal a normal pointer. */
+    const static uintptr_t Relocated = uintptr_t(0xbad0bad1);
+
+    /* Set to Relocated when moved. */
+    uintptr_t magic_;
+
+    /* The location |this| was moved to. */
+    Cell *newLocation_;
+
+    /* A list entry to track all relocated things. */
+    RelocationOverlay *next_;
+
+  public:
+    static RelocationOverlay *fromCell(Cell *cell) {
+        JS_ASSERT(!cell->isTenured());
+        return reinterpret_cast<RelocationOverlay *>(cell);
+    }
+
+    bool isForwarded() const {
+        return magic_ == Relocated;
+    }
+
+    Cell *forwardingAddress() const {
+        JS_ASSERT(isForwarded());
+        return newLocation_;
+    }
+
+    void forwardTo(Cell *cell) {
+        JS_ASSERT(!isForwarded());
+        magic_ = Relocated;
+        newLocation_ = cell;
+        next_ = NULL;
+    }
+
+    RelocationOverlay *next() const {
+        return next_;
+    }
+};
+
+} /* namespace gc */
+} /* namespace js */
+
+template <typename T>
+JS_ALWAYS_INLINE bool
+js::Nursery::getForwardedPointer(T **ref)
+{
+    JS_ASSERT(ref);
+    JS_ASSERT(isInside(*ref));
+    const gc::RelocationOverlay *overlay = reinterpret_cast<const gc::RelocationOverlay *>(*ref);
+    if (!overlay->isForwarded())
+        return false;
+    /* This static cast from Cell* restricts T to valid (GC thing) types. */
+    *ref = static_cast<T *>(overlay->forwardingAddress());
+    return true;
+}
+
+#endif /* gc_Nursery_inl_h__ */
+#endif /* JSGC_GENERATIONAL */
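RelocationOverlay and getForwardedPointer together implement the classic forwarding-pointer scheme: once a cell is moved, its old nursery memory is overwritten with a magic word and the new address, so a stale pointer can be redirected in place. A standalone sketch of the same pattern (names are illustrative, not the patch's):

struct SketchOverlay {
    uintptr_t magic;     /* set to Relocated (0xbad0bad1) once moved */
    void *newLocation;   /* where the cell now lives */
};

static bool
SketchUpdateIfForwarded(void **ref)
{
    const uintptr_t Relocated = 0xbad0bad1;
    SketchOverlay *overlay = static_cast<SketchOverlay *>(*ref);
    if (overlay->magic != Relocated)
        return false;             /* not moved yet */
    *ref = overlay->newLocation;  /* redirect to the tenured copy */
    return true;
}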
new file mode 100644
--- /dev/null
+++ b/js/src/gc/Nursery.cpp
@@ -0,0 +1,516 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sw=4 et tw=78:
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifdef JSGC_GENERATIONAL
+
+#include "jscompartment.h"
+#include "jsgc.h"
+
+#include "gc/GCInternals.h"
+#include "gc/Memory.h"
+#include "vm/Debugger.h"
+
+#include "gc/Barrier-inl.h"
+#include "gc/Nursery-inl.h"
+
+using namespace js;
+using namespace gc;
+using namespace mozilla;
+
+bool
+js::Nursery::enable()
+{
+    if (isEnabled())
+        return true;
+
+    if (!hugeSlots.init())
+        return false;
+
+    fallbackBitmap.clear(false);
+
+    void *heap = MapAlignedPages(NurserySize, Alignment);
+    if (!heap)
+        return false;
+
+    JSRuntime *rt = runtime();
+    rt->gcNurseryStart_ = position_ = uintptr_t(heap);
+    rt->gcNurseryEnd_ = start() + NurseryUsableSize;
+    asLayout().runtime = rt;
+    JS_POISON(asLayout().data, FreshNursery, sizeof(asLayout().data));
+
+    JS_ASSERT(isEnabled());
+    return true;
+}
+
+void
+js::Nursery::disable()
+{
+    if (!isEnabled())
+        return;
+
+    hugeSlots.finish();
+    JS_ASSERT(start());
+    UnmapPages((void *)start(), NurserySize);
+    runtime()->gcNurseryStart_ = runtime()->gcNurseryEnd_ = position_ = 0;
+}
+
+js::Nursery::~Nursery()
+{
+    disable();
+}
+
+void *
+js::Nursery::allocate(size_t size)
+{
+    JS_ASSERT(size % ThingAlignment == 0);
+    JS_ASSERT(position() % ThingAlignment == 0);
+
+    if (position() + size > end())
+        return NULL;
+
+    void *thing = (void *)position();
+    position_ = position() + size;
+
+    JS_POISON(thing, AllocatedThing, size);
+    return thing;
+}
+
+/* Internally, this function is used to allocate elements as well as slots. */
+HeapSlot *
+js::Nursery::allocateSlots(JSContext *cx, JSObject *obj, uint32_t nslots)
+{
+    JS_ASSERT(obj);
+    JS_ASSERT(nslots > 0);
+
+    if (!isInside(obj))
+        return cx->pod_malloc<HeapSlot>(nslots);
+
+    if (nslots > MaxNurserySlots)
+        return allocateHugeSlots(cx, nslots);
+
+    size_t size = sizeof(HeapSlot) * nslots;
+    HeapSlot *slots = static_cast<HeapSlot *>(allocate(size));
+    if (slots)
+        return slots;
+
+    return allocateHugeSlots(cx, nslots);
+}
+
+ObjectElements *
+js::Nursery::allocateElements(JSContext *cx, JSObject *obj, uint32_t nelems)
+{
+    return reinterpret_cast<ObjectElements *>(allocateSlots(cx, obj, nelems));
+}
+
+HeapSlot *
+js::Nursery::reallocateSlots(JSContext *cx, JSObject *obj, HeapSlot *oldSlots,
+                             uint32_t oldCount, uint32_t newCount)
+{
+    size_t oldSize = oldCount * sizeof(HeapSlot);
+    size_t newSize = newCount * sizeof(HeapSlot);
+
+    if (!isInside(obj))
+        return static_cast<HeapSlot *>(cx->realloc_(oldSlots, oldSize, newSize));
+
+    if (!isInside(oldSlots)) {
+        HeapSlot *newSlots = static_cast<HeapSlot *>(cx->realloc_(oldSlots, oldSize, newSize));
+        if (oldSlots != newSlots) {
+            hugeSlots.remove(oldSlots);
+            /* If this put fails, we will only leak the slots. */
+            (void)hugeSlots.put(newSlots);
+        }
+        return newSlots;
+    }
+
+    /* The nursery cannot make use of the returned slots data. */
+    if (newCount < oldCount)
+        return oldSlots;
+
+    HeapSlot *newSlots = allocateSlots(cx, obj, newCount);
+    PodCopy(newSlots, oldSlots, oldCount);
+    return newSlots;
+}
+
+ObjectElements *
+js::Nursery::reallocateElements(JSContext *cx, JSObject *obj, ObjectElements *oldHeader,
+                                uint32_t oldCount, uint32_t newCount)
+{
+    HeapSlot *slots = reallocateSlots(cx, obj, reinterpret_cast<HeapSlot *>(oldHeader),
+                                      oldCount, newCount);
+    return reinterpret_cast<ObjectElements *>(slots);
+}
+
+HeapSlot *
+js::Nursery::allocateHugeSlots(JSContext *cx, size_t nslots)
+{
+    HeapSlot *slots = cx->pod_malloc<HeapSlot>(nslots);
+    /* If this put fails, we will only leak the slots. */
+    (void)hugeSlots.put(slots);
+    return slots;
+}
+
+void
+js::Nursery::notifyInitialSlots(Cell *cell, HeapSlot *slots)
+{
+    if (isInside(cell) && !isInside(slots)) {
+        /* If this put fails, we will only leak the slots. */
+        (void)hugeSlots.put(slots);
+    }
+}
+
+namespace js {
+namespace gc {
+
+class MinorCollectionTracer : public JSTracer
+{
+  public:
+    Nursery *nursery;
+    JSRuntime *runtime;
+    AutoTraceSession session;
+
+    /*
+     * This list is threaded through the Nursery using the space from already
+     * moved things. The list is used to fix up the moved things and to find
+     * things held live by intra-Nursery pointers.
+     */
+    RelocationOverlay *head;
+    RelocationOverlay **tail;
+
+    /* Save and restore all of the runtime state we use during MinorGC. */
+    bool priorNeedsBarrier;
+
+    /* Insert the given relocation entry into the list of things to visit. */
+    JS_ALWAYS_INLINE void insertIntoFixupList(RelocationOverlay *entry) {
+        *tail = entry;
+        tail = &entry->next_;
+        *tail = NULL;
+    }
+
+    MinorCollectionTracer(JSRuntime *rt, Nursery *nursery)
+      : JSTracer(),
+        nursery(nursery),
+        runtime(rt),
+        session(runtime, MinorCollecting),
+        head(NULL),
+        tail(&head),
+        priorNeedsBarrier(runtime->needsBarrier())
+    {
+        JS_TracerInit(this, runtime, Nursery::MinorGCCallback);
+        eagerlyTraceWeakMaps = TraceWeakMapKeysValues;
+
+        runtime->gcNumber++;
+        runtime->setNeedsBarrier(false);
+        ++runtime->gcDisableStrictProxyCheckingCount;
+    }
+
+    ~MinorCollectionTracer() {
+        --runtime->gcDisableStrictProxyCheckingCount;
+        runtime->setNeedsBarrier(priorNeedsBarrier);
+    }
+};
+
+} /* namespace gc */
+} /* namespace js */
+
+static AllocKind
+GetObjectAllocKindForCopy(JSObject *obj)
+{
+    if (obj->isArray()) {
+        JS_ASSERT(obj->numFixedSlots() == 0);
+        size_t nelements = obj->getDenseInitializedLength();
+        return GetBackgroundAllocKind(GetGCArrayKind(nelements));
+    }
+
+    if (obj->isFunction())
+        return obj->toFunction()->getAllocKind();
+
+    AllocKind kind = GetGCObjectFixedSlotsKind(obj->numFixedSlots());
+    if (CanBeFinalizedInBackground(kind, obj->getClass()))
+        kind = GetBackgroundAllocKind(kind);
+    return kind;
+}
+
+void *
+js::Nursery::allocateFromTenured(Zone *zone, AllocKind thingKind)
+{
+    void *t = zone->allocator.arenas.allocateFromFreeList(thingKind, Arena::thingSize(thingKind));
+    if (!t) {
+        zone->allocator.arenas.checkEmptyFreeList(thingKind);
+        t = zone->allocator.arenas.allocateFromArena(zone, thingKind);
+    }
+    return t;
+}
+
+void *
+js::Nursery::moveToTenured(MinorCollectionTracer *trc, JSObject *src)
+{
+    Zone *zone = src->zone();
+    AllocKind dstKind = GetObjectAllocKindForCopy(src);
+    JSObject *dst = static_cast<JSObject *>(allocateFromTenured(zone, dstKind));
+    if (!dst)
+        MOZ_CRASH();
+
+    moveObjectToTenured(dst, src, dstKind);
+
+    RelocationOverlay *overlay = reinterpret_cast<RelocationOverlay *>(src);
+    overlay->forwardTo(dst);
+    trc->insertIntoFixupList(overlay);
+
+    return static_cast<void *>(dst);
+}
+
+void
+js::Nursery::moveObjectToTenured(JSObject *dst, JSObject *src, AllocKind dstKind)
+{
+    size_t srcSize = Arena::thingSize(dstKind);
+
+    /*
+     * Arrays do not necessarily have the same AllocKind between src and dst.
+     * We deal with this by copying elements manually, possibly re-inlining
+     * them if there is adequate room inline in dst.
+     */
+    if (src->isArray())
+        srcSize = sizeof(ObjectImpl);
+
+    js_memcpy(dst, src, srcSize);
+    moveSlotsToTenured(dst, src, dstKind);
+    moveElementsToTenured(dst, src, dstKind);
+
+    /* The shape's list head may point into the old object. */
+    if (&src->shape_ == dst->shape_->listp)
+        dst->shape_->listp = &dst->shape_;
+}
+
+void
+js::Nursery::moveSlotsToTenured(JSObject *dst, JSObject *src, AllocKind dstKind)
+{
+    /* Fixed slots have already been copied over. */
+    if (!src->hasDynamicSlots())
+        return;
+
+    if (!isInside(src->slots)) {
+        hugeSlots.remove(src->slots);
+        return;
+    }
+
+    Allocator *alloc = &src->zone()->allocator;
+    size_t count = src->numDynamicSlots();
+    dst->slots = alloc->pod_malloc<HeapSlot>(count);
+    PodCopy(dst->slots, src->slots, count);
+}
+
+void
+js::Nursery::moveElementsToTenured(JSObject *dst, JSObject *src, AllocKind dstKind)
+{
+    if (src->hasEmptyElements())
+        return;
+
+    Allocator *alloc = &src->zone()->allocator;
+    ObjectElements *srcHeader = src->getElementsHeader();
+    ObjectElements *dstHeader;
+
+    if (!isInside(srcHeader)) {
+        JS_ASSERT(src->elements == dst->elements);
+        hugeSlots.remove(reinterpret_cast<HeapSlot*>(srcHeader));
+        return;
+    }
+
+    /* ArrayBuffer stores byte-length, not Value count. */
+    if (src->isArrayBuffer()) {
+        size_t nbytes = sizeof(ObjectElements) + srcHeader->initializedLength;
+        if (src->hasDynamicElements()) {
+            dstHeader = static_cast<ObjectElements *>(alloc->malloc_(nbytes));
+            if (!dstHeader)
+                MOZ_CRASH();
+        } else {
+            dst->setFixedElements();
+            dstHeader = dst->getElementsHeader();
+        }
+        js_memcpy(dstHeader, srcHeader, nbytes);
+        dst->elements = dstHeader->elements();
+        return;
+    }
+
+    size_t nslots = ObjectElements::VALUES_PER_HEADER + srcHeader->initializedLength;
+
+    /* Unlike other objects, Arrays can have fixed elements. */
+    if (src->isArray() && nslots <= GetGCKindSlots(dstKind)) {
+        dst->setFixedElements();
+        dstHeader = dst->getElementsHeader();
+        js_memcpy(dstHeader, srcHeader, nslots * sizeof(HeapSlot));
+        dstHeader->capacity = GetGCKindSlots(dstKind) - ObjectElements::VALUES_PER_HEADER;
+        return;
+    }
+
+    size_t nbytes = nslots * sizeof(HeapValue);
+    dstHeader = static_cast<ObjectElements *>(alloc->malloc_(nbytes));
+    if (!dstHeader)
+        MOZ_CRASH();
+    js_memcpy(dstHeader, srcHeader, nslots * sizeof(HeapSlot));
+    dstHeader->capacity = srcHeader->initializedLength;
+    dst->elements = dstHeader->elements();
+}
+
+static bool
+ShouldMoveToTenured(MinorCollectionTracer *trc, void **thingp)
+{
+    Cell *cell = static_cast<Cell *>(*thingp);
+    Nursery &nursery = *trc->nursery;
+    return !nursery.isInside(thingp) && nursery.isInside(cell) &&
+           !nursery.getForwardedPointer(thingp);
+}
+
+/* static */ void
+js::Nursery::MinorGCCallback(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
+{
+    MinorCollectionTracer *trc = static_cast<MinorCollectionTracer *>(jstrc);
+    if (ShouldMoveToTenured(trc, thingp))
+        *thingp = trc->nursery->moveToTenured(trc, static_cast<JSObject *>(*thingp));
+}
+
+void
+js::Nursery::markFallback(Cell *cell)
+{
+    JS_ASSERT(uintptr_t(cell) >= start());
+    size_t offset = uintptr_t(cell) - start();
+    JS_ASSERT(offset < end() - start());
+    JS_ASSERT(offset % ThingAlignment == 0);
+    fallbackBitmap.set(offset / ThingAlignment);
+}
+
+void
+js::Nursery::moveFallbackToTenured(gc::MinorCollectionTracer *trc)
+{
+    for (size_t i = 0; i < FallbackBitmapBits; ++i) {
+        if (fallbackBitmap.get(i)) {
+            JSObject *src = reinterpret_cast<JSObject *>(start() + i * ThingAlignment);
+            moveToTenured(trc, src);
+        }
+    }
+    fallbackBitmap.clear(false);
+}
+
+/* static */ void
+js::Nursery::MinorFallbackMarkingCallback(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
+{
+    MinorCollectionTracer *trc = static_cast<MinorCollectionTracer *>(jstrc);
+    if (ShouldMoveToTenured(trc, thingp))
+        trc->nursery->markFallback(static_cast<JSObject *>(*thingp));
+}
+
+/* static */ void
+js::Nursery::MinorFallbackFixupCallback(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
+{
+    MinorCollectionTracer *trc = static_cast<MinorCollectionTracer *>(jstrc);
+    if (trc->nursery->isInside(*thingp))
+        trc->nursery->getForwardedPointer(thingp);
+}
+
+static void
+TraceHeapWithCallback(JSTracer *trc, JSTraceCallback callback)
+{
+    JSTraceCallback prior = trc->callback;
+
+    AutoCopyFreeListToArenas copy(trc->runtime);
+    trc->callback = callback;
+    for (ZonesIter zone(trc->runtime); !zone.done(); zone.next()) {
+        for (size_t i = 0; i < FINALIZE_LIMIT; ++i) {
+            AllocKind kind = AllocKind(i);
+            for (CellIterUnderGC cells(zone, kind); !cells.done(); cells.next())
+                JS_TraceChildren(trc, cells.getCell(), MapAllocToTraceKind(kind));
+        }
+    }
+
+    trc->callback = prior;
+}
+
+void
+js::Nursery::markStoreBuffer(MinorCollectionTracer *trc)
+{
+    JSRuntime *rt = trc->runtime;
+    if (!rt->gcStoreBuffer.hasOverflowed()) {
+        rt->gcStoreBuffer.mark(trc);
+        return;
+    }
+
+    /*
+     * If the store buffer has overflowed, we need to walk the full heap to
+     * discover cross-generation edges. Since we cannot easily walk the heap
+     * while simultaneously allocating, we use a three pass algorithm:
+     *   1) Walk the major heap and mark live things in the nursery in a
+     *      pre-allocated bitmap.
+     *   2) Use the bitmap to move all live nursery things to the tenured
+     *      heap.
+     *   3) Walk the heap a second time to find and update all of the moved
+     *      references in the tenured heap.
+     */
+    TraceHeapWithCallback(trc, MinorFallbackMarkingCallback);
+    moveFallbackToTenured(trc);
+    TraceHeapWithCallback(trc, MinorFallbackFixupCallback);
+}
+
+void
+js::Nursery::collect(JSRuntime *rt, JS::gcreason::Reason reason)
+{
+    JS_AbortIfWrongThread(rt);
+
+    if (!isEnabled())
+        return;
+
+    if (position() == start())
+        return;
+
+    rt->gcHelperThread.waitBackgroundSweepEnd();
+
+    /* Move objects pointed to by roots from the nursery to the major heap. */
+    MinorCollectionTracer trc(rt, this);
+    MarkRuntime(&trc);
+    Debugger::markAll(&trc);
+    for (CompartmentsIter comp(rt); !comp.done(); comp.next()) {
+        comp->markAllCrossCompartmentWrappers(&trc);
+        comp->markAllInitialShapeTableEntries(&trc);
+    }
+    markStoreBuffer(&trc);
+
+    /*
+     * Most of the work is done here. This loop iterates over objects that have
+     * been moved to the major heap. If these objects have any outgoing pointers
+     * to the nursery, then those nursery objects get moved as well, until no
+     * objects are left to move. That is, we iterate to a fixed point.
+     */
+    for (RelocationOverlay *p = trc.head; p; p = p->next()) {
+        JSObject *obj = static_cast<JSObject*>(p->forwardingAddress());
+        JS_TraceChildren(&trc, obj, MapAllocToTraceKind(obj->tenuredGetAllocKind()));
+    }
+
+    /* Sweep. */
+    sweep(rt->defaultFreeOp());
+    rt->gcStoreBuffer.clear();
+
+    /*
+     * We ignore gcMaxBytes when allocating for minor collection. However, if we
+     * overflowed, we disable the nursery. The next time we allocate, we'll fail
+     * because gcBytes >= gcMaxBytes.
+     */
+    if (rt->gcBytes >= rt->gcMaxBytes)
+        disable();
+}
+
+void
+js::Nursery::sweep(FreeOp *fop)
+{
+    for (HugeSlotsSet::Range r = hugeSlots.all(); !r.empty(); r.popFront())
+        fop->free_(r.front());
+    hugeSlots.clear();
+
+    JS_POISON((void *)start(), SweptNursery, NurserySize - sizeof(JSRuntime *));
+
+    position_ = start();
+}
+
+#endif /* JSGC_GENERATIONAL */
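The heart of collect() is the loop over trc.head: the list threaded through RelocationOverlays is simultaneously the record of what has moved and the work queue, so tracing runs to a fixed point, much like the scan pointer in a Cheney copying collector. A distilled sketch of that control structure (names are illustrative, not the patch's):

struct SketchMoved {        /* stands in for RelocationOverlay */
    SketchMoved *next;
};

static void
SketchScanToFixedPoint(SketchMoved *head, void (*traceChildren)(SketchMoved *))
{
    /*
     * traceChildren may tenure further nursery things and append them
     * to this same list (via insertIntoFixupList), so the loop keeps
     * running until no reachable nursery thing remains.
     */
    for (SketchMoved *p = head; p; p = p->next)
        traceChildren(p);
}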
new file mode 100644
--- /dev/null
+++ b/js/src/gc/Nursery.h
@@ -0,0 +1,185 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sw=4 et tw=78:
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef jsgc_nursery_h___
+#define jsgc_nursery_h___
+
+#ifdef JSGC_GENERATIONAL
+
+#include "ds/BitArray.h"
+#include "js/HashTable.h"
+
+#include "jsgc.h"
+#include "jspubtd.h"
+
+namespace js {
+
+class ObjectElements;
+
+namespace gc {
+class MinorCollectionTracer;
+} /* namespace gc */
+
+class Nursery
+{
+  public:
+    const static size_t Alignment = gc::ChunkSize;
+    const static size_t NurserySize = gc::ChunkSize;
+    const static size_t NurseryMask = NurserySize - 1;
+
+    explicit Nursery(JSRuntime *rt)
+      : runtime_(rt),
+        position_(0)
+    {}
+    ~Nursery();
+
+    bool enable();
+    void disable();
+    bool isEnabled() const { return bool(start()); }
+
+    template <typename T>
+    JS_ALWAYS_INLINE bool isInside(const T *p) const {
+        return uintptr_t(p) >= start() && uintptr_t(p) < end();
+    }
+
+    /*
+     * Allocate and return a pointer to a new GC thing. Returns NULL if the
+     * Nursery is full.
+     */
+    void *allocate(size_t size);
+
+    /* Allocate a slots array for the given object. */
+    HeapSlot *allocateSlots(JSContext *cx, JSObject *obj, uint32_t nslots);
+
+    /* Allocate an elements vector for the given object. */
+    ObjectElements *allocateElements(JSContext *cx, JSObject *obj, uint32_t nelems);
+
+    /* Resize an existing slots array. */
+    HeapSlot *reallocateSlots(JSContext *cx, JSObject *obj, HeapSlot *oldSlots,
+                              uint32_t oldCount, uint32_t newCount);
+
+    /* Resize an existing elements vector. */
+    ObjectElements *reallocateElements(JSContext *cx, JSObject *obj, ObjectElements *oldHeader,
+                                       uint32_t oldCount, uint32_t newCount);
+
+    /* Add a slots array to our tracking list if it is out-of-line. */
+    void notifyInitialSlots(gc::Cell *cell, HeapSlot *slots);
+
+    /* Do a minor collection. */
+    void collect(JSRuntime *rt, JS::gcreason::Reason reason);
+
+    /*
+     * Check if the thing at |*ref| in the Nursery has been forwarded. If so,
+     * sets |*ref| to the new location of the object and returns true. Otherwise
+     * returns false and leaves |*ref| unchanged.
+     */
+    template <typename T>
+    JS_ALWAYS_INLINE bool getForwardedPointer(T **ref);
+
+  private:
+    /*
+     * The start and end pointers are stored under the runtime so that we can
+     * inline the isInsideNursery check into embedder code. Use the start()
+     * and end() functions to access these values.
+     */
+    JSRuntime *runtime_;
+
+    /* Pointer to the first unallocated byte in the nursery. */
+    uintptr_t position_;
+
+    /*
+     * The set of externally malloced slots potentially kept live by objects
+     * stored in the nursery. Any external slots that do not belong to a
+     * tenured thing at the end of a minor GC must be freed.
+     */
+    typedef HashSet<HeapSlot *, PointerHasher<HeapSlot *, 3>, SystemAllocPolicy> HugeSlotsSet;
+    HugeSlotsSet hugeSlots;
+
+    /* The marking bitmap for the fallback marker. */
+    const static size_t ThingAlignment = sizeof(Value);
+    const static size_t FallbackBitmapBits = NurserySize / ThingAlignment;
+    BitArray<FallbackBitmapBits> fallbackBitmap;
+
+#ifdef DEBUG
+    /*
+     * In DEBUG builds, these bytes indicate the state of an unused segment of
+     * nursery-allocated memory.
+     */
+    const static uint8_t FreshNursery = 0x2a;
+    const static uint8_t SweptNursery = 0x2b;
+    const static uint8_t AllocatedThing = 0x2c;
+#endif
+
+    /* The maximum number of slots allowed to reside inline in the nursery. */
+    const static size_t MaxNurserySlots = 100;
+
+    /* The amount of space in the mapped nursery available to allocations. */
+    const static size_t NurseryUsableSize = NurserySize - sizeof(JSRuntime *);
+
+    struct Layout {
+        char data[NurseryUsableSize];
+        JSRuntime *runtime;
+    };
+    Layout &asLayout() {
+        JS_STATIC_ASSERT(sizeof(Layout) == NurserySize);
+        JS_ASSERT(start());
+        return *reinterpret_cast<Layout *>(start());
+    }
+
+    JS_ALWAYS_INLINE uintptr_t start() const {
+        JS_ASSERT(runtime_);
+        return ((JS::shadow::Runtime *)runtime_)->gcNurseryStart_;
+    }
+
+    JS_ALWAYS_INLINE uintptr_t end() const {
+        JS_ASSERT(runtime_);
+        return ((JS::shadow::Runtime *)runtime_)->gcNurseryEnd_;
+    }
+
+    uintptr_t position() const { return position_; }
+
+    JSRuntime *runtime() const { return runtime_; }
+
+    /* Allocates and registers external slots with the nursery. */
+    HeapSlot *allocateHugeSlots(JSContext *cx, size_t nslots);
+
+    /* Allocates a new GC thing from the tenured generation during minor GC. */
+    void *allocateFromTenured(Zone *zone, gc::AllocKind thingKind);
+
+    /*
+     * Move the object at |src| in the Nursery to an already-allocated cell
+     * |dst| in Tenured.
+     */
+    void *moveToTenured(gc::MinorCollectionTracer *trc, JSObject *src);
+    void moveObjectToTenured(JSObject *dst, JSObject *src, gc::AllocKind dstKind);
+    void moveElementsToTenured(JSObject *dst, JSObject *src, gc::AllocKind dstKind);
+    void moveSlotsToTenured(JSObject *dst, JSObject *src, gc::AllocKind dstKind);
+
+    /* Handle fallback marking. See the comment in markStoreBuffer. */
+    void markFallback(gc::Cell *cell);
+    void moveFallbackToTenured(gc::MinorCollectionTracer *trc);
+
+    void markStoreBuffer(gc::MinorCollectionTracer *trc);
+
+    /*
+     * Frees all non-live nursery-allocated things at the end of a minor
+     * collection. This operation takes time proportional to the number of
+     * dead things.
+     */
+    void sweep(FreeOp *fop);
+
+    static void MinorGCCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind);
+    static void MinorFallbackMarkingCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind);
+    static void MinorFallbackFixupCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind);
+
+    friend class gc::MinorCollectionTracer;
+};
+
+} /* namespace js */
+
+#endif /* JSGC_GENERATIONAL */
+#endif /* jsgc_nursery_h___ */
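The Layout struct places a JSRuntime * in the last word of the chunk-sized, chunk-aligned mapping; presumably this lets any nursery cell address be mapped back to its runtime with a mask and a load, since Alignment == gc::ChunkSize. A sketch of that inference (not code from the patch; SketchRuntimeFromNurseryCell is hypothetical):

static JSRuntime *
SketchRuntimeFromNurseryCell(void *cell)
{
    /* The mapping is NurserySize-aligned, so masking recovers its start. */
    uintptr_t start = uintptr_t(cell) & ~js::Nursery::NurseryMask;
    /* The runtime pointer occupies the final word of the mapping. */
    return *reinterpret_cast<JSRuntime **>(
        start + js::Nursery::NurserySize - sizeof(JSRuntime *));
}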
--- a/js/src/gc/StoreBuffer.cpp
+++ b/js/src/gc/StoreBuffer.cpp
@@ -71,16 +71,23 @@ StoreBuffer::MonoTypeBuffer<T>::enable(u
 template <typename T>
 void
 StoreBuffer::MonoTypeBuffer<T>::disable()
 {
     base = pos = top = NULL;
 }
 
 template <typename T>
+void
+StoreBuffer::MonoTypeBuffer<T>::clear()
+{
+    pos = base;
+}
+
+template <typename T>
 template <typename NurseryType>
 void
 StoreBuffer::MonoTypeBuffer<T>::compactNotInSet(NurseryType *nursery)
 {
     T *insert = base;
     for (T *v = base; v != pos; ++v) {
         if (v->inRememberedSet(nursery))
             *insert++ = *v;
@@ -90,17 +97,19 @@ StoreBuffer::MonoTypeBuffer<T>::compactN
 
 template <typename T>
 void
 StoreBuffer::MonoTypeBuffer<T>::compact()
 {
 #ifdef JS_GC_ZEAL
     if (owner->runtime->gcVerifyPostData)
         compactNotInSet(&owner->runtime->gcVerifierNursery);
+    else
 #endif
+        compactNotInSet(&owner->runtime->gcNursery);
 }
 
 template <typename T>
 void
 StoreBuffer::MonoTypeBuffer<T>::put(const T &v)
 {
     /* Check if we have been enabled. */
     if (!pos)
@@ -108,18 +117,37 @@ StoreBuffer::MonoTypeBuffer<T>::put(cons
 
     /*
      * Note: it is sometimes valid for a put to happen in the middle of a GC,
      * e.g. a rekey of a Relocatable may end up here. In general, we do not
      * care about these new entries or any overflows they cause.
      */
     *pos++ = v;
     if (isFull()) {
-        owner->setOverflowed();
-        pos = base;
+        compact();
+        if (isFull()) {
+            owner->setOverflowed();
+            pos = base;
+        }
+    }
+}
+
+template <typename T>
+void
+StoreBuffer::MonoTypeBuffer<T>::mark(JSTracer *trc)
+{
+    compact();
+    T *cursor = base;
+    while (cursor != pos) {
+        T edge = *cursor++;
+
+        if (edge.isNullEdge())
+            continue;
+
+        edge.mark(trc);
     }
 }
 
 template <typename T>
 bool
 StoreBuffer::MonoTypeBuffer<T>::accumulateEdges(EdgeSet &edges)
 {
     compact();
@@ -191,37 +219,84 @@ StoreBuffer::GenericBuffer::enable(uint8
 }
 
 void
 StoreBuffer::GenericBuffer::disable()
 {
     base = pos = top = NULL;
 }
 
+void
+StoreBuffer::GenericBuffer::clear()
+{
+    pos = base;
+}
+
+void
+StoreBuffer::GenericBuffer::mark(JSTracer *trc)
+{
+    uint8_t *p = base;
+    while (p < pos) {
+        unsigned size = *((unsigned *)p);
+        p += sizeof(unsigned);
+
+        BufferableRef *edge = reinterpret_cast<BufferableRef *>(p);
+        edge->mark(trc);
+
+        p += size;
+    }
+}
+
 bool
 StoreBuffer::GenericBuffer::containsEdge(void *location) const
 {
     uint8_t *p = base;
     while (p < pos) {
         unsigned size = *((unsigned *)p);
         p += sizeof(unsigned);
 
         if (((BufferableRef *)p)->match(location))
             return true;
 
         p += size;
     }
     return false;
 }
 
+/*** Edges ***/
+
+void
+StoreBuffer::CellPtrEdge::mark(JSTracer *trc)
+{
+    MarkObjectRoot(trc, reinterpret_cast<JSObject**>(edge), "store buffer edge");
+}
+
+void
+StoreBuffer::ValueEdge::mark(JSTracer *trc)
+{
+    MarkValueRoot(trc, edge, "store buffer edge");
+}
+
+void
+StoreBuffer::SlotEdge::mark(JSTracer *trc)
+{
+    if (kind == HeapSlot::Element)
+        MarkSlot(trc, (HeapSlot*)&object->getDenseElement(offset), "store buffer edge");
+    else
+        MarkSlot(trc, &object->getSlotRef(offset), "store buffer edge");
+}
+
 /*** StoreBuffer ***/
 
 bool
 StoreBuffer::enable()
 {
+    if (enabled)
+        return true;
+
     buffer = js_malloc(TotalSize);
     if (!buffer)
         return false;
 
     /* Initialize the individual edge buffers in sub-regions. */
     uint8_t *asBytes = static_cast<uint8_t *>(buffer);
     size_t offset = 0;
 
@@ -269,16 +344,53 @@ StoreBuffer::disable()
     bufferGeneric.disable();
 
     js_free(buffer);
     enabled = false;
     overflowed = false;
 }
 
 bool
+StoreBuffer::clear()
+{
+    if (!enabled)
+        return true;
+
+    bufferVal.clear();
+    bufferCell.clear();
+    bufferSlot.clear();
+    bufferRelocVal.clear();
+    bufferRelocCell.clear();
+    bufferGeneric.clear();
+
+    return true;
+}
+
+void
+StoreBuffer::mark(JSTracer *trc)
+{
+    JS_ASSERT(isEnabled());
+    JS_ASSERT(!overflowed);
+
+    bufferVal.mark(trc);
+    bufferCell.mark(trc);
+    bufferSlot.mark(trc);
+    bufferRelocVal.mark(trc);
+    bufferRelocCell.mark(trc);
+    bufferGeneric.mark(trc);
+}
+
+void
+StoreBuffer::setOverflowed()
+{
+    JS_ASSERT(enabled);
+    overflowed = true;
+}
+
+bool
 StoreBuffer::coalesceForVerification()
 {
     if (!edgeSet.initialized()) {
         if (!edgeSet.init())
             return false;
     }
     JS_ASSERT(edgeSet.empty());
     if (!bufferVal.accumulateEdges(edgeSet))
--- a/js/src/gc/StoreBuffer.h
+++ b/js/src/gc/StoreBuffer.h
@@ -24,17 +24,17 @@ namespace gc {
  * Note: this is a stub Nursery that does not actually contain a heap, just a
  * set of pointers which are "inside" the nursery to implement verification.
  */
 class VerifierNursery
 {
     HashSet<const void *, PointerHasher<const void *, 3>, SystemAllocPolicy> nursery;
 
   public:
-    VerifierNursery() : nursery() {}
+    explicit VerifierNursery() : nursery() {}
 
     bool enable() {
         if (!nursery.initialized())
             return nursery.init();
         return true;
     }
 
     void disable() {
@@ -48,17 +48,16 @@ class VerifierNursery
     }
 
     bool clear() {
         disable();
         return enable();
     }
 
     bool isInside(const void *cell) const {
-        JS_ASSERT((uintptr_t(cell) & 0x3) == 0);
         return nursery.initialized() && nursery.has(cell);
     }
 
     void insertPointer(void *cell) {
         nursery.putNew(cell);
     }
 };
 #endif /* JS_GC_ZEAL */
@@ -82,58 +81,49 @@ class BufferableRef
  * type to insert into the generic buffer with putGeneric.
  */
 template <typename Map, typename Key>
 class HashKeyRef : public BufferableRef
 {
     Map *map;
     Key key;
 
+    typedef typename Map::Entry::ValueType ValueType;
     typedef typename Map::Ptr Ptr;
 
   public:
     HashKeyRef(Map *m, const Key &k) : map(m), key(k) {}
 
     bool match(void *location) {
         Ptr p = map->lookup(key);
         if (!p)
             return false;
         return &p->key == location;
     }
 
-    void mark(JSTracer *trc) {}
+    void mark(JSTracer *trc) {
+        Key prior = key;
+        typename Map::Ptr p = map->lookup(key);
+        if (!p)
+            return;
+        ValueType value = p->value;
+        Mark(trc, &key, "HashKeyRef");
+        if (prior != key) {
+            map->remove(prior);
+            map->put(key, value);
+        }
+    }
 };
 
 /*
  * The StoreBuffer observes all writes that occur in the system and performs
  * efficient filtering of them to derive a remembered set for nursery GC.
  */
 class StoreBuffer
 {
-#ifdef JS_GC_ZEAL
-    /* For verification, we approximate an infinitly large buffer. */
-    static const size_t ValueBufferSize = 1024 * 1024 * sizeof(Value *);
-    static const size_t CellBufferSize = 1024 * 1024 * sizeof(Cell **);
-    static const size_t SlotBufferSize = 1024 * 1024 * (sizeof(JSObject *) + 2 * sizeof(uint32_t));
-    static const size_t RelocValueBufferSize = 1 * 1024 * sizeof(Value *);
-    static const size_t RelocCellBufferSize = 1 * 1024 * sizeof(Cell **);
-    static const size_t GenericBufferSize = 1024 * 1024 * sizeof(int);
-#else
-    /* TODO: profile to find the ideal size for these. */
-    static const size_t ValueBufferSize = 1 * 1024 * sizeof(Value *);
-    static const size_t CellBufferSize = 2 * 1024 * sizeof(Cell **);
-    static const size_t SlotBufferSize = 2 * 1024 * (sizeof(JSObject *) + sizeof(uint32_t));
-    static const size_t RelocValueBufferSize = 1 * 1024 * sizeof(Value *);
-    static const size_t RelocCellBufferSize = 1 * 1024 * sizeof(Cell **);
-    static const size_t GenericBufferSize = 1 * 1024 * sizeof(int);
-#endif
-    static const size_t TotalSize = ValueBufferSize + CellBufferSize +
-                                    SlotBufferSize + RelocValueBufferSize + RelocCellBufferSize +
-                                    GenericBufferSize;
-
     typedef HashSet<void *, PointerHasher<void *, 3>, SystemAllocPolicy> EdgeSet;
 
     /*
      * This buffer holds only a single type of edge. Using this buffer is more
      * efficient than the generic buffer when many writes will be to the same
      * type of edge: e.g. Value or Cell*.
      */
     template<typename T>
@@ -150,16 +140,17 @@ class StoreBuffer
         MonoTypeBuffer(StoreBuffer *owner)
           : owner(owner), base(NULL), pos(NULL), top(NULL)
         {}
 
         MonoTypeBuffer &operator=(const MonoTypeBuffer& other) MOZ_DELETE;
 
         bool enable(uint8_t *region, size_t len);
         void disable();
+        void clear();
 
         bool isEmpty() const { return pos == base; }
         bool isFull() const { JS_ASSERT(pos <= top); return pos == top; }
 
         /* Compaction algorithms. */
         template <typename NurseryType>
         void compactNotInSet(NurseryType *nursery);
 
@@ -167,16 +158,19 @@ class StoreBuffer
          * Attempts to reduce the usage of the buffer by removing unnecessary
          * entries.
          */
         virtual void compact();
 
         /* Add one item to the buffer. */
         void put(const T &v);
 
+        /* Mark the source of all edges in the store buffer. */
+        void mark(JSTracer *trc);
+
         /* For verification. */
         bool accumulateEdges(EdgeSet &edges);
     };
 
     /*
      * Overrides the MonoTypeBuffer to support pointers that may be moved in
      * memory outside of the GC's control.
      */
@@ -210,16 +204,20 @@ class StoreBuffer
         GenericBuffer(StoreBuffer *owner)
           : owner(owner)
         {}
 
         GenericBuffer &operator=(const GenericBuffer& other) MOZ_DELETE;
 
         bool enable(uint8_t *region, size_t len);
         void disable();
+        void clear();
+
+        /* Mark all generic edges. */
+        void mark(JSTracer *trc);
 
         /* Check if a pointer is present in the buffer. */
         bool containsEdge(void *location) const;
 
         template <typename T>
         void put(const T &t) {
             /* Check if we have been enabled. */
             if (!pos)
@@ -258,16 +256,18 @@ class StoreBuffer
         bool inRememberedSet(NurseryType *nursery) const {
             return !nursery->isInside(edge) && nursery->isInside(*edge);
         }
 
         bool isNullEdge() const {
             return !*edge;
         }
 
+        void mark(JSTracer *trc);
+
         CellPtrEdge tagged() const { return CellPtrEdge((Cell **)(uintptr_t(edge) | 1)); }
         CellPtrEdge untagged() const { return CellPtrEdge((Cell **)(uintptr_t(edge) & ~1)); }
         bool isTagged() const { return bool(uintptr_t(edge) & 1); }
     };
 
     class ValueEdge
     {
         friend class StoreBuffer;
@@ -287,16 +287,18 @@ class StoreBuffer
         bool inRememberedSet(NurseryType *nursery) const {
             return !nursery->isInside(edge) && nursery->isInside(deref());
         }
 
         bool isNullEdge() const {
             return !deref();
         }
 
+        void mark(JSTracer *trc);
+
         ValueEdge tagged() const { return ValueEdge((Value *)(uintptr_t(edge) | 1)); }
         ValueEdge untagged() const { return ValueEdge((Value *)(uintptr_t(edge) & ~1)); }
         bool isTagged() const { return bool(uintptr_t(edge) & 1); }
     };
 
     struct SlotEdge
     {
         friend class StoreBuffer;
@@ -323,16 +325,18 @@ class StoreBuffer
         JS_ALWAYS_INLINE void *deref() const;
 
         JS_ALWAYS_INLINE void *location() const;
 
         template <typename NurseryType>
         JS_ALWAYS_INLINE bool inRememberedSet(NurseryType *nursery) const;
 
         JS_ALWAYS_INLINE bool isNullEdge() const;
+
+        void mark(JSTracer *trc);
     };
 
     MonoTypeBuffer<ValueEdge> bufferVal;
     MonoTypeBuffer<CellPtrEdge> bufferCell;
     MonoTypeBuffer<SlotEdge> bufferSlot;
     RelocatableMonoTypeBuffer<ValueEdge> bufferRelocVal;
     RelocatableMonoTypeBuffer<CellPtrEdge> bufferRelocCell;
     GenericBuffer bufferGeneric;
@@ -342,34 +346,52 @@ class StoreBuffer
     void *buffer;
 
     bool overflowed;
     bool enabled;
 
     /* For the verifier. */
     EdgeSet edgeSet;
 
+#ifdef JS_GC_ZEAL
+    /* For verification, we approximate an infinitely large buffer. */
+    static const size_t ValueBufferSize = 1024 * 1024 * sizeof(ValueEdge);
+    static const size_t CellBufferSize = 1024 * 1024 * sizeof(CellPtrEdge);
+    static const size_t SlotBufferSize = 1024 * 1024 * sizeof(SlotEdge);
+    static const size_t RelocValueBufferSize = 1 * 1024 * sizeof(ValueEdge);
+    static const size_t RelocCellBufferSize = 1 * 1024 * sizeof(CellPtrEdge);
+    static const size_t GenericBufferSize = 1024 * 1024 * sizeof(int);
+#else
+    /* TODO: profile to find the ideal size for these. */
+    static const size_t ValueBufferSize = 1 * 1024 * sizeof(ValueEdge);
+    static const size_t CellBufferSize = 2 * 1024 * sizeof(CellPtrEdge);
+    static const size_t SlotBufferSize = 2 * 1024 * sizeof(SlotEdge);
+    static const size_t RelocValueBufferSize = 1 * 1024 * sizeof(ValueEdge);
+    static const size_t RelocCellBufferSize = 1 * 1024 * sizeof(CellPtrEdge);
+    static const size_t GenericBufferSize = 1 * 1024 * sizeof(int);
+#endif
+    static const size_t TotalSize = ValueBufferSize + CellBufferSize +
+                                    SlotBufferSize + RelocValueBufferSize + RelocCellBufferSize +
+                                    GenericBufferSize;
+
     /* For use by our owned buffers. */
-    void setOverflowed() { overflowed = true; }
+    void setOverflowed();
 
   public:
-    StoreBuffer(JSRuntime *rt)
+    explicit StoreBuffer(JSRuntime *rt)
       : bufferVal(this), bufferCell(this), bufferSlot(this),
         bufferRelocVal(this), bufferRelocCell(this), bufferGeneric(this),
         runtime(rt), buffer(NULL), overflowed(false), enabled(false)
     {}
 
     bool enable();
     void disable();
     bool isEnabled() { return enabled; }
 
-    bool clear() {
-        disable();
-        return enable();
-    }
+    bool clear();
 
     /* Get the overflowed status. */
     bool hasOverflowed() const { return overflowed; }
 
     /* Insert a single edge into the buffer/remembered set. */
     void putValue(Value *v) {
         bufferVal.put(v);
     }
@@ -395,16 +417,19 @@ class StoreBuffer
     }
 
     /* Insert an entry into the generic buffer. */
     template <typename T>
     void putGeneric(const T &t) {
         bufferGeneric.put(t);
     }
 
+    /* Mark the source of all edges in the store buffer. */
+    void mark(JSTracer *trc);
+
     /* For the verifier. */
     bool coalesceForVerification();
     void releaseVerificationData();
     bool containsEdgeAt(void *loc) const;
 };
 
 } /* namespace gc */
 } /* namespace js */
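With mark() in place, the store buffer is a true remembered set: each put records the address of a written location, and a minor GC marks through those edges instead of scanning the whole tenured heap. A simplified sketch of the post-write-barrier usage this enables (the engine's actual barriers are implemented on HeapSlot/HeapValue; SketchPostWriteBarrier is hypothetical):

static void
SketchPostWriteBarrier(js::gc::StoreBuffer &sb, js::Value *slot, const js::Value &v)
{
    *slot = v;          /* the actual store */
    sb.putValue(slot);  /* remember the slot; compact() later drops
                           entries that do not point into the nursery */
}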
--- a/js/src/gc/Verifier.cpp
+++ b/js/src/gc/Verifier.cpp
@@ -321,30 +321,37 @@ JS::CheckStackRoots(JSContext *cx)
 
 static void
 DisableGGCForVerification(JSRuntime *rt)
 {
 #ifdef JSGC_GENERATIONAL
     if (rt->gcVerifyPreData || rt->gcVerifyPostData)
         return;
 
+    if (rt->gcNursery.isEnabled()) {
+        MinorGC(rt, JS::gcreason::API);
+        rt->gcNursery.disable();
+    }
+
     if (rt->gcStoreBuffer.isEnabled())
         rt->gcStoreBuffer.disable();
 #endif
 }
 
 static void
 EnableGGCAfterVerification(JSRuntime *rt)
 {
 #ifdef JSGC_GENERATIONAL
     if (rt->gcVerifyPreData || rt->gcVerifyPostData)
         return;
 
-    if (rt->gcGenerationalEnabled)
+    if (rt->gcGenerationalEnabled) {
+        rt->gcNursery.enable();
         rt->gcStoreBuffer.enable();
+    }
 #endif
 }
 
 /*
  * Write barrier verification
  *
  * The next few functions are for write barrier verification.
  *
@@ -759,20 +766,16 @@ js::gc::EndVerifyPostBarriers(JSRuntime 
 
     if (rt->gcStoreBuffer.hasOverflowed())
         goto oom;
 
     if (!rt->gcStoreBuffer.coalesceForVerification())
         goto oom;
 
     /* Walk the heap. */
-    for (CompartmentsIter comp(rt); !comp.done(); comp.next()) {
-        if (comp->watchpointMap)
-            comp->watchpointMap->markAll(trc);
-    }
     for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) {
         for (size_t kind = 0; kind < FINALIZE_LIMIT; ++kind) {
             for (CellIterUnderGC cells(zone, AllocKind(kind)); !cells.done(); cells.next()) {
                 Cell *src = cells.getCell();
                 if (!rt->gcVerifierNursery.isInside(src))
                     JS_TraceChildren(trc, src, MapAllocToTraceKind(AllocKind(kind)));
             }
         }
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -176,17 +176,17 @@ struct Zone : private JS::shadow::Zone, 
         return gcPreserveCode;
     }
 
     /*
      * If this returns true, all object tracing must be done with a GC marking
      * tracer.
      */
     bool requireGCTracer() const {
-        return rt->isHeapCollecting() && gcState != NoGC;
+        return rt->isHeapMajorCollecting() && gcState != NoGC;
     }
 
     void setGCState(CompartmentGCState state) {
         JS_ASSERT(rt->isHeapBusy());
         gcState = state;
     }
 
     void scheduleGC() {
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -236,17 +236,17 @@ static void
 AssertHeapIsIdle(JSContext *cx)
 {
     AssertHeapIsIdle(cx->runtime);
 }
 
 static void
 AssertHeapIsIdleOrIterating(JSRuntime *rt)
 {
-    JS_ASSERT(rt->heapState != js::Collecting);
+    JS_ASSERT(!rt->isHeapCollecting());
 }
 
 static void
 AssertHeapIsIdleOrIterating(JSContext *cx)
 {
     AssertHeapIsIdleOrIterating(cx->runtime);
 }
 
@@ -838,16 +838,17 @@ JSRuntime::JSRuntime(JSUseHelperThreads 
     gcManipulatingDeadZones(false),
     gcObjectsMarkedInDeadZones(0),
     gcPoke(false),
     heapState(Idle),
 #ifdef JSGC_GENERATIONAL
 # ifdef JS_GC_ZEAL
     gcVerifierNursery(),
 # endif
+    gcNursery(thisFromCtor()),
     gcStoreBuffer(thisFromCtor()),
 #endif
 #ifdef JS_GC_ZEAL
     gcZeal_(0),
     gcZealFrequency(0),
     gcNextScheduled(0),
     gcDeterministicOnly(false),
     gcIncrementalLimit(0),
@@ -1069,16 +1070,21 @@ JSRuntime::~JSRuntime()
 #endif
 #ifdef JS_ION
     js_delete(ionRuntime_);
 #endif
     js_delete(execAlloc_);  /* Delete after jaegerRuntime_. */
 
     if (ionPcScriptCache)
         js_delete(ionPcScriptCache);
+
+#ifdef JSGC_GENERATIONAL
+    gcStoreBuffer.disable();
+    gcNursery.disable();
+#endif
 }
 
 #ifdef JS_THREADSAFE
 void
 JSRuntime::setOwnerThread()
 {
     JS_ASSERT(ownerThread_ == (void *)0xc1ea12);  /* "clear" */
     JS_ASSERT(requestDepth == 0);
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -24,16 +24,17 @@
 #include "jsgc.h"
 #include "jspropertycache.h"
 #include "jspropertytree.h"
 #include "jsprototypes.h"
 #include "jsutil.h"
 #include "prmjtime.h"
 
 #include "ds/LifoAlloc.h"
+#include "gc/Nursery.h"
 #include "gc/Statistics.h"
 #include "gc/StoreBuffer.h"
 #include "js/HashTable.h"
 #include "js/Vector.h"
 #include "ion/AsmJS.h"
 #include "vm/DateTime.h"
 #include "vm/SPSProfiler.h"
 #include "vm/Stack.h"
@@ -598,18 +599,22 @@ struct MallocProvider
         Client *client = static_cast<Client *>(this);
         client->updateMallocCounter(bytes);
         void *p = js_calloc(bytes);
         return JS_LIKELY(!!p) ? p : client->onOutOfMemory(reinterpret_cast<void *>(1), bytes);
     }
 
     void *realloc_(void *p, size_t oldBytes, size_t newBytes) {
         Client *client = static_cast<Client *>(this);
-        JS_ASSERT(oldBytes < newBytes);
-        client->updateMallocCounter(newBytes - oldBytes);
+        /*
+         * For compatibility we do not account for realloc that decreases
+         * previously allocated memory.
+         */
+        if (newBytes > oldBytes)
+            client->updateMallocCounter(newBytes - oldBytes);
         void *p2 = js_realloc(p, newBytes);
         return JS_LIKELY(!!p2) ? p2 : client->onOutOfMemory(p, newBytes);
     }
 
     void *realloc_(void *p, size_t bytes) {
         Client *client = static_cast<Client *>(this);
         /*
          * For compatibility we do not account for realloc that increases
@@ -659,17 +664,17 @@ class MarkingValidator;
 } // namespace gc
 
 class JS_FRIEND_API(AutoEnterPolicy);
 
 typedef Vector<JS::Zone *, 1, SystemAllocPolicy> ZoneVector;
 
 } // namespace js
 
-struct JSRuntime : private JS::shadow::Runtime,
+struct JSRuntime : public JS::shadow::Runtime,
                    public js::MallocProvider<JSRuntime>
 {
     /*
      * Per-thread data for the main thread that is associated with
      * this JSRuntime, as opposed to any worker threads used in
      * parallel sections.  See definition of |PerThreadData| struct
      * above for more details.
      *
@@ -932,17 +937,17 @@ struct JSRuntime : private JS::shadow::R
 
     /* Incremented on every GC slice. */
     uint64_t            gcNumber;
 
     /* The gcNumber at the time of the most recent GC's first slice. */
     uint64_t            gcStartNumber;
 
     /* Whether the currently running GC can finish in multiple slices. */
-    int                 gcIsIncremental;
+    bool                gcIsIncremental;
 
     /* Whether all compartments are being collected in first GC slice. */
     bool                gcIsFull;
 
     /* The reason that an interrupt-triggered GC should be called. */
     JS::gcreason::Reason gcTriggerReason;
 
     /*
@@ -1037,23 +1042,25 @@ struct JSRuntime : private JS::shadow::R
      */
     unsigned            gcObjectsMarkedInDeadZones;
 
     bool                gcPoke;
 
     volatile js::HeapState heapState;
 
     bool isHeapBusy() { return heapState != js::Idle; }
-
-    bool isHeapCollecting() { return heapState == js::Collecting; }
+    bool isHeapMajorCollecting() { return heapState == js::MajorCollecting; }
+    bool isHeapMinorCollecting() { return heapState == js::MinorCollecting; }
+    bool isHeapCollecting() { return isHeapMajorCollecting() || isHeapMinorCollecting(); }
 
 #ifdef JSGC_GENERATIONAL
 # ifdef JS_GC_ZEAL
     js::gc::VerifierNursery      gcVerifierNursery;
 # endif
+    js::Nursery                  gcNursery;
     js::gc::StoreBuffer          gcStoreBuffer;
 #endif
 
     /*
      * These options control the zealousness of the GC. The fundamental values
      * are gcNextScheduled and gcDebugCompartmentGC. At every allocation,
      * gcNextScheduled is decremented. When it reaches zero, we do either a
      * full or a compartmental GC, based on gcDebugCompartmentGC.
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -483,16 +483,36 @@ JSCompartment::markCrossCompartmentWrapp
              */
             Value referent = GetProxyPrivate(wrapper);
             MarkValueRoot(trc, &referent, "cross-compartment wrapper");
             JS_ASSERT(referent == GetProxyPrivate(wrapper));
         }
     }
 }
 
+/*
+ * This method marks, and thereby keeps live, all pointers in the cross
+ * compartment wrapper map. It should be called only for minor GCs, since
+ * minor GCs cannot, by their nature, apply the weak constraint to safely
+ * remove items from the wrapper map.
+ */
+void
+JSCompartment::markAllCrossCompartmentWrappers(JSTracer *trc)
+{
+    for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
+        CrossCompartmentKey key = e.front().key;
+        MarkGCThingRoot(trc, (void **)&key.wrapped, "CrossCompartmentKey::wrapped");
+        if (key.debugger)
+            MarkObjectRoot(trc, &key.debugger, "CrossCompartmentKey::debugger");
+        MarkValueRoot(trc, e.front().value.unsafeGet(), "CrossCompartmentWrapper");
+        if (key.wrapped != e.front().key.wrapped || key.debugger != e.front().key.debugger)
+            e.rekeyFront(key);
+    }
+}
+
 void
 JSCompartment::mark(JSTracer *trc)
 {
 #ifdef JS_ION
     if (ionCompartment_)
         ionCompartment_->mark(trc, this);
 #endif
 
@@ -755,16 +775,17 @@ JSCompartment::clearBreakpointsIn(FreeOp
         if (script->compartment() == this && script->hasAnyBreakpointsOrStepMode())
             script->clearBreakpointsIn(fop, dbg, handler);
     }
 }
 
 void
 JSCompartment::clearTraps(FreeOp *fop)
 {
+    MinorGC(rt, JS::gcreason::EVICT_NURSERY);
     for (gc::CellIter i(zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
         JSScript *script = i.get<JSScript>();
         if (script->compartment() == this && script->hasAnyBreakpointsOrStepMode())
             script->clearTraps(fop);
     }
 }
 
 void
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -207,16 +207,17 @@ struct JSCompartment
 
     /* Set of all unowned base shapes in the compartment. */
     js::BaseShapeSet             baseShapes;
     void sweepBaseShapeTable();
 
     /* Set of initial shapes in the compartment. */
     js::InitialShapeSet          initialShapes;
     void sweepInitialShapeTable();
+    void markAllInitialShapeTableEntries(JSTracer *trc);
 
     /* Set of default 'new' or lazy types in the compartment. */
     js::types::TypeObjectSet     newTypeObjects;
     js::types::TypeObjectSet     lazyTypeObjects;
     void sweepNewTypeObjectTable(js::types::TypeObjectSet &table);
 
     js::types::TypeObject *getNewType(JSContext *cx, js::Class *clasp, js::TaggedProto proto,
                                       JSFunction *fun = NULL);
@@ -257,16 +258,17 @@ struct JSCompartment
   public:
     JSCompartment(JS::Zone *zone);
     ~JSCompartment();
 
     bool init(JSContext *cx);
 
     /* Mark cross-compartment wrappers. */
     void markCrossCompartmentWrappers(JSTracer *trc);
+    void markAllCrossCompartmentWrappers(JSTracer *trc);
 
     bool wrap(JSContext *cx, JS::MutableHandleValue vp, JS::HandleObject existing = js::NullPtr());
     bool wrap(JSContext *cx, JSString **strp);
     bool wrap(JSContext *cx, js::HeapPtrString *strp);
     bool wrap(JSContext *cx, JSObject **objp, JSObject *existing = NULL);
     bool wrapId(JSContext *cx, jsid *idp);
     bool wrap(JSContext *cx, js::PropertyOp *op);
     bool wrap(JSContext *cx, js::StrictPropertyOp *op);
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -864,16 +864,17 @@ JS::DisableIncrementalGC(JSRuntime *rt)
     rt->gcIncrementalEnabled = false;
 }
 
 extern JS_FRIEND_API(void)
 JS::DisableGenerationalGC(JSRuntime *rt)
 {
     rt->gcGenerationalEnabled = false;
 #ifdef JSGC_GENERATIONAL
+    rt->gcNursery.disable();
     rt->gcStoreBuffer.disable();
 #endif
 }
 
 JS_FRIEND_API(bool)
 JS::IsIncrementalBarrierNeeded(JSRuntime *rt)
 {
     return (rt->gcIncrementalState == gc::MARK && !rt->isHeapBusy());
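
A hedged usage sketch (the runtime-creation call is assumed boilerplate): an embedder that cannot tolerate nursery allocation can opt out once at startup, which is exactly how the shell's new --no-ggc option is wired further down.

    JSRuntime *rt = JS_NewRuntime(32L * 1024L * 1024L, JS_USE_HELPER_THREADS);
    if (rt)
        JS::DisableGenerationalGC(rt);
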
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -87,16 +87,17 @@
 #include "ion/IonFrameIterator.h"
 #endif
 
 #include "jsgcinlines.h"
 #include "jsinterpinlines.h"
 #include "jsobjinlines.h"
 
 #include "gc/FindSCCs-inl.h"
+#include "gc/Nursery-inl.h"
 #include "vm/ScopeObject-inl.h"
 #include "vm/String-inl.h"
 
 #ifdef XP_WIN
 # include "jswin.h"
 #else
 # include <unistd.h>
 #endif
@@ -772,18 +773,17 @@ Chunk::fetchNextFreeArena(JSRuntime *rt)
 }
 
 ArenaHeader *
 Chunk::allocateArena(Zone *zone, AllocKind thingKind)
 {
     JS_ASSERT(hasAvailableArenas());
 
     JSRuntime *rt = zone->rt;
-    JS_ASSERT(rt->gcBytes <= rt->gcMaxBytes);
-    if (rt->gcMaxBytes - rt->gcBytes < ArenaSize)
+    if (!rt->isHeapMinorCollecting() && rt->gcBytes >= rt->gcMaxBytes)
         return NULL;
 
     ArenaHeader *aheader = JS_LIKELY(info.numArenasFreeCommitted > 0)
                            ? fetchNextFreeArena(rt)
                            : fetchNextDecommittedArena();
     aheader->init(zone, thingKind);
     if (JS_UNLIKELY(!hasAvailableArenas()))
         removeFromAvailableList();
@@ -969,16 +969,19 @@ js_InitGC(JSRuntime *rt, uint32_t maxbyt
     rt->gcMaxBytes = maxbytes;
     rt->setGCMaxMallocBytes(maxbytes);
 
 #ifndef JS_MORE_DETERMINISTIC
     rt->gcJitReleaseTime = PRMJ_Now() + JIT_SCRIPT_RELEASE_TYPES_INTERVAL;
 #endif
 
 #ifdef JSGC_GENERATIONAL
+    if (!rt->gcNursery.enable())
+        return false;
+
     if (!rt->gcStoreBuffer.enable())
         return false;
 #endif
 
 #ifdef JS_GC_ZEAL
     if (!InitGCZeal(rt))
         return false;
 #endif
@@ -1197,21 +1200,21 @@ ArenaLists::parallelAllocate(Zone *zone,
      * fails, then we return NULL which will cause the parallel
      * section to abort.
      */
 
     void *t = allocateFromFreeList(thingKind, thingSize);
     if (t)
         return t;
 
-    return allocateFromArena(zone, thingKind);
+    return allocateFromArenaInline(zone, thingKind);
 }
 
 inline void *
-ArenaLists::allocateFromArena(Zone *zone, AllocKind thingKind)
+ArenaLists::allocateFromArenaInline(Zone *zone, AllocKind thingKind)
 {
     /*
      * Parallel JS Note:
      *
     * This function can be called from parallel threads, all of which
      * are associated with the same compartment. In that case, each
      * thread will have a distinct ArenaLists.  Therefore, whenever we
      * fall through to PickChunk() we must be sure that we are holding
@@ -1319,16 +1322,22 @@ ArenaLists::allocateFromArena(Zone *zone
     /* See comments before allocateFromNewArena about this assert. */
     JS_ASSERT(!aheader->hasFreeThings());
     uintptr_t arenaAddr = aheader->arenaAddress();
     return freeLists[thingKind].allocateFromNewArena(arenaAddr,
                                                      Arena::firstThingOffset(thingKind),
                                                      Arena::thingSize(thingKind));
 }
 
+void *
+ArenaLists::allocateFromArena(JS::Zone *zone, AllocKind thingKind)
+{
+    return allocateFromArenaInline(zone, thingKind);
+}
+
 void
 ArenaLists::finalizeNow(FreeOp *fop, AllocKind thingKind)
 {
     JS_ASSERT(!IsBackgroundFinalized(thingKind));
     JS_ASSERT(backgroundFinalizeState[thingKind] == BFS_DONE ||
               backgroundFinalizeState[thingKind] == BFS_JUST_FINISHED);
 
     ArenaHeader *arenas = arenaLists[thingKind].head;
@@ -1532,17 +1541,17 @@ ArenaLists::refillFreeList(JSContext *cx
         * allocateFromArena may fail while background finalization is still
         * running. In that case we want to wait for it to finish and restart.
         * However, checking for that is racy: background finalization could
         * free some things after allocateFromArena decided to fail, yet have
         * already stopped by the time we check. To avoid this race we always
         * try to allocate twice.
          */
         for (bool secondAttempt = false; ; secondAttempt = true) {
-            void *thing = zone->allocator.arenas.allocateFromArena(zone, thingKind);
+            void *thing = zone->allocator.arenas.allocateFromArenaInline(zone, thingKind);
             if (JS_LIKELY(!!thing))
                 return thing;
             if (secondAttempt)
                 break;
 
             rt->gcHelperThread.waitBackgroundSweepEnd();
         }
 
@@ -4009,28 +4018,28 @@ class AutoGCSession : AutoTraceSession {
 
 /* Start a new heap session. */
 AutoTraceSession::AutoTraceSession(JSRuntime *rt, js::HeapState heapState)
   : runtime(rt),
     prevState(rt->heapState)
 {
     JS_ASSERT(!rt->noGCOrAllocationCheck);
     JS_ASSERT(!rt->isHeapBusy());
-    JS_ASSERT(heapState == Collecting || heapState == Tracing);
+    JS_ASSERT(heapState != Idle);
     rt->heapState = heapState;
 }
 
 AutoTraceSession::~AutoTraceSession()
 {
     JS_ASSERT(runtime->isHeapBusy());
     runtime->heapState = prevState;
 }
 
 AutoGCSession::AutoGCSession(JSRuntime *rt)
-  : AutoTraceSession(rt, Collecting)
+  : AutoTraceSession(rt, MajorCollecting)
 {
     runtime->gcIsNeeded = false;
     runtime->gcInterFrameGC = true;
 
     runtime->gcNumber++;
 }
 
 AutoGCSession::~AutoGCSession()
@@ -4467,16 +4476,39 @@ ShouldCleanUpEverything(JSRuntime *rt, J
     // has changed; debug mode affects the results of bytecode analysis, so
     // we need to clear everything away.
     return !rt->hasContexts() ||
            reason == JS::gcreason::SHUTDOWN_CC ||
            reason == JS::gcreason::DEBUG_MODE_GC ||
            gckind == GC_SHRINK;
 }
 
+#ifdef JSGC_GENERATIONAL
+class AutoDisableStoreBuffer
+{
+    JSRuntime *runtime;
+    bool prior;
+
+  public:
+    AutoDisableStoreBuffer(JSRuntime *rt) : runtime(rt) {
+        prior = rt->gcStoreBuffer.isEnabled();
+        rt->gcStoreBuffer.disable();
+    }
+    ~AutoDisableStoreBuffer() {
+        if (prior)
+            runtime->gcStoreBuffer.enable();
+    }
+};
+#else
+struct AutoDisableStoreBuffer
+{
+    AutoDisableStoreBuffer(JSRuntime *) {}
+};
+#endif
+
 static void
 Collect(JSRuntime *rt, bool incremental, int64_t budget,
         JSGCInvocationKind gckind, JS::gcreason::Reason reason)
 {
     /* GC shouldn't be running in parallel execution mode */
     JS_ASSERT(!InParallelSection());
 
     JS_AbortIfWrongThread(rt);
@@ -4521,16 +4553,24 @@ Collect(JSRuntime *rt, bool incremental,
             if (restartPreVerifier)
                 StartVerifyPreBarriers(runtime);
             if (restartPostVerifier)
                 StartVerifyPostBarriers(runtime);
         }
     } av(rt, isShutdown);
 #endif
 
+    MinorGC(rt, reason);
+
+    /*
+     * Marking can trigger many incidental post barriers, some of them for
+     * objects which are not going to be live after the GC.
+     */
+    AutoDisableStoreBuffer adsb(rt);
+
     RecordNativeStackTopForGC(rt);
 
     int zoneCount = 0;
     int compartmentCount = 0;
     int collectedCount = 0;
     for (ZonesIter zone(rt); !zone.done(); zone.next()) {
         if (rt->gcMode == JSGC_MODE_GLOBAL)
             zone->scheduleGC();
@@ -4647,16 +4687,24 @@ JS::ShrinkGCBuffers(JSRuntime *rt)
 
     if (!rt->useHelperThreads())
         ExpireChunksAndArenas(rt, true);
     else
         rt->gcHelperThread.startBackgroundShrink();
 }
 
 void
+js::MinorGC(JSRuntime *rt, JS::gcreason::Reason reason)
+{
+#ifdef JSGC_GENERATIONAL
+    rt->gcNursery.collect(rt, reason);
+#endif
+}
+
+void
 js::gc::FinishBackgroundFinalize(JSRuntime *rt)
 {
     rt->gcHelperThread.waitBackgroundSweepEnd();
 }
 
 AutoFinishGC::AutoFinishGC(JSRuntime *rt)
 {
     if (JS::IsIncrementalGCInProgress(rt)) {
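
A hedged usage sketch of the new entry point: code that must not observe nursery-allocated things evicts the nursery first, as JSCompartment::clearTraps now does above.

    /* Tenure all nursery survivors; afterwards the nursery is empty. */
    js::MinorGC(rt, JS::gcreason::EVICT_NURSERY);
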
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -38,26 +38,28 @@ struct JSLinearString;
 namespace js {
 
 class ArgumentsObject;
 class ArrayBufferObject;
 class BaseShape;
 class DebugScopeObject;
 class GCHelperThread;
 class GlobalObject;
+class Nursery;
 class PropertyName;
 class ScopeObject;
 class Shape;
 class UnownedBaseShape;
 struct SliceBudget;
 
 enum HeapState {
-    Idle,       // doing nothing with the GC heap
-    Tracing,    // tracing the GC heap without collecting, e.g. IterateCompartments()
-    Collecting  // doing a GC of the heap
+    Idle,             // doing nothing with the GC heap
+    Tracing,          // tracing the GC heap without collecting, e.g. IterateCompartments()
+    MajorCollecting,  // doing a GC of the major heap
+    MinorCollecting   // doing a GC of the minor heap (nursery)
 };
 
 namespace ion {
     class IonCode;
 }
 
 namespace gc {
 
@@ -236,18 +238,18 @@ struct ArenaList {
     void clear() {
         head = NULL;
         cursor = &head;
     }
 
     void insert(ArenaHeader *arena);
 };
 
-struct ArenaLists {
-
+struct ArenaLists
+{
   private:
     /*
      * For each arena kind its free list is represented as the first span with
      * free things. Initially all the spans are initialized as empty. After we
      * find a new arena with available things we move its first free span into
     * the list and set the arena as fully allocated. This way we do not need
     * to update the arena header after the initial allocation. When starting
     * the GC we only move the head of the list of spans back to the arena
@@ -492,17 +494,20 @@ struct ArenaLists {
      */
     void *parallelAllocate(JS::Zone *zone, AllocKind thingKind, size_t thingSize);
 
   private:
     inline void finalizeNow(FreeOp *fop, AllocKind thingKind);
     inline void queueForForegroundSweep(FreeOp *fop, AllocKind thingKind);
     inline void queueForBackgroundSweep(FreeOp *fop, AllocKind thingKind);
 
-    inline void *allocateFromArena(JS::Zone *zone, AllocKind thingKind);
+    void *allocateFromArena(JS::Zone *zone, AllocKind thingKind);
+    inline void *allocateFromArenaInline(JS::Zone *zone, AllocKind thingKind);
+
+    friend class js::Nursery;
 };
 
 /*
  * Initial allocation size for data structures holding chunks is set to hold
  * chunks with total capacity of 16MB to avoid buffer resizes during browser
  * startup.
  */
 const size_t INITIAL_CHUNK_CAPACITY = 16 * 1024 * 1024 / ChunkSize;
@@ -597,16 +602,19 @@ extern void
 GCFinalSlice(JSRuntime *rt, JSGCInvocationKind gckind, JS::gcreason::Reason reason);
 
 extern void
 GCDebugSlice(JSRuntime *rt, bool limit, int64_t objCount);
 
 extern void
 PrepareForDebugGC(JSRuntime *rt);
 
+extern void
+MinorGC(JSRuntime *rt, JS::gcreason::Reason reason);
+
 #ifdef JS_GC_ZEAL
 extern void
 SetGCZeal(JSRuntime *rt, uint8_t zeal, uint32_t frequency);
 #endif
 
 /* Functions for managing cross compartment gray pointers. */
 
 extern void
--- a/js/src/jsgcinlines.h
+++ b/js/src/jsgcinlines.h
@@ -185,16 +185,17 @@ ShouldNurseryAllocate(const NurseryType 
 inline bool
 IsInsideNursery(JSRuntime *rt, const void *thing)
 {
 #ifdef JSGC_GENERATIONAL
 #if JS_GC_ZEAL
     if (rt->gcVerifyPostData)
         return rt->gcVerifierNursery.isInside(thing);
 #endif
+    return rt->gcNursery.isInside(thing);
 #endif
     return false;
 }
 
 inline JSGCTraceKind
 GetGCThingTraceKind(const void *thing)
 {
     JS_ASSERT(thing);
@@ -458,23 +459,51 @@ class GCZoneGroupIter {
     }
 
     operator JS::Zone *() const { return get(); }
     JS::Zone *operator->() const { return get(); }
 };
 
 typedef CompartmentsIterT<GCZoneGroupIter> GCCompartmentGroupIter;
 
+#ifdef JSGC_GENERATIONAL
+/*
+ * Attempt to allocate a new GC thing out of the nursery. If there is not enough
+ * room in the nursery or there is an OOM, this method will return NULL.
+ */
+template <typename T, AllowGC allowGC>
+inline T *
+TryNewNurseryGCThing(JSContext *cx, size_t thingSize)
+{
+    JS_ASSERT(!IsAtomsCompartment(cx->compartment));
+    JSRuntime *rt = cx->runtime;
+    Nursery &nursery = rt->gcNursery;
+    T *t = static_cast<T *>(nursery.allocate(thingSize));
+    if (t)
+        return t;
+    if (allowGC && !rt->mainThread.suppressGC) {
+        MinorGC(rt, JS::gcreason::OUT_OF_NURSERY);
+
+        /* Exceeding gcMaxBytes while tenuring can disable the Nursery. */
+        if (nursery.isEnabled()) {
+            t = static_cast<T *>(nursery.allocate(thingSize));
+            JS_ASSERT(t);
+            return t;
+        }
+    }
+    return NULL;
+}
+#endif /* JSGC_GENERATIONAL */
+
 /*
  * Allocates a new GC thing. After a successful allocation the caller must
  * fully initialize the thing before calling any function that can potentially
  * trigger GC. This will ensure that GC tracing never sees junk values stored
  * in the partially initialized thing.
  */
-
 template <typename T, AllowGC allowGC>
 inline T *
 NewGCThing(JSContext *cx, AllocKind kind, size_t thingSize, InitialHeap heap)
 {
     JS_ASSERT(thingSize == js::gc::Arena::thingSize(kind));
     JS_ASSERT_IF(cx->compartment == cx->runtime->atomsCompartment,
                  kind == FINALIZE_STRING ||
                  kind == FINALIZE_SHORT_STRING ||
@@ -488,16 +517,24 @@ NewGCThing(JSContext *cx, AllocKind kind
 #ifdef JS_GC_ZEAL
     if (cx->runtime->needZealousGC() && allowGC)
         js::gc::RunDebugGC(cx);
 #endif
 
     if (allowGC)
         MaybeCheckStackRoots(cx);
 
+#if defined(JSGC_GENERATIONAL)
+    if (ShouldNurseryAllocate(cx->runtime->gcNursery, kind, heap)) {
+        T *t = TryNewNurseryGCThing<T, allowGC>(cx, thingSize);
+        if (t)
+            return t;
+    }
+#endif
+
     JS::Zone *zone = cx->zone();
     T *t = static_cast<T *>(zone->allocator.arenas.allocateFromFreeList(kind, thingSize));
     if (!t)
         t = static_cast<T *>(js::gc::ArenaLists::refillFreeList<allowGC>(cx, kind));
 
     JS_ASSERT_IF(t && zone->wasGCStarted() && (zone->isGCMarking() || zone->isGCSweeping()),
                  t->arenaHeader()->allocatedDuringIncremental);
 
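
A hedged illustration of the initialization contract stated above NewGCThing (the helper is hypothetical; it mirrors JSObject::create in jsobjinlines.h): nothing that can trigger a GC may run between the allocation and the init stores, or tracing could observe junk in the partially initialized thing.

    static JSObject *
    ExampleCreate(JSContext *cx, js::gc::AllocKind kind, js::gc::InitialHeap heap,
                  js::RawShape shape, js::types::TypeObject *type)
    {
        JSObject *obj = js_NewGCObject<js::CanGC>(cx, kind, heap);
        if (!obj)
            return NULL;
        obj->shape_.init(shape);  /* plain init stores; nothing here may GC */
        obj->type_.init(type);
        return obj;
    }
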
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -2360,16 +2360,38 @@ JSObject::setSlotSpan(JSContext *cx, Han
 
     if (!JSObject::updateSlotsForSpan(cx, obj, oldSpan, span))
         return false;
 
     obj->lastProperty()->base()->setSlotSpan(span);
     return true;
 }
 
+static HeapSlot *
+AllocateSlots(JSContext *cx, JSObject *obj, uint32_t nslots)
+{
+#ifdef JSGC_GENERATIONAL
+    return cx->runtime->gcNursery.allocateSlots(cx, obj, nslots);
+#else
+    return cx->pod_malloc<HeapSlot>(nslots);
+#endif
+}
+
+static HeapSlot *
+ReallocateSlots(JSContext *cx, JSObject *obj, HeapSlot *oldSlots,
+                uint32_t oldCount, uint32_t newCount)
+{
+#ifdef JSGC_GENERATIONAL
+    return cx->runtime->gcNursery.reallocateSlots(cx, obj, oldSlots, oldCount, newCount);
+#else
+    return (HeapSlot *)cx->realloc_(oldSlots, oldCount * sizeof(HeapSlot),
+                                    newCount * sizeof(HeapSlot));
+#endif
+}
+
 /* static */ bool
 JSObject::growSlots(JSContext *cx, HandleObject obj, uint32_t oldCount, uint32_t newCount)
 {
     JS_ASSERT(newCount > oldCount);
     JS_ASSERT(newCount >= SLOT_CAPACITY_MIN);
 
     /*
      * Slot capacities are determined by the span of allocated objects. Due to
@@ -2398,63 +2420,71 @@ JSObject::growSlots(JSContext *cx, Handl
 
             typeObj->newScript->allocKind = kind;
             typeObj->newScript->shape = reshapedObj->lastProperty();
             typeObj->markStateChange(cx);
         }
     }
 
     if (!oldCount) {
-        obj->slots = cx->pod_malloc<HeapSlot>(newCount);
+        obj->slots = AllocateSlots(cx, obj, newCount);
         if (!obj->slots)
             return false;
         Debug_SetSlotRangeToCrashOnTouch(obj->slots, newCount);
         return true;
     }
 
-    HeapSlot *newslots = (HeapSlot*) cx->realloc_(obj->slots, oldCount * sizeof(HeapSlot),
-                                                  newCount * sizeof(HeapSlot));
+    HeapSlot *newslots = ReallocateSlots(cx, obj, obj->slots, oldCount, newCount);
     if (!newslots)
         return false;  /* Leave slots at its old size. */
 
     bool changed = obj->slots != newslots;
     obj->slots = newslots;
 
     Debug_SetSlotRangeToCrashOnTouch(obj->slots + oldCount, newCount - oldCount);
 
     /* Changes in the slots of global objects can trigger recompilation. */
     if (changed && obj->isGlobal())
         types::MarkObjectStateChange(cx, obj);
 
     return true;
 }
 
+static void
+FreeSlots(JSContext *cx, HeapSlot *slots)
+{
+#ifdef JSGC_GENERATIONAL
+    if (!cx->runtime->gcNursery.isInside(slots))
+#endif
+        js_free(slots);
+}
+
 /* static */ void
 JSObject::shrinkSlots(JSContext *cx, HandleObject obj, uint32_t oldCount, uint32_t newCount)
 {
     JS_ASSERT(newCount < oldCount);
 
     /*
      * Refuse to shrink slots for call objects. This only happens in a very
      * obscure situation (deleting names introduced by a direct 'eval') and
      * allowing the slots pointer to change may require updating pointers in
      * the function's active args/vars information.
      */
     if (obj->isCall())
         return;
 
     if (newCount == 0) {
-        js_free(obj->slots);
+        FreeSlots(cx, obj->slots);
         obj->slots = NULL;
         return;
     }
 
     JS_ASSERT(newCount >= SLOT_CAPACITY_MIN);
 
-    HeapSlot *newslots = (HeapSlot *) cx->realloc_(obj->slots, newCount * sizeof(HeapSlot));
+    HeapSlot *newslots = ReallocateSlots(cx, obj, obj->slots, oldCount, newCount);
     if (!newslots)
         return;  /* Leave slots at its old size. */
 
     bool changed = obj->slots != newslots;
     obj->slots = newslots;
 
     /* Watch for changes in global object slots, as for growSlots. */
     if (changed && obj->isGlobal())
@@ -2647,37 +2677,53 @@ JSObject::maybeDensifySparseElements(JSC
      * to grow the object.
      */
     if (!obj->clearFlag(cx, BaseShape::INDEXED))
         return ED_FAILED;
 
     return ED_OK;
 }
 
-bool
-JSObject::growElements(JSContext *cx, unsigned newcap)
+ObjectElements *
+AllocateElements(JSObject::MaybeContext maybecx, JSObject *obj, uint32_t nelems)
 {
-    if (!growElements(&cx->zone()->allocator, newcap)) {
-        JS_ReportOutOfMemory(cx);
-        return false;
+    if (JSContext *cx = maybecx.context) {
+#ifdef JSGC_GENERATIONAL
+        return cx->runtime->gcNursery.allocateElements(cx, obj, nelems);
+#else
+        return static_cast<js::ObjectElements *>(cx->malloc_(nelems * sizeof(HeapValue)));
+#endif
     }
 
-    return true;
+    Allocator *alloc = maybecx.allocator;
+    return static_cast<js::ObjectElements *>(alloc->malloc_(nelems * sizeof(HeapValue)));
+}
+
+ObjectElements *
+ReallocateElements(JSObject::MaybeContext maybecx, JSObject *obj, ObjectElements *oldHeader,
+                   uint32_t oldCount, uint32_t newCount)
+{
+    if (JSContext *cx = maybecx.context) {
+#ifdef JSGC_GENERATIONAL
+        return cx->runtime->gcNursery.reallocateElements(cx, obj, oldHeader, oldCount, newCount);
+#else
+        return static_cast<js::ObjectElements *>(cx->realloc_(oldHeader,
+                                                              oldCount * sizeof(HeapValue),
+                                                              newCount * sizeof(HeapValue)));
+#endif
+    }
+
+    Allocator *alloc = maybecx.allocator;
+    return static_cast<js::ObjectElements *>(alloc->realloc_(oldHeader, oldCount * sizeof(HeapSlot),
+                                                             newCount * sizeof(HeapSlot)));
 }
 
 bool
-JSObject::growElements(js::Allocator *alloc, unsigned newcap)
+JSObject::growElements(MaybeContext cx, unsigned newcap)
 {
-    /*
-     * This version of |growElements()|, which takes a
-     * |js::Allocator*| as opposed to a |JSContext*|, is intended to
-     * run either during sequential or parallel execution.  As per
-     * convention, since it does not take a JSContext*, it does not
-     * report an error on out of memory but simply returns false.
-     */
     JS_ASSERT(isExtensible());
 
     /*
      * When an object with CAPACITY_DOUBLING_MAX or fewer elements needs to
      * grow, double its capacity, to add N elements in amortized O(N) time.
      *
      * Above this limit, grow by 12.5% each time. Speed is still amortized
      * O(N), with a higher constant factor, and we waste less space.
@@ -2699,30 +2745,28 @@ JSObject::growElements(js::Allocator *al
         actualCapacity = SLOT_CAPACITY_MIN;
 
     /* Don't let nelements get close to wrapping around uint32_t. */
     if (actualCapacity >= NELEMENTS_LIMIT || actualCapacity < oldcap || actualCapacity < newcap) {
         return false;
     }
 
     uint32_t initlen = getDenseInitializedLength();
+    uint32_t oldAllocated = oldcap + ObjectElements::VALUES_PER_HEADER;
     uint32_t newAllocated = actualCapacity + ObjectElements::VALUES_PER_HEADER;
 
     ObjectElements *newheader;
     if (hasDynamicElements()) {
-        uint32_t oldAllocated = oldcap + ObjectElements::VALUES_PER_HEADER;
-        newheader = (ObjectElements *)
-            alloc->realloc_(getElementsHeader(), oldAllocated * sizeof(Value),
-                            newAllocated * sizeof(Value));
+        newheader = ReallocateElements(cx, this, getElementsHeader(), oldAllocated, newAllocated);
         if (!newheader)
-            return false;  /* Leave elements as its old size. */
+            return false; /* Leave elements at its old size. */
     } else {
-        newheader = (ObjectElements *) alloc->malloc_(newAllocated * sizeof(Value));
+        newheader = AllocateElements(cx, this, newAllocated);
         if (!newheader)
-            return false;  /* Ditto. */
+            return false; /* Leave elements at its old size. */
         js_memcpy(newheader, getElementsHeader(),
                   (ObjectElements::VALUES_PER_HEADER + initlen) * sizeof(Value));
     }
 
     newheader->capacity = actualCapacity;
     elements = newheader->elements();
 
     Debug_SetSlotRangeToCrashOnTouch(elements + initlen, actualCapacity - initlen);
@@ -2737,20 +2781,21 @@ JSObject::shrinkElements(JSContext *cx, 
     JS_ASSERT(newcap <= oldcap);
 
     /* Don't shrink elements below the minimum capacity. */
     if (oldcap <= SLOT_CAPACITY_MIN || !hasDynamicElements())
         return;
 
     newcap = Max(newcap, SLOT_CAPACITY_MIN);
 
+    uint32_t oldAllocated = oldcap + ObjectElements::VALUES_PER_HEADER;
     uint32_t newAllocated = newcap + ObjectElements::VALUES_PER_HEADER;
 
-    ObjectElements *newheader = (ObjectElements *)
-        cx->realloc_(getElementsHeader(), newAllocated * sizeof(Value));
+    ObjectElements *newheader = ReallocateElements(cx, this, getElementsHeader(),
+                                                   oldAllocated, newAllocated);
     if (!newheader)
         return;  /* Leave elements at its old size. */
 
     newheader->capacity = newcap;
     elements = newheader->elements();
 }
 
 static JSObject *
--- a/js/src/jsobj.h
+++ b/js/src/jsobj.h
@@ -34,16 +34,17 @@ struct ObjectsExtraSizes;
 
 namespace js {
 
 class AutoPropDescArrayRooter;
 class BaseProxyHandler;
 class CallObject;
 struct GCMarker;
 struct NativeIterator;
+class Nursery;
 ForwardDeclare(Shape);
 struct StackShape;
 
 namespace mjit { class Compiler; }
 
 inline JSObject *
 CastAsObject(PropertyOp op)
 {
@@ -276,16 +277,17 @@ class WithObject;
  * GC size-class.
  */
 class JSObject : public js::ObjectImpl
 {
   private:
     friend class js::Shape;
     friend struct js::GCMarker;
     friend class  js::NewObjectCache;
+    friend class js::Nursery;
 
     /* Make the type object to use for LAZY_TYPE objects. */
     static js::types::TypeObject *makeLazyType(JSContext *cx, js::HandleObject obj);
 
   public:
     /*
      * Update the last property, keeping the number of allocated slots in sync
      * with the object's new slot span.
@@ -556,19 +558,26 @@ class JSObject : public js::ObjectImpl
         return isSealedOrFrozen(cx, obj, SEAL, resultp);
     }
     static inline bool isFrozen(JSContext *cx, js::HandleObject obj, bool *resultp) {
         return isSealedOrFrozen(cx, obj, FREEZE, resultp);
     }
 
     /* Accessors for elements. */
 
+    struct MaybeContext {
+        js::Allocator *allocator;
+        JSContext *context;
+
+        MaybeContext(JSContext *cx) : allocator(NULL), context(cx) {}
+        MaybeContext(js::Allocator *alloc) : allocator(alloc), context(NULL) {}
+    };
+
     inline bool ensureElements(JSContext *cx, unsigned cap);
-    bool growElements(JSContext *cx, unsigned cap);
-    bool growElements(js::Allocator *alloc, unsigned cap);
+    bool growElements(MaybeContext cx, unsigned newcap);
     void shrinkElements(JSContext *cx, unsigned cap);
     inline void setDynamicElements(js::ObjectElements *header);
 
     inline uint32_t getDenseCapacity();
     inline void setDenseInitializedLength(uint32_t length);
     inline void ensureDenseInitializedLength(JSContext *cx, unsigned index, unsigned extra);
     inline void setDenseElement(unsigned idx, const js::Value &val);
     inline void initDenseElement(unsigned idx, const js::Value &val);
@@ -596,18 +605,19 @@ class JSObject : public js::ObjectImpl
      * failure to grow the array, ED_SPARSE when the object is too sparse to
      * grow (this includes the case of index + extra overflow). In the last
      * two cases the object is kept intact.
      */
     enum EnsureDenseResult { ED_OK, ED_FAILED, ED_SPARSE };
     inline EnsureDenseResult ensureDenseElements(JSContext *cx, unsigned index, unsigned extra);
     inline EnsureDenseResult parExtendDenseElements(js::Allocator *alloc, js::Value *v,
                                                     uint32_t extra);
-    template<typename CONTEXT>
-    inline EnsureDenseResult extendDenseElements(CONTEXT *cx, unsigned requiredCapacity, unsigned extra);
+    template<typename MallocProviderType>
+    inline EnsureDenseResult extendDenseElements(MallocProviderType *cx,
+                                                 unsigned requiredCapacity, unsigned extra);
 
     /* Convert a single dense element to a sparse property. */
     static bool sparsifyDenseElement(JSContext *cx, js::HandleObject obj, unsigned index);
 
     /* Convert all dense elements to sparse properties. */
     static bool sparsifyDenseElements(JSContext *cx, js::HandleObject obj);
 
     /* Small objects are dense, no matter what. */
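
A hedged usage sketch of the MaybeContext struct above (identifiers assumed in scope): both constructors are implicit, so sequential and parallel callers share the single growElements signature.

    obj->growElements(cx, newcap);     /* sequential: JSContext * converts */
    obj->growElements(alloc, newcap);  /* parallel: js::Allocator * converts */
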
--- a/js/src/jsobjinlines.h
+++ b/js/src/jsobjinlines.h
@@ -589,19 +589,20 @@ JSObject::ensureDenseInitializedLength(J
         for (js::HeapSlot *sp = elements + initlen;
              sp != elements + (index + extra);
              sp++, offset++)
             sp->init(rt, this, js::HeapSlot::Element, offset, js::MagicValue(JS_ELEMENTS_HOLE));
         initlen = index + extra;
     }
 }
 
-template<typename CONTEXT>
+template<typename MallocProviderType>
 JSObject::EnsureDenseResult
-JSObject::extendDenseElements(CONTEXT *cx, unsigned requiredCapacity, unsigned extra)
+JSObject::extendDenseElements(MallocProviderType *cx,
+                              unsigned requiredCapacity, unsigned extra)
 {
     /*
      * Don't grow elements for non-extensible objects or watched objects. Dense
      * elements can be added/written with no extensible or watchpoint checks as
      * long as there is capacity for them.
      */
     if (!isExtensible() || watched()) {
         JS_ASSERT(getDenseCapacity() == 0);
@@ -944,16 +945,20 @@ JSObject::create(JSContext *cx, js::gc::
     }
 
     JSObject *obj = js_NewGCObject<js::CanGC>(cx, kind, heap);
     if (!obj) {
         js_free(slots);
         return NULL;
     }
 
+#ifdef JSGC_GENERATIONAL
+    cx->runtime->gcNursery.notifyInitialSlots(obj, slots);
+#endif
+
     obj->shape_.init(shape);
     obj->type_.init(type);
     obj->slots = slots;
     obj->elements = js::emptyObjectElements;
 
     const js::Class *clasp = type->clasp;
     if (clasp->hasPrivate())
         obj->privateRef(shape->numFixedSlots()) = NULL;
--- a/js/src/jstypedarray.cpp
+++ b/js/src/jstypedarray.cpp
@@ -254,33 +254,33 @@ ArrayBufferObject::allocateSlots(JSConte
 }
 
 static JSObject *
 NextView(JSObject *obj)
 {
     return static_cast<JSObject*>(obj->getFixedSlot(BufferView::NEXT_VIEW_SLOT).toPrivate());
 }
 
-static JSObject **
+static HeapPtrObject *
 GetViewList(ArrayBufferObject *obj)
 {
 #if USE_NEW_OBJECT_REPRESENTATION
     // untested
     return obj->getElementsHeader()->asArrayBufferElements().viewList();
 #else
     // The list of views must be stored somewhere in the ArrayBufferObject, but
     // the slots are already being used for the element storage and the private
     // field is used for a delegate object. The ObjectElements header has space
     // for it, but I don't want to mess around with adding unions to it with
     // USE_NEW_OBJECT_REPRESENTATION pending, since it will solve this much
     // more cleanly.
     struct OldObjectRepresentationHack {
             uint32_t capacity;
             uint32_t initializedLength;
-            JSObject *views;
+            HeapPtrObject views;
     };
     return &reinterpret_cast<OldObjectRepresentationHack*>(obj->getElementsHeader())->views;
 #endif
 }
 
 void
 ArrayBufferObject::changeContents(JSContext *maybecx, ObjectElements *newHeader)
 {
@@ -463,18 +463,20 @@ class WeakObjectSlotRef : public js::gc:
     {
     }
 
     virtual bool match(void *location) {
         return location == owner->getFixedSlot(slot).toPrivate();
     }
 
     virtual void mark(JSTracer *trc) {
+        MarkObjectUnbarriered(trc, &owner, "weak TypedArrayView ref");
         JSObject *obj = static_cast<JSObject*>(owner->getFixedSlot(slot).toPrivate());
-        MarkObjectUnbarriered(trc, &obj, desc);
+        if (obj && obj != UNSET_BUFFER_LINK)
+            MarkObjectUnbarriered(trc, &obj, desc);
         owner->setFixedSlot(slot, PrivateValue(obj));
     }
 };
 #endif
 
 // Custom barrier is necessary for PrivateValues because they are not traced by
 // default.
 static void
@@ -503,17 +505,17 @@ ArrayBufferObject::addView(RawObject vie
     // This view should never have been associated with a buffer before
     JS_ASSERT(BufferLink(view) == UNSET_BUFFER_LINK);
 
     // Note that pre-barriers are not needed here because either the list was
     // previously empty, in which case no pointer is being overwritten, or the
     // list was nonempty and will be made weak during this call (and weak
     // pointers cannot violate the snapshot-at-the-beginning invariant.)
 
-    JSObject **views = GetViewList(this);
+    HeapPtrObject *views = GetViewList(this);
     if (*views == NULL) {
         // This ArrayBuffer will have a single view at this point, so it is a
         // strong pointer (it will be marked during tracing.)
         JS_ASSERT(NextView(view) == NULL);
     } else {
         view->setFixedSlot(BufferView::NEXT_VIEW_SLOT, PrivateValue(*views));
         WeakObjectSlotBarrierPost(view, BufferView::NEXT_VIEW_SLOT, "arraybuffer.nextview");
 
@@ -633,17 +635,17 @@ ArrayBufferObject::stealContents(JSConte
         *data = reinterpret_cast<uint8_t *>(newheader + 1);
 
         if (buffer.isAsmJSArrayBuffer())
             ArrayBufferObject::neuterAsmJSArrayBuffer(buffer);
     }
 
     // Neuter the donor ArrayBuffer and all views of it
     ArrayBufferObject::setElementsHeader(header, 0);
-    *GetViewList(&buffer) = views;
+    GetViewList(&buffer)->init(views);
     for (JSObject *view = views; view; view = NextView(view))
         TypedArray::neuter(view);
 
     return true;
 }
 
 void
 ArrayBufferObject::obj_trace(JSTracer *trc, RawObject obj)
@@ -666,30 +668,42 @@ ArrayBufferObject::obj_trace(JSTracer *t
     // Instead, ArrayBuffers with a single view hold a strong pointer to the
     // view. This can entrain garbage when the single view becomes otherwise
     // unreachable while the buffer is still live, but this is expected to be
     // rare. ArrayBuffers with 0-1 views are expected to be by far the most
     // common cases. ArrayBuffers with multiple views are collected into a
     // linked list during collection, and then swept to prune out their dead
     // views.
 
-    JSObject **views = GetViewList(&obj->asArrayBuffer());
+    HeapPtrObject *views = GetViewList(&obj->asArrayBuffer());
     if (!*views)
         return;
 
+    // During minor collections, these edges are normally kept alive by the
+    // store buffer. If the store buffer overflows, fallback marking should
+    // just treat these as strong references for simplicity.
+    if (trc->runtime->isHeapMinorCollecting()) {
+        MarkObject(trc, views, "arraybuffer.viewlist");
+        JSObject *prior = views->get();
+        for (JSObject *view = NextView(prior); view; prior = view, view = NextView(view)) {
+            MarkObjectUnbarriered(trc, &view, "arraybuffer.views");
+            prior->setFixedSlot(BufferView::NEXT_VIEW_SLOT, PrivateValue(view));
+        }
+        return;
+    }
+
     JSObject *firstView = *views;
     if (NextView(firstView) == NULL) {
         // Single view: mark it, but only if we're actually doing a GC pass
         // right now. Otherwise, the tracing pass for barrier verification will
         // fail if we add another view and the pointer becomes weak.
         if (IS_GC_MARKING_TRACER(trc))
-            MarkObjectUnbarriered(trc, views, "arraybuffer.singleview");
+            MarkObject(trc, views, "arraybuffer.singleview");
     } else {
         // Multiple views: do not mark, but append buffer to list.
-
         if (IS_GC_MARKING_TRACER(trc)) {
             // obj_trace may be called multiple times before sweep(), so avoid
             // adding this buffer to the list multiple times.
             if (BufferLink(firstView) == UNSET_BUFFER_LINK) {
                 JS_ASSERT(obj->compartment() == firstView->compartment());
                 JSObject **bufList = &obj->compartment()->gcLiveArrayBuffers;
                 SetBufferLink(firstView, *bufList);
                 *bufList = obj;
@@ -710,17 +724,17 @@ ArrayBufferObject::obj_trace(JSTracer *t
 void
 ArrayBufferObject::sweep(JSCompartment *compartment)
 {
     JSObject *buffer = compartment->gcLiveArrayBuffers;
     JS_ASSERT(buffer != UNSET_BUFFER_LINK);
     compartment->gcLiveArrayBuffers = NULL;
 
     while (buffer) {
-        JSObject **views = GetViewList(&buffer->asArrayBuffer());
+        HeapPtrObject *views = GetViewList(&buffer->asArrayBuffer());
         JS_ASSERT(*views);
 
         JSObject *nextBuffer = BufferLink(*views);
         JS_ASSERT(nextBuffer != UNSET_BUFFER_LINK);
         SetBufferLink(*views, UNSET_BUFFER_LINK);
 
         // Rebuild the list of views of the ArrayBuffer, discarding dead views.
         // If there is only one view, it will have already been marked.
@@ -730,17 +744,17 @@ ArrayBufferObject::sweep(JSCompartment *
             JS_ASSERT(buffer->compartment() == view->compartment());
             JSObject *nextView = NextView(view);
             if (!IsObjectAboutToBeFinalized(&view)) {
                 view->setFixedSlot(BufferView::NEXT_VIEW_SLOT, PrivateValue(prevLiveView));
                 prevLiveView = view;
             }
             view = nextView;
         }
-        *views = prevLiveView;
+        *(views->unsafeGet()) = prevLiveView;
 
         buffer = nextBuffer;
     }
 }
 
 void
 ArrayBufferObject::resetArrayBufferList(JSCompartment *comp)
 {
@@ -1736,17 +1750,17 @@ class TypedArrayTemplate
                      obj->tenuredGetAllocKind() == gc::FINALIZE_OBJECT8_BACKGROUND);
 
         obj->setSlot(TYPE_SLOT, Int32Value(ArrayTypeID()));
         obj->setSlot(BUFFER_SLOT, ObjectValue(*bufobj));
 
         JS_ASSERT(bufobj->isArrayBuffer());
         Rooted<ArrayBufferObject *> buffer(cx, &bufobj->asArrayBuffer());
 
-        InitTypedArrayDataPointer(obj, buffer, byteOffset);
+        InitArrayBufferViewDataPointer(obj, buffer, byteOffset);
         obj->setSlot(LENGTH_SLOT, Int32Value(len));
         obj->setSlot(BYTEOFFSET_SLOT, Int32Value(byteOffset));
         obj->setSlot(BYTELENGTH_SLOT, Int32Value(len * sizeof(NativeType)));
         obj->setSlot(NEXT_VIEW_SLOT, PrivateValue(NULL));
         obj->setSlot(NEXT_BUFFER_SLOT, PrivateValue(UNSET_BUFFER_LINK));
 
         // Mark the object as non-extensible. We cannot simply call
         // obj->preventExtensions() because that has to iterate through all
--- a/js/src/jstypedarrayinlines.h
+++ b/js/src/jstypedarrayinlines.h
@@ -175,47 +175,52 @@ TypedArray::slotWidth(JSObject *obj) {
 
 bool
 DataViewObject::is(const Value &v)
 {
     return v.isObject() && v.toObject().hasClass(&DataViewClass);
 }
 
 #ifdef JSGC_GENERATIONAL
-class TypedArrayPrivateRef : public gc::BufferableRef
+class ArrayBufferViewByteOffsetRef : public gc::BufferableRef
 {
     JSObject *obj;
-    ArrayBufferObject *buffer;
-    size_t byteOffset;
 
   public:
-    TypedArrayPrivateRef(JSObject *obj, ArrayBufferObject *buffer, size_t byteOffset)
-      : obj(obj), buffer(buffer), byteOffset(byteOffset) {}
+    explicit ArrayBufferViewByteOffsetRef(JSObject *obj) : obj(obj) {}
 
     bool match(void *location) {
-        // The private field  of obj is not traced, but needs to be updated by mark.
+        /* The private field of obj is not traced, but needs to be updated by mark. */
         return false;
     }
 
-    void mark(JSTracer *trc) {}
+    void mark(JSTracer *trc) {
+        /* Update obj's private to point to the moved buffer's array data. */
+        MarkObjectUnbarriered(trc, &obj, "TypedArray");
+        HeapSlot &bufSlot = obj->getReservedSlotRef(BufferView::BUFFER_SLOT);
+        gc::MarkSlot(trc, &bufSlot, "TypedArray::BUFFER_SLOT");
+        ArrayBufferObject &buf = bufSlot.toObject().asArrayBuffer();
+        int32_t offset = obj->getReservedSlot(BufferView::BYTEOFFSET_SLOT).toInt32();
+        obj->initPrivate(buf.dataPointer() + offset);
+    }
 };
 #endif
 
 static inline void
-InitTypedArrayDataPointer(JSObject *obj, ArrayBufferObject *buffer, size_t byteOffset)
+InitArrayBufferViewDataPointer(JSObject *obj, ArrayBufferObject *buffer, size_t byteOffset)
 {
     /*
      * N.B. The base of the array's data is stored in the object's
      * private data rather than a slot to avoid alignment restrictions
      * on private Values.
      */
     obj->initPrivate(buffer->dataPointer() + byteOffset);
 #ifdef JSGC_GENERATIONAL
-    if (IsInsideNursery(obj->runtime(), buffer))
-        obj->runtime()->gcStoreBuffer.putGeneric(TypedArrayPrivateRef(obj, buffer, byteOffset));
+    if (IsInsideNursery(obj->runtime(), buffer) && buffer->hasFixedElements())
+        obj->runtime()->gcStoreBuffer.putGeneric(ArrayBufferViewByteOffsetRef(obj));
 #endif
 }
 
 static NewObjectKind
 DataViewNewObjectKind(JSContext *cx, uint32_t byteLength, JSObject *proto)
 {
     if (!proto && byteLength >= TypedArray::SINGLETON_TYPE_BYTE_LENGTH)
         return SingletonObject;
@@ -262,17 +267,17 @@ DataViewObject::create(JSContext *cx, ui
     JS_ASSERT(arrayBuffer->isArrayBuffer());
 
     DataViewObject &dvobj = obj->asDataView();
     dvobj.setFixedSlot(BYTEOFFSET_SLOT, Int32Value(byteOffset));
     dvobj.setFixedSlot(BYTELENGTH_SLOT, Int32Value(byteLength));
     dvobj.setFixedSlot(BUFFER_SLOT, ObjectValue(*arrayBuffer));
     dvobj.setFixedSlot(NEXT_VIEW_SLOT, PrivateValue(NULL));
     dvobj.setFixedSlot(NEXT_BUFFER_SLOT, PrivateValue(UNSET_BUFFER_LINK));
-    InitTypedArrayDataPointer(obj, arrayBuffer, byteOffset);
+    InitArrayBufferViewDataPointer(obj, arrayBuffer, byteOffset);
     JS_ASSERT(byteOffset + byteLength <= arrayBuffer->byteLength());
 
     // Verify that the private slot is at the expected place
     JS_ASSERT(dvobj.numFixedSlots() == DATA_SLOT);
 
     arrayBuffer->asArrayBuffer().addView(&dvobj);
 
     return &dvobj;
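
Design note on the barrier above: a view's data pointer lives in its private field, which ordinary tracing never visits, so it cannot be fixed up like a normal edge when the minor GC moves a nursery-allocated buffer. ArrayBufferViewByteOffsetRef therefore re-derives the pointer from BUFFER_SLOT plus BYTEOFFSET_SLOT in mark(), and the entry is only needed while the buffer's elements are fixed (inline) and thus move with the buffer.
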
--- a/js/src/shell/js.cpp
+++ b/js/src/shell/js.cpp
@@ -5300,17 +5300,17 @@ main(int argc, char **argv, char **envp)
         || !op.addBoolOption('\0', "no-baseline", "Disable baseline compiler")
         || !op.addBoolOption('\0', "baseline-eager", "Always baseline-compile methods")
         || !op.addIntOption('\0', "baseline-uses-before-compile", "COUNT",
                             "Wait for COUNT calls or iterations before baseline-compiling "
                             "(default: 10)", -1)
         || !op.addBoolOption('\0', "no-fpu", "Pretend CPU does not support floating-point operations "
                              "to test JIT codegen (no-op on platforms other than x86).")
 #ifdef JSGC_GENERATIONAL
-        || !op.addBoolOption('\0', "ggc", "Enable Generational GC")
+        || !op.addBoolOption('\0', "no-ggc", "Disable Generational GC")
 #endif
     )
     {
         return EXIT_FAILURE;
     }
 
     op.setArgTerminatesOptions("script", true);
 
@@ -5350,17 +5350,17 @@ main(int argc, char **argv, char **envp)
     if (!rt)
         return 1;
     gTimeoutFunc = NullValue();
     if (!JS_AddNamedValueRootRT(rt, &gTimeoutFunc, "gTimeoutFunc"))
         return 1;
 
     JS_SetGCParameter(rt, JSGC_MAX_BYTES, 0xffffffff);
 #ifdef JSGC_GENERATIONAL
-    if (!op.getBoolOption("ggc"))
+    if (op.getBoolOption("no-ggc"))
         JS::DisableGenerationalGC(rt);
 #endif
 
     JS_SetTrustedPrincipals(rt, &shellTrustedPrincipals);
     JS_SetSecurityCallbacks(rt, &securityCallbacks);
 
     JS_SetNativeStackQuota(rt, gMaxStackSize);
 
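
With this change the shell enables generational GC by default; the previous behavior is available via the new flag:

    $ js --no-ggc script.js
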
--- a/js/src/vm/ObjectImpl.h
+++ b/js/src/vm/ObjectImpl.h
@@ -19,16 +19,17 @@
 #include "js/Value.h"
 #include "vm/NumericConversions.h"
 #include "vm/String.h"
 
 namespace js {
 
 class Debugger;
 class ObjectImpl;
+class Nursery;
 ForwardDeclare(Shape);
 
 class AutoPropDescArrayRooter;
 
 static inline PropertyOp
 CastAsPropertyOp(JSObject *object)
 {
     return JS_DATA_TO_FUNC_PTR(PropertyOp, object);
@@ -925,16 +926,17 @@ class ObjectElements
         CONVERT_DOUBLE_ELEMENTS = 0x1,
         ASMJS_ARRAY_BUFFER = 0x2
     };
 
   private:
     friend class ::JSObject;
     friend class ObjectImpl;
     friend class ArrayBufferObject;
+    friend class Nursery;
 
     /* See Flags enum above. */
     uint32_t flags;
 
     /*
      * Number of initialized elements. This is <= the capacity, and for arrays
      * is <= the length. Memory for elements above the initialized length is
      * uninitialized, but values between the initialized length and the proper
@@ -1408,16 +1410,20 @@ class ObjectImpl : public gc::Cell
          * a spurious 'true' result, if the end of this object is exactly
          * aligned with the end of its arena and dynamic slots are allocated
          * immediately afterwards. Such cases cannot occur for dense arrays
          * (which have at least two fixed slots) and can only result in a leak.
          */
         return elements != emptyObjectElements && elements != fixedElements();
     }
 
+    inline bool hasFixedElements() const {
+        return elements == fixedElements();
+    }
+
     inline bool hasEmptyElements() const {
         return elements == emptyObjectElements;
     }
 
     /* GC support. */
     JS_ALWAYS_INLINE Zone *zone() const;
     static inline ThingRootKind rootKind() { return THING_ROOT_OBJECT; }
     static inline void readBarrier(ObjectImpl *obj);
--- a/js/src/vm/Shape.cpp
+++ b/js/src/vm/Shape.cpp
@@ -1336,16 +1336,40 @@ EmptyShape::insertInitialShape(JSContext
      * not required for correctness (though it may bust on the above asserts):
      * the NewObject must always check for a nativeEmpty() result and generate
      * the appropriate properties if found. Clearing the cache entry avoids
      * this duplicate regeneration.
      */
     cx->runtime->newObjectCache.invalidateEntriesForShape(cx, shape, proto);
 }
 
+/*
+ * This is called by the minor GC to ensure that any relocated proto objects
+ * get updated in the shape table.
+ */
+void
+JSCompartment::markAllInitialShapeTableEntries(JSTracer *trc)
+{
+    if (!initialShapes.initialized())
+        return;
+
+    for (InitialShapeSet::Enum e(initialShapes); !e.empty(); e.popFront()) {
+        if (!e.front().proto.isObject())
+            continue;
+        JSObject *proto = e.front().proto.toObject();
+        JS_SET_TRACING_LOCATION(trc, (void*)&e.front().proto);
+        MarkObjectRoot(trc, &proto, "InitialShapeSet proto");
+        if (proto != e.front().proto.toObject()) {
+            InitialShapeEntry moved = e.front();
+            moved.proto = TaggedProto(proto);
+            e.rekeyFront(e.front().getLookup(), moved);
+        }
+    }
+}
+
 void
 JSCompartment::sweepInitialShapeTable()
 {
     gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_TABLES_INITIAL_SHAPE);
 
     if (initialShapes.initialized()) {
         for (InitialShapeSet::Enum e(initialShapes); !e.empty(); e.popFront()) {
             const InitialShapeEntry &entry = e.front();
--- a/js/src/vm/Shape.h
+++ b/js/src/vm/Shape.h
@@ -90,16 +90,17 @@
  * a single BaseShape.
  */
 
 class JSObject;
 
 namespace js {
 
 class Bindings;
+class Nursery;
 
 /* Limit on the number of slotful properties in an object. */
 static const uint32_t SHAPE_INVALID_SLOT = JS_BIT(24) - 1;
 static const uint32_t SHAPE_MAXIMUM_SLOT = JS_BIT(24) - 2;
 
 /*
  * Shapes use multiplicative hashing, but specialized to
  * minimize footprint.
@@ -456,16 +457,17 @@ typedef HashSet<ReadBarriered<UnownedBas
                 StackBaseShape,
                 SystemAllocPolicy> BaseShapeSet;
 
 class Shape : public js::gc::Cell
 {
     friend class ::JSObject;
     friend class ::JSFunction;
     friend class js::Bindings;
+    friend class js::Nursery;
     friend class js::ObjectImpl;
     friend class js::PropertyTree;
     friend class js::StaticBlockObject;
     friend struct js::StackShape;
     friend struct js::StackBaseShape;
 
   protected:
     HeapPtrBaseShape    base_;