Back out de1c0246854c (bug 851057), 89a472c35979 (bug 873142), f849dec1a6df (bug 870496), 45fbd0b38bc5 (bug 873136) for incomprehensible Android reftest failures
author		Phil Ringnalda <philringnalda@gmail.com>
date		Mon, 20 May 2013 22:10:15 -0700
changeset	139608 b54ce66659aa483f4568f3175b6d960abf7900ee
parent		139607 a701f70457806d2b1876c32b2d230d6a8f41270d
child		139609 689a40de8c5ac12baa51e752edfa2869180da156
push id		3911
push user	akeybl@mozilla.com
push date	Mon, 24 Jun 2013 20:17:26 +0000
treeherder	mozilla-aurora@7e26ca8db92b
bugs		851057, 873142, 870496, 873136
milestone	24.0a1
backs out	de1c0246854c7691b6ecb485af0ac0c061a10fd8
js/src/gc/Nursery.cpp
js/src/gc/Nursery.h
js/src/ion/BaselineFrame.cpp
js/src/ion/BaselineFrame.h
js/src/ion/CodeGenerator.cpp
js/src/ion/CodeGenerator.h
js/src/ion/CompilerRoot.h
js/src/ion/Ion.cpp
js/src/ion/IonBuilder.cpp
js/src/ion/IonBuilder.h
js/src/ion/IonFrames.cpp
js/src/ion/IonFrames.h
js/src/ion/IonMacroAssembler.cpp
js/src/ion/LIR-Common.h
js/src/ion/LOpcodes.h
js/src/ion/LinearScan.cpp
js/src/ion/Lowering.cpp
js/src/ion/Lowering.h
js/src/ion/MCallOptimize.cpp
js/src/ion/MIR.h
js/src/ion/MOpcodes.h
js/src/ion/ParallelArrayAnalysis.cpp
js/src/ion/RegisterAllocator.cpp
js/src/ion/Registers.h
js/src/ion/Safepoints.cpp
js/src/ion/VMFunctions.cpp
js/src/ion/VMFunctions.h
js/src/jscntxt.h
js/src/jscntxtinlines.h
js/src/vm/ForkJoin.cpp
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -165,16 +165,17 @@ js::Nursery::notifyInitialSlots(Cell *ce
 
 namespace js {
 namespace gc {
 
 class MinorCollectionTracer : public JSTracer
 {
   public:
     Nursery *nursery;
+    JSRuntime *runtime;
     AutoTraceSession session;
 
     /*
      * This list is threaded through the Nursery using the space from already
      * moved things. The list is used to fix up the moved things and to find
      * things held live by intra-Nursery pointers.
      */
     RelocationOverlay *head;
@@ -189,52 +190,48 @@ class MinorCollectionTracer : public JST
         *tail = entry;
         tail = &entry->next_;
         *tail = NULL;
     }
 
     MinorCollectionTracer(JSRuntime *rt, Nursery *nursery)
       : JSTracer(),
         nursery(nursery),
-        session(rt, MinorCollecting),
+        runtime(rt),
+        session(runtime, MinorCollecting),
         head(NULL),
         tail(&head),
-        savedNeedsBarrier(rt->needsBarrier()),
-        disableStrictProxyChecking(rt)
+        savedNeedsBarrier(runtime->needsBarrier()),
+        disableStrictProxyChecking(runtime)
     {
-        JS_TracerInit(this, rt, Nursery::MinorGCCallback);
+        JS_TracerInit(this, runtime, Nursery::MinorGCCallback);
         eagerlyTraceWeakMaps = TraceWeakMapKeysValues;
 
-        rt->gcNumber++;
-        rt->setNeedsBarrier(false);
+        runtime->gcNumber++;
+        runtime->setNeedsBarrier(false);
         for (ZonesIter zone(rt); !zone.done(); zone.next())
             zone->saveNeedsBarrier(false);
     }
 
     ~MinorCollectionTracer() {
         runtime->setNeedsBarrier(savedNeedsBarrier);
         for (ZonesIter zone(runtime); !zone.done(); zone.next())
             zone->restoreNeedsBarrier();
     }
 };
 
 } /* namespace gc */
 } /* namespace js */
 
 static AllocKind
-GetObjectAllocKindForCopy(JSRuntime *rt, JSObject *obj)
+GetObjectAllocKindForCopy(JSObject *obj)
 {
     if (obj->isArray()) {
         JS_ASSERT(obj->numFixedSlots() == 0);
-
-        /* Use minimal size object if we are just going to copy the pointer. */
-        if (!IsInsideNursery(rt, (void *)obj->getElementsHeader()))
-            return FINALIZE_OBJECT0_BACKGROUND;
-
-        size_t nelements = obj->getDenseCapacity();
+        size_t nelements = obj->getDenseInitializedLength();
         return GetBackgroundAllocKind(GetGCArrayKind(nelements));
     }
 
     if (obj->isFunction())
         return obj->toFunction()->getAllocKind();
 
     AllocKind kind = GetGCObjectFixedSlotsKind(obj->numFixedSlots());
     if (CanBeFinalizedInBackground(kind, obj->getClass()))
@@ -260,17 +257,17 @@ js::Nursery::allocateFromTenured(Zone *z
 
     return t;
 }
 
 void *
 js::Nursery::moveToTenured(MinorCollectionTracer *trc, JSObject *src)
 {
     Zone *zone = src->zone();
-    AllocKind dstKind = GetObjectAllocKindForCopy(trc->runtime, src);
+    AllocKind dstKind = GetObjectAllocKindForCopy(src);
     JSObject *dst = static_cast<JSObject *>(allocateFromTenured(zone, dstKind));
     if (!dst)
         MOZ_CRASH();
 
     moveObjectToTenured(dst, src, dstKind);
 
     RelocationOverlay *overlay = reinterpret_cast<RelocationOverlay *>(src);
     overlay->forwardTo(dst);
@@ -324,17 +321,16 @@ js::Nursery::moveElementsToTenured(JSObj
 {
     if (src->hasEmptyElements())
         return;
 
     Allocator *alloc = &src->zone()->allocator;
     ObjectElements *srcHeader = src->getElementsHeader();
     ObjectElements *dstHeader;
 
-    /* TODO Bug 874151: Prefer to put element data inline if we have space. */
     if (!isInside(srcHeader)) {
         JS_ASSERT(src->elements == dst->elements);
         hugeSlots.remove(reinterpret_cast<HeapSlot*>(srcHeader));
         return;
     }
 
     /* ArrayBuffer stores byte-length, not Value count. */
     if (src->isArrayBuffer()) {
@@ -347,31 +343,33 @@ js::Nursery::moveElementsToTenured(JSObj
             dst->setFixedElements();
             dstHeader = dst->getElementsHeader();
         }
         js_memcpy(dstHeader, srcHeader, nbytes);
         dst->elements = dstHeader->elements();
         return;
     }
 
-    size_t nslots = ObjectElements::VALUES_PER_HEADER + srcHeader->capacity;
+    size_t nslots = ObjectElements::VALUES_PER_HEADER + srcHeader->initializedLength;
 
     /* Unlike other objects, Arrays can have fixed elements. */
     if (src->isArray() && nslots <= GetGCKindSlots(dstKind)) {
         dst->setFixedElements();
         dstHeader = dst->getElementsHeader();
         js_memcpy(dstHeader, srcHeader, nslots * sizeof(HeapSlot));
+        dstHeader->capacity = GetGCKindSlots(dstKind) - ObjectElements::VALUES_PER_HEADER;
         return;
     }
 
     size_t nbytes = nslots * sizeof(HeapValue);
     dstHeader = static_cast<ObjectElements *>(alloc->malloc_(nbytes));
     if (!dstHeader)
         MOZ_CRASH();
     js_memcpy(dstHeader, srcHeader, nslots * sizeof(HeapSlot));
+    dstHeader->capacity = srcHeader->initializedLength;
     dst->elements = dstHeader->elements();
 }
 
 static bool
 ShouldMoveToTenured(MinorCollectionTracer *trc, void **thingp)
 {
     Cell *cell = static_cast<Cell *>(*thingp);
     Nursery &nursery = *trc->nursery;
@@ -485,17 +483,16 @@ js::Nursery::collect(JSRuntime *rt, JS::
     MinorCollectionTracer trc(rt, this);
     MarkRuntime(&trc);
     Debugger::markAll(&trc);
     for (CompartmentsIter comp(rt); !comp.done(); comp.next()) {
         comp->markAllCrossCompartmentWrappers(&trc);
         comp->markAllInitialShapeTableEntries(&trc);
     }
     markStoreBuffer(&trc);
-    rt->newObjectCache.clearNurseryObjects(rt);
 
     /*
      * Most of the work is done here. This loop iterates over objects that have
      * been moved to the major heap. If these objects have any outgoing pointers
      * to the nursery, then those nursery objects get moved as well, until no
      * objects are left to move. That is, we iterate to a fixed point.
      */
     for (RelocationOverlay *p = trc.head; p; p = p->next()) {
--- a/js/src/gc/Nursery.h
+++ b/js/src/gc/Nursery.h
@@ -19,21 +19,16 @@
 namespace js {
 
 class ObjectElements;
 
 namespace gc {
 class MinorCollectionTracer;
 } /* namespace gc */
 
-namespace ion {
-class CodeGenerator;
-class MacroAssembler;
-}
-
 class Nursery
 {
   public:
     const static size_t Alignment = gc::ChunkSize;
     const static size_t NurserySize = gc::ChunkSize;
     const static size_t NurseryMask = NurserySize - 1;
 
     explicit Nursery(JSRuntime *rt)
@@ -139,23 +134,18 @@ class Nursery
         JS_ASSERT(runtime_);
         return ((JS::shadow::Runtime *)runtime_)->gcNurseryStart_;
     }
 
     JS_ALWAYS_INLINE uintptr_t end() const {
         JS_ASSERT(runtime_);
         return ((JS::shadow::Runtime *)runtime_)->gcNurseryEnd_;
     }
-    void *addressOfCurrentEnd() const {
-        JS_ASSERT(runtime_);
-        return (void*)&((JS::shadow::Runtime *)runtime_)->gcNurseryEnd_;
-    }
 
     uintptr_t position() const { return position_; }
-    void *addressOfPosition() const { return (void*)&position_; }
 
     JSRuntime *runtime() const { return runtime_; }
 
     /* Allocates and registers external slots with the nursery. */
     HeapSlot *allocateHugeSlots(JSContext *cx, size_t nslots);
 
     /* Allocates a new GC thing from the tenured generation during minor GC. */
     void *allocateFromTenured(Zone *zone, gc::AllocKind thingKind);
@@ -182,16 +172,14 @@ class Nursery
      */
     void sweep(FreeOp *fop);
 
     static void MinorGCCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind);
     static void MinorFallbackMarkingCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind);
     static void MinorFallbackFixupCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind);
 
     friend class gc::MinorCollectionTracer;
-    friend class ion::CodeGenerator;
-    friend class ion::MacroAssembler;
 };
 
 } /* namespace js */
 
 #endif /* JSGC_GENERATIONAL */
 #endif /* jsgc_nursery_h___ */
--- a/js/src/ion/BaselineFrame.cpp
+++ b/js/src/ion/BaselineFrame.cpp
@@ -15,17 +15,17 @@
 #include "vm/ScopeObject.h"
 
 using namespace js;
 using namespace js::ion;
 
 void
 BaselineFrame::trace(JSTracer *trc)
 {
-    replaceCalleeToken(MarkCalleeToken(trc, calleeToken()));
+    MarkCalleeToken(trc, calleeToken());
 
     gc::MarkValueRoot(trc, &thisValue(), "baseline-this");
 
     // Mark actual and formal args.
     if (isNonEvalFunctionFrame()) {
         unsigned numArgs = js::Max(numActualArgs(), numFormalArgs());
         JS_ASSERT(actuals() == formals());
         gc::MarkValueRootRange(trc, numArgs, actuals(), "baseline-args");
--- a/js/src/ion/BaselineFrame.h
+++ b/js/src/ion/BaselineFrame.h
@@ -108,20 +108,16 @@ class BaselineFrame
 
     inline void pushOnScopeChain(ScopeObject &scope);
     inline void popOffScopeChain();
 
     CalleeToken calleeToken() const {
         uint8_t *pointer = (uint8_t *)this + Size() + offsetOfCalleeToken();
         return *(CalleeToken *)pointer;
     }
-    void replaceCalleeToken(CalleeToken token) {
-        uint8_t *pointer = (uint8_t *)this + Size() + offsetOfCalleeToken();
-        *(CalleeToken *)pointer = token;
-    }
     JSScript *script() const {
         if (isEvalFrame())
             return evalScript();
         return ScriptFromCalleeToken(calleeToken());
     }
     JSFunction *fun() const {
         return CalleeTokenToFunction(calleeToken());
     }
--- a/js/src/ion/CodeGenerator.cpp
+++ b/js/src/ion/CodeGenerator.cpp
@@ -15,17 +15,16 @@
 #include "MIRGenerator.h"
 #include "shared/CodeGenerator-shared-inl.h"
 #include "jsnum.h"
 #include "jsmath.h"
 #include "jsinterpinlines.h"
 #include "ParallelFunctions.h"
 #include "ExecutionModeInlines.h"
 #include "builtin/Eval.h"
-#include "gc/Nursery.h"
 #include "vm/ForkJoin.h"
 
 #include "vm/StringObject-inl.h"
 
 using namespace js;
 using namespace js::ion;
 
 using mozilla::DebugOnly;
@@ -1158,148 +1157,16 @@ CodeGenerator::visitMonitorTypes(LMonito
     masm.guardTypeSet(operand, lir->mir()->typeSet(), scratch, &matched, &miss);
     masm.jump(&miss);
     if (!bailoutFrom(&miss, lir->snapshot()))
         return false;
     masm.bind(&matched);
     return true;
 }
 
-#ifdef JSGC_GENERATIONAL
-// Out-of-line path to update the store buffer.
-class OutOfLineCallPostWriteBarrier : public OutOfLineCodeBase<CodeGenerator>
-{
-    LInstruction *lir_;
-    const LAllocation *object_;
-
-  public:
-    OutOfLineCallPostWriteBarrier(LInstruction *lir, const LAllocation *object)
-      : lir_(lir), object_(object)
-    { }
-
-    bool accept(CodeGenerator *codegen) {
-        return codegen->visitOutOfLineCallPostWriteBarrier(this);
-    }
-
-    LInstruction *lir() const {
-        return lir_;
-    }
-    const LAllocation *object() const {
-        return object_;
-    }
-};
-
-bool
-CodeGenerator::visitOutOfLineCallPostWriteBarrier(OutOfLineCallPostWriteBarrier *ool)
-{
-    saveLive(ool->lir());
-
-    const LAllocation *obj = ool->object();
-
-    GeneralRegisterSet regs;
-    regs.add(CallTempReg0);
-    regs.add(CallTempReg1);
-    regs.add(CallTempReg2);
-
-    Register objreg;
-    if (obj->isConstant()) {
-        objreg = regs.takeAny();
-        masm.movePtr(ImmGCPtr(&obj->toConstant()->toObject()), objreg);
-    } else {
-        objreg = ToRegister(obj);
-        if (regs.has(objreg))
-            regs.take(objreg);
-    }
-
-    Register runtimereg = regs.takeAny();
-    masm.mov(ImmWord(GetIonContext()->compartment->rt), runtimereg);
-
-    masm.setupUnalignedABICall(2, regs.takeAny());
-    masm.passABIArg(runtimereg);
-    masm.passABIArg(objreg);
-    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, PostWriteBarrier));
-
-    restoreLive(ool->lir());
-
-    masm.jump(ool->rejoin());
-    return true;
-}
-#endif
-
-bool
-CodeGenerator::visitPostWriteBarrierO(LPostWriteBarrierO *lir)
-{
-#ifdef JSGC_GENERATIONAL
-    OutOfLineCallPostWriteBarrier *ool = new OutOfLineCallPostWriteBarrier(lir, lir->object());
-    if (!addOutOfLineCode(ool))
-        return false;
-
-    Nursery &nursery = GetIonContext()->compartment->rt->gcNursery;
-
-    if (lir->object()->isConstant()) {
-        JSObject *obj = &lir->object()->toConstant()->toObject();
-        JS_ASSERT(!nursery.isInside(obj));
-        /*
-        if (nursery.isInside(obj))
-            return true;
-        */
-    } else {
-        Label tenured;
-        Register objreg = ToRegister(lir->object());
-        masm.branchPtr(Assembler::Below, objreg, ImmWord(nursery.start()), &tenured);
-        masm.branchPtr(Assembler::Below, objreg, ImmWord(nursery.end()), ool->rejoin());
-        masm.bind(&tenured);
-    }
-
-    Register valuereg = ToRegister(lir->value());
-    masm.branchPtr(Assembler::Below, valuereg, ImmWord(nursery.start()), ool->rejoin());
-    masm.branchPtr(Assembler::Below, valuereg, ImmWord(nursery.end()), ool->entry());
-
-    masm.bind(ool->rejoin());
-#endif
-    return true;
-}
-
-bool
-CodeGenerator::visitPostWriteBarrierV(LPostWriteBarrierV *lir)
-{
-#ifdef JSGC_GENERATIONAL
-    OutOfLineCallPostWriteBarrier *ool = new OutOfLineCallPostWriteBarrier(lir, lir->object());
-    if (!addOutOfLineCode(ool))
-        return false;
-
-    ValueOperand value = ToValue(lir, LPostWriteBarrierV::Input);
-    masm.branchTestObject(Assembler::NotEqual, value, ool->rejoin());
-
-    Nursery &nursery = GetIonContext()->compartment->rt->gcNursery;
-
-    if (lir->object()->isConstant()) {
-        JSObject *obj = &lir->object()->toConstant()->toObject();
-        JS_ASSERT(!nursery.isInside(obj));
-        /*
-        if (nursery.isInside(obj))
-            return true;
-        */
-    } else {
-        Label tenured;
-        Register objreg = ToRegister(lir->object());
-        masm.branchPtr(Assembler::Below, objreg, ImmWord(nursery.start()), &tenured);
-        masm.branchPtr(Assembler::Below, objreg, ImmWord(nursery.end()), ool->rejoin());
-        masm.bind(&tenured);
-    }
-
-    Register valuereg = masm.extractObject(value, ToRegister(lir->temp()));
-    masm.branchPtr(Assembler::Below, valuereg, ImmWord(nursery.start()), ool->rejoin());
-    masm.branchPtr(Assembler::Below, valuereg, ImmWord(nursery.end()), ool->entry());
-
-    masm.bind(ool->rejoin());
-#endif
-    return true;
-}
-
 bool
 CodeGenerator::visitCallNative(LCallNative *call)
 {
     JSFunction *target = call->getSingleTarget();
     JS_ASSERT(target);
     JS_ASSERT(target->isNative());
 
     int callargslot = call->argslot();
@@ -4808,31 +4675,23 @@ CodeGenerator::visitIteratorStart(LItera
     masm.branchPtr(Assembler::NotEqual,
                    Address(obj, JSObject::offsetOfElements()),
                    ImmWord(js::emptyObjectElements),
                    ool->entry());
 
     // Write barrier for stores to the iterator. We only need to take a write
     // barrier if NativeIterator::obj is actually going to change.
     {
-#ifdef JSGC_GENERATIONAL
-        // Bug 867815: When using a nursery, we unconditionally take this out-
-        // of-line so that we do not have to post-barrier the store to
-        // NativeIter::obj. This just needs JIT support for the Cell* buffer.
-        Address objAddr(niTemp, offsetof(NativeIterator, obj));
-        masm.branchPtr(Assembler::NotEqual, objAddr, obj, ool->entry());
-#else
         Label noBarrier;
         masm.branchTestNeedsBarrier(Assembler::Zero, temp1, &noBarrier);
 
         Address objAddr(niTemp, offsetof(NativeIterator, obj));
         masm.branchPtr(Assembler::NotEqual, objAddr, obj, ool->entry());
 
         masm.bind(&noBarrier);
-#endif // !JSGC_GENERATIONAL
     }
 
     // Mark iterator as active.
     masm.storePtr(obj, Address(niTemp, offsetof(NativeIterator, obj)));
     masm.or32(Imm32(JSITER_ACTIVE), Address(niTemp, offsetof(NativeIterator, flags)));
 
     // Chain onto the active iterator stack.
     masm.movePtr(ImmWord(GetIonContext()->compartment), temp1);
--- a/js/src/ion/CodeGenerator.h
+++ b/js/src/ion/CodeGenerator.h
@@ -28,17 +28,16 @@ class CheckOverRecursedFailure;
 class ParCheckOverRecursedFailure;
 class OutOfLineParCheckInterrupt;
 class OutOfLineUnboxDouble;
 class OutOfLineStoreElementHole;
 class OutOfLineTypeOfV;
 class OutOfLineLoadTypedArray;
 class OutOfLineParNewGCThing;
 class OutOfLineUpdateCache;
-class OutOfLineCallPostWriteBarrier;
 
 class CodeGenerator : public CodeGeneratorSpecific
 {
     bool generateArgumentsChecks();
     bool generateBody();
 
   public:
     CodeGenerator(MIRGenerator *gen, LIRGraph *graph, MacroAssembler *masm = NULL);
@@ -83,19 +82,16 @@ class CodeGenerator : public CodeGenerat
     bool visitParLambda(LParLambda *lir);
     bool visitPointer(LPointer *lir);
     bool visitSlots(LSlots *lir);
     bool visitStoreSlotV(LStoreSlotV *store);
     bool visitElements(LElements *lir);
     bool visitConvertElementsToDoubles(LConvertElementsToDoubles *lir);
     bool visitTypeBarrier(LTypeBarrier *lir);
     bool visitMonitorTypes(LMonitorTypes *lir);
-    bool visitPostWriteBarrierO(LPostWriteBarrierO *lir);
-    bool visitPostWriteBarrierV(LPostWriteBarrierV *lir);
-    bool visitOutOfLineCallPostWriteBarrier(OutOfLineCallPostWriteBarrier *ool);
     bool visitCallNative(LCallNative *call);
     bool emitCallInvokeFunction(LInstruction *call, Register callereg,
                                 uint32_t argc, uint32_t unusedStack);
     bool visitCallGeneric(LCallGeneric *call);
     bool visitCallKnown(LCallKnown *call);
     bool emitCallInvokeFunction(LApplyArgsGeneric *apply, Register extraStackSize);
     void emitPushArguments(LApplyArgsGeneric *apply, Register extraStackSpace);
     void emitPopArguments(LApplyArgsGeneric *apply, Register extraStackSize);
--- a/js/src/ion/CompilerRoot.h
+++ b/js/src/ion/CompilerRoot.h
@@ -19,20 +19,18 @@ namespace ion {
 // The CompilerRoot must be heap-allocated and may not go out of scope.
 template <typename T>
 class CompilerRoot : public CompilerRootNode
 {
   public:
     CompilerRoot(T ptr)
       : CompilerRootNode(NULL)
     {
-        if (ptr) {
-            JS_ASSERT(!IsInsideNursery(GetIonContext()->compartment->rt, ptr));
+        if (ptr)
             setRoot(ptr);
-        }
     }
 
   public:
     // Sets the pointer and inserts into root list. The pointer becomes read-only.
     void setRoot(T root) {
         CompilerRootNode *&rootList = GetIonContext()->temp->rootList();
 
         JS_ASSERT(!ptr_);
--- a/js/src/ion/Ion.cpp
+++ b/js/src/ion/Ion.cpp
@@ -351,17 +351,16 @@ FinishAllOffThreadCompilations(IonCompar
         FinishOffThreadBuilder(builder);
     }
     compilations.clear();
 }
 
 /* static */ void
 IonRuntime::Mark(JSTracer *trc)
 {
-    JS_ASSERT(!trc->runtime->isHeapMinorCollecting());
     Zone *zone = trc->runtime->atomsCompartment->zone();
     for (gc::CellIterUnderGC i(zone, gc::FINALIZE_IONCODE); !i.done(); i.next()) {
         IonCode *code = i.get<IonCode>();
         MarkIonCodeRoot(trc, &code, "wrapper");
     }
 }
 
 void
--- a/js/src/ion/IonBuilder.cpp
+++ b/js/src/ion/IonBuilder.cpp
@@ -4226,20 +4226,17 @@ IonBuilder::createDeclEnvObject(MDefinit
     JS_ASSERT(!templateObj->hasDynamicSlots());
 
     // Allocate the actual object. It is important that no intervening
     // instructions could potentially bailout, thus leaking the dynamic slots
     // pointer.
     MInstruction *declEnvObj = MNewDeclEnvObject::New(templateObj);
     current->add(declEnvObj);
 
-    // Initialize the object's reserved slots. No post barrier is needed here:
-    // the object will be allocated in the nursery if possible, and if the
-    // tenured heap is used instead, a minor collection will have been performed
-    // that moved scope/callee to the tenured heap.
+    // Initialize the object's reserved slots.
     current->add(MStoreFixedSlot::New(declEnvObj, DeclEnvObject::enclosingScopeSlot(), scope));
     current->add(MStoreFixedSlot::New(declEnvObj, DeclEnvObject::lambdaSlot(), callee));
 
     return declEnvObj;
 }
 
 MInstruction *
 IonBuilder::createCallObject(MDefinition *callee, MDefinition *scope)
@@ -4266,18 +4263,17 @@ IonBuilder::createCallObject(MDefinition
     current->add(slots);
 
     // Allocate the actual object. It is important that no intervening
     // instructions could potentially bailout, thus leaking the dynamic slots
     // pointer.
     MInstruction *callObj = MNewCallObject::New(templateObj, slots);
     current->add(callObj);
 
-    // Initialize the object's reserved slots. No post barrier is needed here,
-    // for the same reason as in createDeclEnvObject.
+    // Initialize the object's reserved slots.
     current->add(MStoreFixedSlot::New(callObj, CallObject::enclosingScopeSlot(), scope));
     current->add(MStoreFixedSlot::New(callObj, CallObject::calleeSlot(), callee));
 
     // Initialize argument slots.
     for (AliasedFormalIter i(script()); i; i++) {
         unsigned slot = i.scopeSlot();
         unsigned formal = i.frameIndex();
         MDefinition *param = current->getSlot(info().argSlotUnchecked(formal));
@@ -4354,17 +4350,17 @@ IonBuilder::createThisScriptedSingleton(
     // Generate an inline path to create a new |this| object with
     // the given singleton prototype.
     types::TypeObject *type = proto->getNewType(cx, &ObjectClass, target);
     if (!type)
         return NULL;
     if (!types::TypeScript::ThisTypes(target->nonLazyScript())->hasType(types::Type::ObjectType(type)))
         return NULL;
 
-    RootedObject templateObject(cx, CreateThisForFunctionWithProto(cx, target, proto, TenuredObject));
+    RootedObject templateObject(cx, CreateThisForFunctionWithProto(cx, target, proto));
     if (!templateObject)
         return NULL;
 
     // Trigger recompilation if the templateObject changes.
     if (templateObject->type()->newScript)
         types::HeapTypeSet::WatchObjectStateChange(cx, templateObject->type());
 
     MCreateThisWithTemplate *createThis = MCreateThisWithTemplate::New(templateObject);
@@ -5053,21 +5049,16 @@ IonBuilder::jsop_compare(JSOp op)
     return true;
 }
 
 JSObject *
 IonBuilder::getNewArrayTemplateObject(uint32_t count)
 {
     RootedScript scriptRoot(cx, script());
     NewObjectKind newKind = types::UseNewTypeForInitializer(cx, scriptRoot, pc, JSProto_Array);
-
-    // Do not allocate template objects in the nursery.
-    if (newKind == GenericObject)
-        newKind = TenuredObject;
-
     RootedObject templateObject(cx, NewDenseUnallocatedArray(cx, count, NULL, newKind));
     if (!templateObject)
         return NULL;
 
     if (newKind != SingletonObject) {
         types::TypeObject *type = types::TypeScript::InitObject(cx, scriptRoot, pc, JSProto_Array);
         if (!type)
             return NULL;
@@ -5104,21 +5095,16 @@ IonBuilder::jsop_newobject(HandleObject 
 {
     // Don't bake in the TypeObject for non-CNG scripts.
     JS_ASSERT(script()->compileAndGo);
 
     RootedObject templateObject(cx);
 
     RootedScript scriptRoot(cx, script());
     NewObjectKind newKind = types::UseNewTypeForInitializer(cx, scriptRoot, pc, JSProto_Object);
-
-    // Do not allocate template objects in the nursery.
-    if (newKind == GenericObject)
-        newKind = TenuredObject;
-
     if (baseObj) {
         templateObject = CopyInitializerObject(cx, baseObj, newKind);
     } else {
         gc::AllocKind allocKind = GuessObjectGCKind(0);
         templateObject = NewBuiltinClassInstance(cx, &ObjectClass, allocKind, newKind);
     }
 
     if (!templateObject)
@@ -5171,19 +5157,16 @@ IonBuilder::jsop_initelem_array()
         if (!elemTypes)
             return false;
         if (!TypeSetIncludes(elemTypes, value->type(), value->resultTypeSet())) {
             elemTypes->addFreeze(cx);
             needStub = true;
         }
     }
 
-    if (NeedsPostBarrier(info(), value))
-        current->add(MPostWriteBarrier::New(obj, value));
-
     if (needStub) {
         MCallInitElementArray *store = MCallInitElementArray::New(obj, GET_UINT24(pc), value);
         current->add(store);
         return resumeAfter(store);
     }
 
     MConstant *id = MConstant::New(Int32Value(GET_UINT24(pc)));
     current->add(id);
@@ -5248,19 +5231,16 @@ IonBuilder::jsop_initprop(HandleProperty
         PropertyWriteNeedsTypeBarrier(cx, current, &obj, name, &value))
     {
         // JSOP_NEWINIT becomes an MNewObject without preconfigured properties.
         MInitProp *init = MInitProp::New(obj, name, value);
         current->add(init);
         return resumeAfter(init);
     }
 
-    if (NeedsPostBarrier(info(), value))
-        current->add(MPostWriteBarrier::New(obj, value));
-
     bool needsBarrier = true;
     if ((id == types::IdToTypeId(id)) &&
         obj->resultTypeSet() &&
         !obj->resultTypeSet()->propertyNeedsBarrier(cx, id))
     {
         needsBarrier = false;
     }
 
@@ -5970,23 +5950,16 @@ ion::TypeSetIncludes(types::TypeSet *typ
         return types->unknown() || (inputTypes && inputTypes->isSubset(types));
 
       default:
         JS_NOT_REACHED("Bad input type");
         return false;
     }
 }
 
-// Whether a write of the given value may need a post-write barrier for GC purposes.
-bool
-ion::NeedsPostBarrier(CompileInfo &info, MDefinition *value)
-{
-    return info.executionMode() != ParallelExecution && value->mightBeType(MIRType_Object);
-}
-
 bool
 IonBuilder::jsop_setgname(HandlePropertyName name)
 {
     RootedObject globalObj(cx, &script()->global());
     RootedId id(cx, NameToId(name));
 
     JS_ASSERT(globalObj->isNative());
 
@@ -6027,18 +6000,16 @@ IonBuilder::jsop_setgname(HandleProperty
     if (!propertyTypes)
         global = addShapeGuard(global, globalObj->lastProperty(), Bailout_ShapeGuard);
 
     JS_ASSERT(shape->slot() >= globalObj->numFixedSlots());
 
     MSlots *slots = MSlots::New(global);
     current->add(slots);
 
-    // Note: we do not use a post barrier when writing to the global object.
-    // Slots in the global object will be treated as roots during a minor GC.
     current->pop();
     MStoreSlot *store = MStoreSlot::New(slots, shape->slot() - globalObj->numFixedSlots(), value);
     current->add(store);
 
     // Determine whether write barrier is required.
     if (!propertyTypes || propertyTypes->needsBarrier(cx))
         store->setNeedsBarrier();
 
@@ -6580,19 +6551,16 @@ IonBuilder::jsop_setelem_dense(types::St
 {
     MIRType elementType = DenseNativeElementType(cx, obj);
     bool packed = ElementAccessIsPacked(cx, obj);
 
     // Writes which are on holes in the object do not have to bail out if they
     // cannot hit another indexed property on the object or its prototypes.
     bool writeOutOfBounds = !ElementAccessHasExtraIndexedProperty(cx, obj);
 
-    if (NeedsPostBarrier(info(), value))
-        current->add(MPostWriteBarrier::New(obj, value));
-
     // Ensure id is an integer.
     MInstruction *idInt32 = MToInt32::New(id);
     current->add(idInt32);
     id = idInt32;
 
     // Ensure the value is a double, if double conversion might be needed.
     MDefinition *newValue = value;
     if (conversion == types::StackTypeSet::AlwaysConvertToDoubles ||
@@ -7623,19 +7591,16 @@ IonBuilder::getPropTryCache(bool *emitte
 bool
 IonBuilder::jsop_setprop(HandlePropertyName name)
 {
     MDefinition *value = current->pop();
     MDefinition *obj = current->pop();
 
     types::StackTypeSet *objTypes = obj->resultTypeSet();
 
-    if (NeedsPostBarrier(info(), value))
-        current->add(MPostWriteBarrier::New(obj, value));
-
     RootedId id(cx, NameToId(name));
 
     JSFunction *commonSetter;
     bool isDOM;
     if (!TestCommonPropFunc(cx, objTypes, id, &commonSetter, false, &isDOM, NULL))
         return false;
     if (commonSetter) {
         // Setters can be called even if the property write needs a type
@@ -8025,19 +7990,16 @@ IonBuilder::jsop_getaliasedvar(ScopeCoor
 bool
 IonBuilder::jsop_setaliasedvar(ScopeCoordinate sc)
 {
     MDefinition *rval = current->peek(-1);
     MDefinition *obj = walkScopeChain(sc.hops);
 
     RootedShape shape(cx, ScopeCoordinateToStaticScopeShape(cx, script(), pc));
 
-    if (NeedsPostBarrier(info(), rval))
-        current->add(MPostWriteBarrier::New(obj, rval));
-
     MInstruction *store;
     if (shape->numFixedSlots() <= sc.slot) {
         MInstruction *slots = MSlots::New(obj);
         current->add(slots);
 
         store = MStoreSlot::NewBarriered(slots, sc.slot - shape->numFixedSlots(), rval);
     } else {
         store = MStoreFixedSlot::NewBarriered(obj, sc.slot, rval);
--- a/js/src/ion/IonBuilder.h
+++ b/js/src/ion/IonBuilder.h
@@ -767,14 +767,12 @@ class CallInfo
         MPassArg *passArg = MPassArg::New(arg);
         current->add(passArg);
         return passArg;
     }
 };
 
 bool TypeSetIncludes(types::TypeSet *types, MIRType input, types::TypeSet *inputTypes);
 
-bool NeedsPostBarrier(CompileInfo &info, MDefinition *value);
-
 } // namespace ion
 } // namespace js
 
 #endif // jsion_bytecode_analyzer_h__
--- a/js/src/ion/IonFrames.cpp
+++ b/js/src/ion/IonFrames.cpp
@@ -625,31 +625,33 @@ IonActivationIterator::operator++()
 }
 
 bool
 IonActivationIterator::more() const
 {
     return !!activation_;
 }
 
-CalleeToken
+void
 MarkCalleeToken(JSTracer *trc, CalleeToken token)
 {
     switch (GetCalleeTokenTag(token)) {
       case CalleeToken_Function:
       {
         JSFunction *fun = CalleeTokenToFunction(token);
         MarkObjectRoot(trc, &fun, "ion-callee");
-        return CalleeToToken(fun);
+        JS_ASSERT(fun == CalleeTokenToFunction(token));
+        break;
       }
       case CalleeToken_Script:
       {
         JSScript *script = CalleeTokenToScript(token);
         MarkScriptRoot(trc, &script, "ion-entry");
-        return CalleeToToken(script);
+        JS_ASSERT(script == CalleeTokenToScript(token));
+        break;
       }
       default:
         JS_NOT_REACHED("unknown callee token type");
     }
 }
 
 static inline uintptr_t
 ReadAllocation(const IonFrameIterator &frame, const LAllocation *a)
@@ -676,40 +678,22 @@ MarkActualArguments(JSTracer *trc, const
     size_t nargs = frame.numActualArgs();
 
     // Trace function arguments. Note + 1 for thisv.
     Value *argv = layout->argv();
     for (size_t i = 0; i < nargs + 1; i++)
         gc::MarkValueRoot(trc, &argv[i], "ion-argv");
 }
 
-static inline void
-WriteAllocation(const IonFrameIterator &frame, const LAllocation *a, uintptr_t value)
-{
-    if (a->isGeneralReg()) {
-        Register reg = a->toGeneralReg()->reg();
-        frame.machineState().write(reg, value);
-        return;
-    }
-    if (a->isStackSlot()) {
-        uint32_t slot = a->toStackSlot()->slot();
-        *frame.jsFrame()->slotRef(slot) = value;
-        return;
-    }
-    uint32_t index = a->toArgument()->index();
-    uint8_t *argv = reinterpret_cast<uint8_t *>(frame.jsFrame()->argv());
-    *reinterpret_cast<uintptr_t *>(argv + index) = value;
-}
-
 static void
 MarkIonJSFrame(JSTracer *trc, const IonFrameIterator &frame)
 {
     IonJSFrameLayout *layout = (IonJSFrameLayout *)frame.fp();
 
-    layout->replaceCalleeToken(MarkCalleeToken(trc, layout->calleeToken()));
+    MarkCalleeToken(trc, layout->calleeToken());
 
     IonScript *ionScript = NULL;
     if (frame.checkInvalidation(&ionScript)) {
         // This frame has been invalidated, meaning that its IonScript is no
         // longer reachable through the callee token (JSFunction/JSScript->ion
         // is now NULL or recompiled). Manually trace it here.
         IonScript::Trace(trc, ionScript);
     } else if (CalleeTokenIsFunction(layout->calleeToken())) {
@@ -753,22 +737,17 @@ MarkIonJSFrame(JSTracer *trc, const IonF
     LAllocation type, payload;
     while (safepoint.getNunboxSlot(&type, &payload)) {
         jsval_layout layout;
         layout.s.tag = (JSValueTag)ReadAllocation(frame, &type);
         layout.s.payload.uintptr = ReadAllocation(frame, &payload);
 
         Value v = IMPL_TO_JSVAL(layout);
         gc::MarkValueRoot(trc, &v, "ion-torn-value");
-
-        if (v != IMPL_TO_JSVAL(layout)) {
-            // GC moved the value, replace the stored payload.
-            layout = JSVAL_TO_IMPL(v);
-            WriteAllocation(frame, &payload, layout.s.payload.uintptr);
-        }
+        JS_ASSERT(v == IMPL_TO_JSVAL(layout));
     }
 #endif
 }
 
 static void
 MarkBaselineStubFrame(JSTracer *trc, const IonFrameIterator &frame)
 {
     // Mark the ICStub pointer stored in the stub frame. This is necessary
--- a/js/src/ion/IonFrames.h
+++ b/js/src/ion/IonFrames.h
@@ -319,16 +319,16 @@ ReadFrameSlot(IonJSFrameLayout *fp, int3
 }
 
 static inline double
 ReadFrameDoubleSlot(IonJSFrameLayout *fp, int32_t slot)
 {
     return *(double *)((char *)fp + OffsetOfFrameSlot(slot));
 }
 
-CalleeToken
+void
 MarkCalleeToken(JSTracer *trc, CalleeToken token);
 
 } /* namespace ion */
 } /* namespace js */
 
 #endif // jsion_frames_h__
 
--- a/js/src/ion/IonMacroAssembler.cpp
+++ b/js/src/ion/IonMacroAssembler.cpp
@@ -428,31 +428,16 @@ MacroAssembler::newGCThing(const Registe
 
 #ifdef JS_GC_ZEAL
     // Don't execute the inline path if gcZeal is active.
     movePtr(ImmWord(zone->rt), result);
     loadPtr(Address(result, offsetof(JSRuntime, gcZeal_)), result);
     branch32(Assembler::NotEqual, result, Imm32(0), fail);
 #endif
 
-#ifdef JSGC_GENERATIONAL
-    Nursery &nursery = zone->rt->gcNursery;
-    if (nursery.isEnabled() && allocKind <= gc::FINALIZE_OBJECT_LAST) {
-        // Inline Nursery::allocate. No explicit check for nursery.isEnabled()
-        // is needed, as the comparison with the nursery's end will always fail
-        // in such cases.
-        loadPtr(AbsoluteAddress(nursery.addressOfPosition()), result);
-        addPtr(Imm32(thingSize), result);
-        branchPtr(Assembler::BelowOrEqual, AbsoluteAddress(nursery.addressOfCurrentEnd()), result, fail);
-        storePtr(result, AbsoluteAddress(nursery.addressOfPosition()));
-        subPtr(Imm32(thingSize), result);
-        return;
-    }
-#endif // JSGC_GENERATIONAL
-
     // Inline FreeSpan::allocate.
     // There is always exactly one FreeSpan per allocKind per JSCompartment.
     // If a FreeSpan is replaced, its members are updated in the freeLists table,
     // which the code below always re-reads.
     gc::FreeSpan *list = const_cast<gc::FreeSpan *>
                          (zone->allocator.arenas.getFreeList(allocKind));
     loadPtr(AbsoluteAddress(&list->first), result);
     branchPtr(Assembler::BelowOrEqual, AbsoluteAddress(&list->last), result, fail);
--- a/js/src/ion/LIR-Common.h
+++ b/js/src/ion/LIR-Common.h
@@ -4183,62 +4183,16 @@ class LMonitorTypes : public LInstructio
     const MMonitorTypes *mir() const {
         return mir_->toMonitorTypes();
     }
     const LDefinition *temp() {
         return getTemp(0);
     }
 };
 
-// Generational write barrier used when writing an object to another object.
-class LPostWriteBarrierO : public LInstructionHelper<0, 2, 0>
-{
-  public:
-    LIR_HEADER(PostWriteBarrierO)
-
-    LPostWriteBarrierO(const LAllocation &obj, const LAllocation &value) {
-        setOperand(0, obj);
-        setOperand(1, value);
-    }
-
-    const MPostWriteBarrier *mir() const {
-        return mir_->toPostWriteBarrier();
-    }
-    const LAllocation *object() {
-        return getOperand(0);
-    }
-    const LAllocation *value() {
-        return getOperand(1);
-    }
-};
-
-// Generational write barrier used when writing a value to another object.
-class LPostWriteBarrierV : public LInstructionHelper<0, 1 + BOX_PIECES, 1>
-{
-  public:
-    LIR_HEADER(PostWriteBarrierV)
-
-    LPostWriteBarrierV(const LAllocation &obj, const LDefinition &temp) {
-        setOperand(0, obj);
-        setTemp(0, temp);
-    }
-
-    static const size_t Input = 1;
-
-    const MPostWriteBarrier *mir() const {
-        return mir_->toPostWriteBarrier();
-    }
-    const LAllocation *object() {
-        return getOperand(0);
-    }
-    const LDefinition *temp() {
-        return getTemp(0);
-    }
-};
-
 // Guard against an object's class.
 class LGuardClass : public LInstructionHelper<0, 1, 1>
 {
   public:
     LIR_HEADER(GuardClass)
 
     LGuardClass(const LAllocation &in, const LDefinition &temp) {
         setOperand(0, in);
--- a/js/src/ion/LOpcodes.h
+++ b/js/src/ion/LOpcodes.h
@@ -134,18 +134,16 @@
     _(StoreSlotT)                   \
     _(GuardShape)                   \
     _(GuardObjectType)              \
     _(GuardClass)                   \
     _(ParWriteGuard)                \
     _(ParDump)                      \
     _(TypeBarrier)                  \
     _(MonitorTypes)                 \
-    _(PostWriteBarrierO)            \
-    _(PostWriteBarrierV)            \
     _(InitializedLength)            \
     _(SetInitializedLength)         \
     _(BoundsCheck)                  \
     _(BoundsCheckRange)             \
     _(BoundsCheckLower)             \
     _(LoadElementV)                 \
     _(LoadElementT)                 \
     _(LoadElementHole)              \
--- a/js/src/ion/LinearScan.cpp
+++ b/js/src/ion/LinearScan.cpp
@@ -550,23 +550,16 @@ LinearScanAllocator::populateSafepoints(
                     (!isSpilledAt(typeInterval, inputOf(ins)) || payloadAlloc->isGeneralReg()))
                 {
                     // Either the payload is on the stack but the type is
                     // in a register, or the payload is in a register. In
                     // both cases, we don't have a contiguous spill so we
                     // add a torn entry.
                     if (!safepoint->addNunboxParts(*typeAlloc, *payloadAlloc))
                         return false;
-
-                    // If the nunbox is stored in multiple places, we need to
-                    // trace all of them to allow the GC to relocate objects.
-                    if (payloadAlloc->isGeneralReg() && isSpilledAt(payloadInterval, inputOf(ins))) {
-                        if (!safepoint->addNunboxParts(*typeAlloc, *payload->canonicalSpill()))
-                            return false;
-                    }
                 }
 #endif
             }
         }
 
 #ifdef JS_NUNBOX32
         if (IsNunbox(reg)) {
             // Skip past the next half of this nunbox so we don't track the
--- a/js/src/ion/Lowering.cpp
+++ b/js/src/ion/Lowering.cpp
@@ -1730,42 +1730,16 @@ LIRGenerator::visitMonitorTypes(MMonitor
     // from inside a type check.
     LMonitorTypes *lir = new LMonitorTypes(temp());
     if (!useBox(lir, LMonitorTypes::Input, ins->input()))
         return false;
     return assignSnapshot(lir, Bailout_Normal) && add(lir, ins);
 }
 
 bool
-LIRGenerator::visitPostWriteBarrier(MPostWriteBarrier *ins)
-{
-#ifdef JSGC_GENERATIONAL
-    switch (ins->value()->type()) {
-      case MIRType_Object: {
-        LPostWriteBarrierO *lir = new LPostWriteBarrierO(useRegisterOrConstant(ins->object()),
-                                                         useRegister(ins->value()));
-        return add(lir, ins) && assignSafepoint(lir, ins);
-      }
-      case MIRType_Value: {
-        LPostWriteBarrierV *lir =
-            new LPostWriteBarrierV(useRegisterOrConstant(ins->object()), temp());
-        if (!useBox(lir, LPostWriteBarrierV::Input, ins->value()))
-            return false;
-        return add(lir, ins) && assignSafepoint(lir, ins);
-      }
-      default:
-        // Currently, only objects can be in the nursery. Other instruction
-        // types cannot hold nursery pointers.
-        return true;
-    }
-#endif // JSGC_GENERATIONAL
-    return true;
-}
-
-bool
 LIRGenerator::visitArrayLength(MArrayLength *ins)
 {
     JS_ASSERT(ins->elements()->type() == MIRType_Elements);
     return define(new LArrayLength(useRegisterAtStart(ins->elements())), ins);
 }
 
 bool
 LIRGenerator::visitTypedArrayLength(MTypedArrayLength *ins)
--- a/js/src/ion/Lowering.h
+++ b/js/src/ion/Lowering.h
@@ -166,17 +166,16 @@ class LIRGenerator : public LIRGenerator
     bool visitFunctionEnvironment(MFunctionEnvironment *ins);
     bool visitParSlice(MParSlice *ins);
     bool visitParWriteGuard(MParWriteGuard *ins);
     bool visitParCheckInterrupt(MParCheckInterrupt *ins);
     bool visitParDump(MParDump *ins);
     bool visitStoreSlot(MStoreSlot *ins);
     bool visitTypeBarrier(MTypeBarrier *ins);
     bool visitMonitorTypes(MMonitorTypes *ins);
-    bool visitPostWriteBarrier(MPostWriteBarrier *ins);
     bool visitArrayLength(MArrayLength *ins);
     bool visitTypedArrayLength(MTypedArrayLength *ins);
     bool visitTypedArrayElements(MTypedArrayElements *ins);
     bool visitInitializedLength(MInitializedLength *ins);
     bool visitSetInitializedLength(MSetInitializedLength *ins);
     bool visitNot(MNot *ins);
     bool visitBoundsCheck(MBoundsCheck *ins);
     bool visitBoundsCheckLower(MBoundsCheckLower *ins);
--- a/js/src/ion/MCallOptimize.cpp
+++ b/js/src/ion/MCallOptimize.cpp
@@ -215,19 +215,17 @@ IonBuilder::inlineArray(CallInfo &callIn
 
     if (callInfo.argc() >= 2) {
         // Get the elements vector.
         MElements *elements = MElements::New(ins);
         current->add(elements);
 
         // Store all values, no need to initialize the length after each as
         // jsop_initelem_array is doing because we do not expect to bailout
-        // because the memory is supposed to be allocated by now. There is no
-        // need for a post barrier on these writes, as as the MNewAray will use
-        // the nursery if possible, triggering a minor collection if it can't.
+        // because the memory is supposed to be allocated by now.
         MConstant *id = NULL;
         for (uint32_t i = 0; i < initLength; i++) {
             id = MConstant::New(Int32Value(i));
             current->add(id);
 
             MDefinition *value = callInfo.getArg(i);
             if (conversion == types::StackTypeSet::AlwaysConvertToDoubles) {
                 MInstruction *valueDouble = MToDouble::New(value);
@@ -344,19 +342,16 @@ IonBuilder::inlineArrayPush(CallInfo &ca
     if (conversion == types::StackTypeSet::AlwaysConvertToDoubles ||
         conversion == types::StackTypeSet::MaybeConvertToDoubles)
     {
         MInstruction *valueDouble = MToDouble::New(value);
         current->add(valueDouble);
         value = valueDouble;
     }
 
-    if (NeedsPostBarrier(info(), value))
-        current->add(MPostWriteBarrier::New(callInfo.thisArg(), value));
-
     MArrayPush *ins = MArrayPush::New(callInfo.thisArg(), value);
     current->add(ins);
     current->push(ins);
 
     if (!resumeAfter(ins))
         return InliningStatus_Error;
     return InliningStatus_Inlined;
 }
@@ -449,17 +444,17 @@ IonBuilder::inlineArrayConcat(CallInfo &
         if (!elemTypes)
             return InliningStatus_Error;
 
         if (!elemTypes->knownSubset(cx, thisElemTypes))
             return InliningStatus_NotInlined;
     }
 
     // Inline the call.
-    RootedObject templateObj(cx, NewDenseEmptyArray(cx, thisType->proto, TenuredObject));
+    RootedObject templateObj(cx, NewDenseEmptyArray(cx, thisType->proto));
     if (!templateObj)
         return InliningStatus_Error;
     templateObj->setType(thisType);
 
     callInfo.unwrapArgs();
 
     MArrayConcat *ins = MArrayConcat::New(callInfo.thisArg(), callInfo.getArg(0), templateObj);
     current->add(ins);
@@ -779,17 +774,17 @@ IonBuilder::inlineStringObject(CallInfo 
     // MToString only supports int32 or string values.
     MIRType type = callInfo.getArg(0)->type();
     if (type != MIRType_Int32 && type != MIRType_String)
         return InliningStatus_NotInlined;
 
     callInfo.unwrapArgs();
 
     RootedString emptyString(cx, cx->runtime->emptyString);
-    RootedObject templateObj(cx, StringObject::create(cx, emptyString, TenuredObject));
+    RootedObject templateObj(cx, StringObject::create(cx, emptyString));
     if (!templateObj)
         return InliningStatus_Error;
 
     MNewStringObject *ins = MNewStringObject::New(callInfo.getArg(0), templateObj);
     current->add(ins);
     current->push(ins);
 
     if (!resumeAfter(ins))
--- a/js/src/ion/MIR.h
+++ b/js/src/ion/MIR.h
@@ -7169,47 +7169,16 @@ class MMonitorTypes : public MUnaryInstr
     const types::StackTypeSet *typeSet() const {
         return typeSet_;
     }
     AliasSet getAliasSet() const {
         return AliasSet::None();
     }
 };
 
-// Given a value being written to another object, update the generational store
-// buffer if the value is in the nursery and object is in the tenured heap.
-class MPostWriteBarrier
-  : public MBinaryInstruction,
-    public ObjectPolicy<0>
-{
-    MPostWriteBarrier(MDefinition *obj, MDefinition *value)
-      : MBinaryInstruction(obj, value)
-    {
-        setGuard();
-    }
-
-  public:
-    INSTRUCTION_HEADER(PostWriteBarrier)
-
-    static MPostWriteBarrier *New(MDefinition *obj, MDefinition *value) {
-        return new MPostWriteBarrier(obj, value);
-    }
-
-    TypePolicy *typePolicy() {
-        return this;
-    }
-
-    MDefinition *object() const {
-        return getOperand(0);
-    }
-    MDefinition *value() const {
-        return getOperand(1);
-    }
-};
-
 class MNewSlots : public MNullaryInstruction
 {
     unsigned nslots_;
 
     MNewSlots(unsigned nslots)
       : nslots_(nslots)
     {
         setResultType(MIRType_Slots);
--- a/js/src/ion/MOpcodes.h
+++ b/js/src/ion/MOpcodes.h
@@ -97,17 +97,16 @@ namespace ion {
     _(Elements)                                                             \
     _(ConstantElements)                                                     \
     _(ConvertElementsToDoubles)                                             \
     _(LoadSlot)                                                             \
     _(StoreSlot)                                                            \
     _(FunctionEnvironment)                                                  \
     _(TypeBarrier)                                                          \
     _(MonitorTypes)                                                         \
-    _(PostWriteBarrier)                                                     \
     _(GetPropertyCache)                                                     \
     _(GetPropertyPolymorphic)                                               \
     _(SetPropertyPolymorphic)                                               \
     _(GetElementCache)                                                      \
     _(BindNameCache)                                                        \
     _(GuardShape)                                                           \
     _(GuardObjectType)                                                      \
     _(GuardClass)                                                           \
--- a/js/src/ion/ParallelArrayAnalysis.cpp
+++ b/js/src/ion/ParallelArrayAnalysis.cpp
@@ -185,17 +185,16 @@ class ParallelArrayVisitor : public MIns
     SAFE_OP(Slots)
     SAFE_OP(Elements)
     SAFE_OP(ConstantElements)
     SAFE_OP(LoadSlot)
     WRITE_GUARDED_OP(StoreSlot, slots)
     SAFE_OP(FunctionEnvironment) // just a load of func env ptr
     SAFE_OP(TypeBarrier) // causes a bailout if the type is not found: a-ok with us
     SAFE_OP(MonitorTypes) // causes a bailout if the type is not found: a-ok with us
-    UNSAFE_OP(PostWriteBarrier)
     SAFE_OP(GetPropertyCache)
     SAFE_OP(GetPropertyPolymorphic)
     UNSAFE_OP(SetPropertyPolymorphic)
     UNSAFE_OP(GetElementCache)
     UNSAFE_OP(BindNameCache)
     SAFE_OP(GuardShape)
     SAFE_OP(GuardObjectType)
     SAFE_OP(GuardClass)
--- a/js/src/ion/RegisterAllocator.cpp
+++ b/js/src/ion/RegisterAllocator.cpp
@@ -286,36 +286,35 @@ AllocationIntegrityState::checkSafepoint
             IonSpew(IonSpew_RegAlloc, "Safepoint object v%u i%u %s",
                     vreg, ins->id(), alloc.toString());
             if (!safepoint->addGcPointer(alloc))
                 return false;
         }
         JS_ASSERT(safepoint->hasGcPointer(alloc));
         break;
 #ifdef JS_NUNBOX32
-      // Do not assert that safepoint information for nunbox types is complete,
+      // Do not assert that safepoint information for nunboxes is complete,
       // as if a vreg for a value's components are copied in multiple places
-      // then the safepoint information may not reflect all copies. All copies
-      // of payloads must be reflected, however, for generational GC.
+      // then the safepoint information may not reflect all copies.
+      // See SafepointWriter::writeNunboxParts.
       case LDefinition::TYPE:
         if (populateSafepoints) {
             IonSpew(IonSpew_RegAlloc, "Safepoint type v%u i%u %s",
                     vreg, ins->id(), alloc.toString());
             if (!safepoint->addNunboxType(vreg, alloc))
                 return false;
         }
         break;
       case LDefinition::PAYLOAD:
         if (populateSafepoints) {
             IonSpew(IonSpew_RegAlloc, "Safepoint payload v%u i%u %s",
                     vreg, ins->id(), alloc.toString());
             if (!safepoint->addNunboxPayload(vreg, alloc))
                 return false;
         }
-        JS_ASSERT(safepoint->hasNunboxPayload(alloc));
         break;
 #else
       case LDefinition::BOX:
         if (populateSafepoints) {
             IonSpew(IonSpew_RegAlloc, "Safepoint boxed value v%u i%u %s",
                     vreg, ins->id(), alloc.toString());
             if (!safepoint->addBoxedValue(alloc))
                 return false;
--- a/js/src/ion/Registers.h
+++ b/js/src/ion/Registers.h
@@ -108,18 +108,15 @@ class MachineState
         return fpregs_[reg.code()] != NULL;
     }
     uintptr_t read(Register reg) const {
         return *regs_[reg.code()];
     }
     double read(FloatRegister reg) const {
         return *fpregs_[reg.code()];
     }
-    void write(Register reg, uintptr_t value) const {
-        *regs_[reg.code()] = value;
-    }
 };
 
 } // namespace ion
 } // namespace js
 
 #endif // jsion_cpu_registers_h__
 
--- a/js/src/ion/Safepoints.cpp
+++ b/js/src/ion/Safepoints.cpp
@@ -226,16 +226,21 @@ SafepointWriter::writeNunboxParts(LSafep
             fprintf(IonSpewFile, ")\n");
         }
     }
 # endif
 
     // Safepoints are permitted to have partially filled in entries for nunboxes,
     // provided that only the type is live and not the payload. Omit these from
     // the written safepoint.
+    //
+    // Note that partial entries typically appear when one part of a nunbox is
+    // stored in multiple places, in which case we will end up with incomplete
+    // information about all the places the value is stored. This will need to
+    // be fixed when the GC is permitted to move structures.
     uint32_t partials = safepoint->partialNunboxes();
 
     stream_.writeUnsigned(entries.length() - partials);
 
     for (size_t i = 0; i < entries.length(); i++) {
         SafepointNunboxEntry &entry = entries[i];
 
         if (entry.type.isUse() || entry.payload.isUse()) {
--- a/js/src/ion/VMFunctions.cpp
+++ b/js/src/ion/VMFunctions.cpp
@@ -574,25 +574,16 @@ FilterArguments(JSContext *cx, JSString 
     const jschar *chars = str->getChars(cx);
     if (!chars)
         return false;
 
     static jschar arguments[] = {'a', 'r', 'g', 'u', 'm', 'e', 'n', 't', 's'};
     return !StringHasPattern(chars, str->length(), arguments, mozilla::ArrayLength(arguments));
 }
 
-#ifdef JSGC_GENERATIONAL
-void
-PostWriteBarrier(JSRuntime *rt, JSObject *obj)
-{
-    JS_ASSERT(!IsInsideNursery(rt, obj));
-    rt->gcStoreBuffer.putWholeObject(obj);
-}
-#endif
-
 uint32_t
 GetIndexFromString(JSString *str)
 {
     // Masks the return value UINT32_MAX as failure to get the index.
     // I.e. it is impossible to distinguish between failing to get the index
     // or the actual index UINT32_MAX.
 
     if (!str->isAtom())
--- a/js/src/ion/VMFunctions.h
+++ b/js/src/ion/VMFunctions.h
@@ -526,20 +526,16 @@ bool OperatorInI(JSContext *cx, uint32_t
 bool GetIntrinsicValue(JSContext *cx, HandlePropertyName name, MutableHandleValue rval);
 
 bool CreateThis(JSContext *cx, HandleObject callee, MutableHandleValue rval);
 
 void GetDynamicName(JSContext *cx, JSObject *scopeChain, JSString *str, Value *vp);
 
 JSBool FilterArguments(JSContext *cx, JSString *str);
 
-#ifdef JSGC_GENERATIONAL
-void PostWriteBarrier(JSRuntime *rt, JSObject *obj);
-#endif
-
 uint32_t GetIndexFromString(JSString *str);
 
 bool DebugPrologue(JSContext *cx, BaselineFrame *frame, JSBool *mustReturn);
 bool DebugEpilogue(JSContext *cx, BaselineFrame *frame, JSBool ok);
 
 bool StrictEvalPrologue(JSContext *cx, BaselineFrame *frame);
 bool HeavyweightFunPrologue(JSContext *cx, BaselineFrame *frame);
 
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -333,19 +333,16 @@ class NewObjectCache
 
   public:
 
     typedef int EntryIndex;
 
     NewObjectCache() { mozilla::PodZero(this); }
     void purge() { mozilla::PodZero(this); }
 
-    /* Remove any cached items keyed on moved objects. */
-    inline void clearNurseryObjects(JSRuntime *rt);
-
     /*
      * Get the entry index for the given lookup, return whether there was a hit
      * on an existing entry.
      */
     inline bool lookupProto(Class *clasp, JSObject *proto, gc::AllocKind kind, EntryIndex *pentry);
     inline bool lookupGlobal(Class *clasp, js::GlobalObject *global, gc::AllocKind kind, EntryIndex *pentry);
     inline bool lookupType(Class *clasp, js::types::TypeObject *type, gc::AllocKind kind, EntryIndex *pentry);
 
--- a/js/src/jscntxtinlines.h
+++ b/js/src/jscntxtinlines.h
@@ -24,25 +24,16 @@ namespace js {
 
 inline void
 NewObjectCache::staticAsserts()
 {
     JS_STATIC_ASSERT(NewObjectCache::MAX_OBJ_SIZE == sizeof(JSObject_Slots16));
     JS_STATIC_ASSERT(gc::FINALIZE_OBJECT_LAST == gc::FINALIZE_OBJECT16_BACKGROUND);
 }
 
-inline void
-NewObjectCache::clearNurseryObjects(JSRuntime *rt)
-{
-    for (unsigned i = 0; i < mozilla::ArrayLength(entries); ++i) {
-        if (IsInsideNursery(rt, entries[i].key))
-            mozilla::PodZero(&entries[i]);
-    }
-}
-
 inline bool
 NewObjectCache::lookup(Class *clasp, gc::Cell *key, gc::AllocKind kind, EntryIndex *pentry)
 {
     uintptr_t hash = (uintptr_t(clasp) ^ uintptr_t(key)) + kind;
     *pentry = hash % mozilla::ArrayLength(entries);
 
     Entry *entry = &entries[*pentry];
 
--- a/js/src/vm/ForkJoin.cpp
+++ b/js/src/vm/ForkJoin.cpp
@@ -670,18 +670,16 @@ class AutoEnterParallelSection
         // write barriers thread-safe.  Therefore, we guarantee
         // that there is no incremental GC in progress:
 
         if (JS::IsIncrementalGCInProgress(cx->runtime)) {
             JS::PrepareForIncrementalGC(cx->runtime);
             JS::FinishIncrementalGC(cx->runtime, JS::gcreason::API);
         }
 
-        MinorGC(cx->runtime, JS::gcreason::API);
-
         cx->runtime->gcHelperThread.waitBackgroundSweepEnd();
     }
 
     ~AutoEnterParallelSection() {
         cx_->mainThread().ionTop = prevIonTop_;
     }
 };