Bug 826691 part 2 - Add incremental GC barriers to stubs. r=djvj
author Jan de Mooij <jdemooij@mozilla.com>
date Wed, 30 Jan 2013 11:14:18 +0100
changeset 127207 b2be0e8a374fac51ea0ee66195534ca2090894cf
parent 127206 6a3cf1c0b0fbda93f2f42be98d097744429a1e6b
child 127208 29b8d4586e54ac9b1f1e66074248222abe336ca6
push id unknown
push user unknown
push date unknown
reviewers djvj
bugs 826691
milestone 21.0a1
js/src/ion/BaselineIC.cpp
js/src/ion/BaselineJIT.cpp
js/src/ion/Ion.cpp
js/src/ion/IonCompartment.h
js/src/jsweakcache.h
--- a/js/src/ion/BaselineIC.cpp
+++ b/js/src/ion/BaselineIC.cpp
@@ -231,16 +231,20 @@ ICStubCompiler::getStubCode()
     AutoFlushCache afc("ICStubCompiler::getStubCode", ion);
     if (!generateStubCode(masm))
         return NULL;
     Linker linker(masm);
     Rooted<IonCode *> newStubCode(cx, linker.newCode(cx));
     if (!newStubCode)
         return NULL;
 
+    // All barriers are emitted off by default; enable them if needed.
+    if (cx->zone()->needsBarrier())
+        newStubCode->togglePreBarriers(true);
+
     // Cache newly compiled stubcode.
     if (!ion->putStubCode(stubKey, newStubCode))
         return NULL;
 
     return newStubCode;
 }
 
 bool
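
Note on the hunk above: every pre-barrier call site is assembled in the disabled state, so a stub compiled while the zone is already marking incrementally must have its barriers switched on right away. What togglePreBarriers() amounts to, sketched with hypothetical PatchableCallSite/StubCodeSketch types (the real IonCode keeps a table of pre-barrier offsets recorded at assembly time and patches each site per-architecture):

    #include <cstdint>
    #include <vector>

    // Hypothetical stand-ins for the real IonCode bookkeeping.
    struct PatchableCallSite { uint32_t offset; };

    struct StubCodeSketch {
        uint8_t *raw;                                // start of executable code
        std::vector<PatchableCallSite> preBarriers;  // one entry per emitted barrier

        // Rewrite the instruction at |inst| into a call to the pre-barrier
        // routine (enabled) or back into a no-op (disabled).
        static void toggleCall(uint8_t *inst, bool enabled) {
            (void)inst; (void)enabled;  // platform-specific patching elided
        }

        // Flip every recorded site between no-op and barrier call.
        void togglePreBarriers(bool enabled) {
            for (size_t i = 0; i < preBarriers.size(); i++)
                toggleCall(raw + preBarriers[i].offset, enabled);
        }
    };
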
@@ -1843,16 +1847,17 @@ ICSetElem_Dense::Compiler::generateStubC
     masm.branch32(Assembler::BelowOrEqual, initLength, key, &failure);
 
     // Hole check.
     BaseIndex element(scratchReg, key, TimesEight);
     masm.branchTestMagic(Assembler::Equal, element, &failure);
 
     // It's safe to overwrite R0 now.
     masm.loadValue(Address(BaselineStackReg, ICStackValueOffset), R0);
+    masm.patchableCallPreBarrier(element, MIRType_Value);
     masm.storeValue(R0, element);
     EmitReturnFromIC(masm);
 
     // Failure case - fail but first unstow R0 and R1
     masm.bind(&failureUnstow);
     EmitUnstowICValues(masm, 2);
 
     // Failure case - jump to next stub
@@ -2377,16 +2382,17 @@ ICSetProp_Native::Compiler::generateStub
     // Unstow R0 and R1 (object and key)
     EmitUnstowICValues(masm, 2);
 
     if (!isFixedSlot_)
         masm.loadPtr(Address(objReg, JSObject::offsetOfSlots()), objReg);
 
     // Perform the store.
     masm.load32(Address(BaselineStubReg, ICSetProp_Native::offsetOfOffset()), scratch);
+    masm.patchableCallPreBarrier(BaseIndex(objReg, scratch, TimesOne), MIRType_Value);
     masm.storeValue(R1, BaseIndex(objReg, scratch, TimesOne));
 
     // The RHS has to be in R0.
     masm.moveValue(R1, R0);
     EmitReturnFromIC(masm);
 
     // Failure case - jump to next stub
     masm.bind(&failure);
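
Both store stubs above now emit patchableCallPreBarrier() immediately before the storeValue(), so the Value about to be overwritten is marked while incremental marking is active. A rough C++ picture of the guarded store, with hypothetical Value/markValue stand-ins; note the stub itself has no explicit branch, since the barrier call is physically patched in or out:

    #include <cstdint>

    struct Value { uint64_t bits; };     // stand-in for the engine's boxed value

    void markValue(const Value &prior);  // assumption: the real pre-write barrier

    void barrieredStore(Value *slot, const Value &next, bool barrierToggledOn) {
        // This boolean only models the toggle state of the patched call site.
        if (barrierToggledOn)
            markValue(*slot);            // mark the old referent before overwrite
        *slot = next;
    }
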
--- a/js/src/ion/BaselineJIT.cpp
+++ b/js/src/ion/BaselineJIT.cpp
@@ -449,8 +449,17 @@ ion::FinishDiscardBaselineScript(FreeOp 
         // that we don't need a separate script iteration to unmark them.
         script->baseline->resetActive();
         return;
     }
 
     BaselineScript::Destroy(fop, script->baseline);
     script->baseline = NULL;
 }
+
+void
+ion::IonCompartment::toggleBaselineStubBarriers(bool enabled)
+{
+    for (ICStubCodeMap::Enum e(*stubCodes_); !e.empty(); e.popFront()) {
+        IonCode *code = *e.front().value.unsafeGet();
+        code->togglePreBarriers(enabled);
+    }
+}
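
The enumeration goes through unsafeGet() because stubCodes_ holds read-barriered jitcode pointers and this toggle runs as GC bookkeeping, where firing a read barrier would be wrong. A sketch of such a wrapper, under a hypothetical ReadBarrieredSketch name; unsafeGet() hands back the slot address, matching the *e.front().value.unsafeGet() above:

    template <typename T>
    class ReadBarrieredSketch {
        T *ptr;
      public:
        T *get() {
            readBarrier(ptr);   // mark |ptr| if incremental marking is in progress
            return ptr;
        }
        T **unsafeGet() {
            return &ptr;        // raw slot address, no barrier (GC-internal use only)
        }
        static void readBarrier(T *p);  // assumption: the real read barrier hook
    };
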
--- a/js/src/ion/Ion.cpp
+++ b/js/src/ion/Ion.cpp
@@ -779,16 +779,19 @@ ion::ToggleBarriers(JSCompartment *comp,
 {
     IonContext ictx(NULL, comp, NULL);
     AutoFlushCache afc("ToggleBarriers");
     for (gc::CellIterUnderGC i(comp, gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
         UnrootedScript script = i.get<JSScript>();
         if (script->hasIonScript())
             script->ion->toggleBarriers(needs);
     }
+
+    if (comp->ionCompartment())
+        comp->ionCompartment()->toggleBaselineStubBarriers(needs);
 }
 
 namespace js {
 namespace ion {
 
 CodeGenerator *
 CompileBackEnd(MIRGenerator *mir)
 {
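
With this hunk, toggling a compartment's barriers covers baseline stub code as well as Ion scripts. ToggleBarriers itself is driven by the GC: roughly the pattern below, a hypothetical sketch of the caller side (it assumes the SpiderMonkey-internal headers of this era; js::CompartmentsIter and ion::ToggleBarriers are the real names, while the two wrapper functions stand in for the incremental-GC slice code):

    void beginIncrementalMarking(JSRuntime *rt) {
        for (js::CompartmentsIter c(rt); !c.done(); c.next())
            ion::ToggleBarriers(c, /* needs = */ true);
    }

    void finishIncrementalMarking(JSRuntime *rt) {
        for (js::CompartmentsIter c(rt); !c.done(); c.next())
            ion::ToggleBarriers(c, /* needs = */ false);
    }
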
--- a/js/src/ion/IonCompartment.h
+++ b/js/src/ion/IonCompartment.h
@@ -126,16 +126,18 @@ class IonCompartment
         // Make sure to do a lookupForAdd(key) and then insert into that slot, because
         // that way if stubCode gets moved due to a GC caused by lookupForAdd, then
         // we still write the correct pointer.
         JS_ASSERT(!stubCodes_->has(key));
         ICStubCodeMap::AddPtr p = stubCodes_->lookupForAdd(key);
         return stubCodes_->add(p, key, stubCode.get());
     }
 
+    void toggleBaselineStubBarriers(bool enabled);
+
   public:
     IonCompartment(IonRuntime *rt);
     ~IonCompartment();
 
     bool initialize(JSContext *cx);
 
     void mark(JSTracer *trc, JSCompartment *compartment);
     void sweep(FreeOp *fop);
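
For context, stubCodes_ (walked by toggleBaselineStubBarriers above) is a weak-value map from stub key to read-barriered jitcode; a paraphrased, simplified declaration is sketched below (default hash/alloc policies omitted, see the ReadBarriered sketch above for why enumeration uses unsafeGet()). The jsweakcache.h hunk that follows makes WeakValueCache's Base/Range/Enum typedefs public so code outside the class can name ICStubCodeMap::Enum for exactly this kind of enumeration.

    #include <cstdint>

    class IonCode;
    template <typename T> class ReadBarriered;
    template <class Key, class Value> class WeakValueCache;

    typedef WeakValueCache<uint32_t, ReadBarriered<IonCode> > ICStubCodeMap;
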
--- a/js/src/jsweakcache.h
+++ b/js/src/jsweakcache.h
@@ -70,23 +70,23 @@ class WeakCache : public HashMap<Key, Va
     }
 };
 
 // A WeakValueCache is similar to a WeakCache, except keys are never marked.
 // This is useful for weak maps where the keys are primitive values such as uint32_t.
 template <class Key, class Value,
           class HashPolicy = DefaultHasher<Key>,
           class AllocPolicy = RuntimeAllocPolicy>
-class WeakValueCache : public HashMap<Key, Value, HashPolicy, AllocPolicy> {
-  private:
+class WeakValueCache : public HashMap<Key, Value, HashPolicy, AllocPolicy>
+{
+  public:
     typedef HashMap<Key, Value, HashPolicy, AllocPolicy> Base;
     typedef typename Base::Range Range;
     typedef typename Base::Enum Enum;
 
-  public:
     explicit WeakValueCache(JSRuntime *rt) : Base(rt) { }
     explicit WeakValueCache(JSContext *cx) : Base(cx) { }
 
   public:
     // Sweep all entries which have unmarked key or value.
     void sweep(FreeOp *fop) {
         // Remove all entries whose values remain unmarked.
         for (Enum e(*this); !e.empty(); e.popFront()) {