Backout 6b847a10bbb1 (Bug 1175642) for being on top of a regression.
author      Terrence Cole <terrence@mozilla.com>
date        Wed, 24 Jun 2015 16:25:12 -0700
changeset   280952   c01c45d8f131268f90de6734eab1c447f5ff4446
parent      280951   21a80f803b46020f14a5acb8bf45d7e19e3cf386
child       280953   fb159b796dadbaace3d8adedea5ec74b99129133
push id     unknown
push user   unknown
push date   unknown
bugs        1175642
milestone   41.0a1
backs out   6b847a10bbb1996436d05cd8b64fb64775a37fe7
files
dom/xbl/nsXBLMaybeCompiled.h
js/public/Id.h
js/public/RootingAPI.h
js/public/Value.h
js/src/gc/Barrier.cpp
js/src/gc/Barrier.h
js/src/gc/Heap.h
js/src/gc/StoreBuffer.h
js/src/jsobj.h
js/src/vm/Runtime.h
js/src/vm/Stack.cpp
js/src/vm/Stack.h
--- a/dom/xbl/nsXBLMaybeCompiled.h
+++ b/dom/xbl/nsXBLMaybeCompiled.h
@@ -87,29 +87,29 @@ namespace js {
 
 template <class UncompiledT>
 struct GCMethods<nsXBLMaybeCompiled<UncompiledT> >
 {
   typedef struct GCMethods<JSObject *> Base;
 
   static nsXBLMaybeCompiled<UncompiledT> initial() { return nsXBLMaybeCompiled<UncompiledT>(); }
 
-  static void postBarrier(nsXBLMaybeCompiled<UncompiledT>* functionp,
-                          nsXBLMaybeCompiled<UncompiledT> prev,
-                          nsXBLMaybeCompiled<UncompiledT> next)
+  static bool needsPostBarrier(nsXBLMaybeCompiled<UncompiledT> function)
+  {
+    return function.IsCompiled() && Base::needsPostBarrier(function.GetJSFunction());
+  }
+
+  static void postBarrier(nsXBLMaybeCompiled<UncompiledT>* functionp)
   {
-    if (next.IsCompiled()) {
-      Base::postBarrier(&functionp->UnsafeGetJSFunction(),
-                        prev.IsCompiled() ? prev.UnsafeGetJSFunction() : nullptr,
-                        next.UnsafeGetJSFunction());
-    } else if (prev.IsCompiled()) {
-      Base::postBarrier(&prev.UnsafeGetJSFunction(),
-                        prev.UnsafeGetJSFunction(),
-                        nullptr);
-    }
+    Base::postBarrier(&functionp->UnsafeGetJSFunction());
+  }
+
+  static void relocate(nsXBLMaybeCompiled<UncompiledT>* functionp)
+  {
+    Base::relocate(&functionp->UnsafeGetJSFunction());
   }
 };
 
 template <class UncompiledT>
 class HeapBase<nsXBLMaybeCompiled<UncompiledT> >
 {
   const JS::Heap<nsXBLMaybeCompiled<UncompiledT> >& wrapper() const {
     return *static_cast<const JS::Heap<nsXBLMaybeCompiled<UncompiledT> >*>(this);
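
The specialization above shows the shape of the restored GCMethods protocol: a wrapper type answers needsPostBarrier only when it actually holds a GC pointer, and forwards postBarrier/relocate to the underlying pointer's specialization. A simplified, standalone C++ sketch of that forwarding pattern (all types here are toy stand-ins, not the real SpiderMonkey definitions):

    #include <cstdio>

    struct ToyFunction { bool inNursery; };

    template <typename T> struct GCMethods;

    template <> struct GCMethods<ToyFunction*> {
        static bool needsPostBarrier(ToyFunction* v) { return v && v->inNursery; }
        static void postBarrier(ToyFunction** vp) { std::printf("add edge at %p\n", (void*)vp); }
        static void relocate(ToyFunction** vp)    { std::printf("drop edge at %p\n", (void*)vp); }
    };

    // Wrapper that only sometimes holds a GC pointer, like nsXBLMaybeCompiled.
    struct MaybeCompiled {
        ToyFunction* fun = nullptr;   // valid only when compiled
        bool compiled = false;
        ToyFunction*& UnsafeGetJSFunction() { return fun; }
    };

    template <> struct GCMethods<MaybeCompiled> {
        using Base = GCMethods<ToyFunction*>;
        static bool needsPostBarrier(MaybeCompiled f) {
            return f.compiled && Base::needsPostBarrier(f.fun);
        }
        static void postBarrier(MaybeCompiled* fp) { Base::postBarrier(&fp->UnsafeGetJSFunction()); }
        static void relocate(MaybeCompiled* fp)    { Base::relocate(&fp->UnsafeGetJSFunction()); }
    };
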
--- a/js/public/Id.h
+++ b/js/public/Id.h
@@ -166,17 +166,19 @@ extern JS_PUBLIC_DATA(const jsid) JSID_E
 extern JS_PUBLIC_DATA(const JS::HandleId) JSID_VOIDHANDLE;
 extern JS_PUBLIC_DATA(const JS::HandleId) JSID_EMPTYHANDLE;
 
 namespace js {
 
 template <> struct GCMethods<jsid>
 {
     static jsid initial() { return JSID_VOID; }
-    static void postBarrier(jsid* idp, jsid prev, jsid next) {}
+    static bool needsPostBarrier(jsid id) { return false; }
+    static void postBarrier(jsid* idp) {}
+    static void relocate(jsid* idp) {}
 };
 
 // If the jsid is a GC pointer type, convert to that type and call |f| with
 // the pointer. If the jsid is not a GC type, calls F::defaultValue.
 template <typename F, typename... Args>
 auto
 DispatchIdTyped(F f, jsid& id, Args&&... args)
   -> decltype(f(static_cast<JSString*>(nullptr), mozilla::Forward<Args>(args)...))
--- a/js/public/RootingAPI.h
+++ b/js/public/RootingAPI.h
@@ -169,17 +169,18 @@ struct PersistentRootedMarker;
 namespace JS {
 
 template <typename T> class Rooted;
 template <typename T> class PersistentRooted;
 
 /* This is exposing internal state of the GC for inlining purposes. */
 JS_FRIEND_API(bool) isGCEnabled();
 
-JS_FRIEND_API(void) HeapObjectPostBarrier(JSObject** objp, JSObject* prev, JSObject* next);
+JS_FRIEND_API(void) HeapObjectPostBarrier(JSObject** objp);
+JS_FRIEND_API(void) HeapObjectRelocate(JSObject** objp);
 
 #ifdef JS_DEBUG
 /*
  * For generational GC, assert that an object is in the tenured generation as
  * opposed to being in the nursery.
  */
 extern JS_FRIEND_API(void)
 AssertGCThingMustBeTenured(JSObject* obj);
@@ -226,17 +227,18 @@ class Heap : public js::HeapBase<T>
      * For Heap, move semantics are equivalent to copy semantics. In C++, a
      * copy constructor taking const-ref is the way to get a single function
      * that will be used for both lvalue and rvalue copies, so we can simply
      * omit the rvalue variant.
      */
     explicit Heap(const Heap<T>& p) { init(p.ptr); }
 
     ~Heap() {
-        post(ptr, js::GCMethods<T>::initial());
+        if (js::GCMethods<T>::needsPostBarrier(ptr))
+            relocate();
     }
 
     DECLARE_POINTER_CONSTREF_OPS(T);
     DECLARE_POINTER_ASSIGN_OPS(Heap, T);
     DECLARE_NONPOINTER_ACCESSOR_METHODS(ptr);
 
     T* unsafeGet() { return &ptr; }
 
@@ -250,27 +252,39 @@ class Heap : public js::HeapBase<T>
 
     bool isSetToCrashOnTouch() {
         return ptr == crashOnTouchPointer;
     }
 
   private:
     void init(T newPtr) {
         ptr = newPtr;
-        post(js::GCMethods<T>::initial(), ptr);
+        if (js::GCMethods<T>::needsPostBarrier(ptr))
+            post();
     }
 
     void set(T newPtr) {
-        T tmp = ptr;
-        ptr = newPtr;
-        post(tmp, ptr);
+        if (js::GCMethods<T>::needsPostBarrier(newPtr)) {
+            ptr = newPtr;
+            post();
+        } else if (js::GCMethods<T>::needsPostBarrier(ptr)) {
+            relocate();  /* Called before overwriting ptr. */
+            ptr = newPtr;
+        } else {
+            ptr = newPtr;
+        }
     }
 
-    void post(const T& prev, const T& next) {
-        js::GCMethods<T>::postBarrier(&ptr, prev, next);
+    void post() {
+        MOZ_ASSERT(js::GCMethods<T>::needsPostBarrier(ptr));
+        js::GCMethods<T>::postBarrier(&ptr);
+    }
+
+    void relocate() {
+        js::GCMethods<T>::relocate(&ptr);
     }
 
     enum {
         crashOnTouchPointer = 1
     };
 
     T ptr;
 };
@@ -585,46 +599,57 @@ struct RootKind<T*>
 {
     static ThingRootKind rootKind() { return T::rootKind(); }
 };
 
 template <typename T>
 struct GCMethods<T*>
 {
     static T* initial() { return nullptr; }
-    static void postBarrier(T** vp, T* prev, T* next) {
-        if (next)
-            JS::AssertGCThingIsNotAnObjectSubclass(reinterpret_cast<js::gc::Cell*>(next));
+    static bool needsPostBarrier(T* v) { return false; }
+    static void postBarrier(T** vp) {
+        if (vp)
+            JS::AssertGCThingIsNotAnObjectSubclass(reinterpret_cast<js::gc::Cell*>(vp));
     }
     static void relocate(T** vp) {}
 };
 
 template <>
 struct GCMethods<JSObject*>
 {
     static JSObject* initial() { return nullptr; }
     static gc::Cell* asGCThingOrNull(JSObject* v) {
         if (!v)
             return nullptr;
         MOZ_ASSERT(uintptr_t(v) > 32);
         return reinterpret_cast<gc::Cell*>(v);
     }
-    static void postBarrier(JSObject** vp, JSObject* prev, JSObject* next) {
-        JS::HeapObjectPostBarrier(vp, prev, next);
+    static bool needsPostBarrier(JSObject* v) {
+        return v != nullptr && gc::IsInsideNursery(reinterpret_cast<gc::Cell*>(v));
+    }
+    static void postBarrier(JSObject** vp) {
+        JS::HeapObjectPostBarrier(vp);
+    }
+    static void relocate(JSObject** vp) {
+        JS::HeapObjectRelocate(vp);
     }
 };
 
 template <>
 struct GCMethods<JSFunction*>
 {
     static JSFunction* initial() { return nullptr; }
-    static void postBarrier(JSFunction** vp, JSFunction* prev, JSFunction* next) {
-        JS::HeapObjectPostBarrier(reinterpret_cast<JSObject**>(vp),
-                                  reinterpret_cast<JSObject*>(prev),
-                                  reinterpret_cast<JSObject*>(next));
+    static bool needsPostBarrier(JSFunction* v) {
+        return v != nullptr && gc::IsInsideNursery(reinterpret_cast<gc::Cell*>(v));
+    }
+    static void postBarrier(JSFunction** vp) {
+        JS::HeapObjectPostBarrier(reinterpret_cast<JSObject**>(vp));
+    }
+    static void relocate(JSFunction** vp) {
+        JS::HeapObjectRelocate(reinterpret_cast<JSObject**>(vp));
     }
 };
 
 } /* namespace js */
 
 namespace JS {
 
 /*
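
The Heap<T>::set logic restored above has to keep the remembered set consistent across all old/new combinations: add an edge when the incoming value is nursery-allocated, drop the stale edge (before overwriting the slot) when only the outgoing value was, and do nothing otherwise. A standalone sketch of the same three-way branch, with a toy remembered set standing in for the store buffer:

    #include <cassert>
    #include <set>

    struct Obj { bool inNursery; };

    static std::set<Obj**> storeBuffer;  // toy remembered set of edges

    static bool needsPostBarrier(Obj* v) { return v && v->inNursery; }
    static void post(Obj** vp)     { storeBuffer.insert(vp); }  // add edge
    static void relocate(Obj** vp) { storeBuffer.erase(vp); }   // drop edge

    static void heapSet(Obj*& slot, Obj* newPtr) {
        if (needsPostBarrier(newPtr)) {
            slot = newPtr;
            post(&slot);       // new value may move: remember this edge
        } else if (needsPostBarrier(slot)) {
            relocate(&slot);   // must run before the slot is overwritten
            slot = newPtr;
        } else {
            slot = newPtr;
        }
    }

    int main() {
        Obj tenured{false}, young{true};
        Obj* slot = nullptr;
        heapSet(slot, &young);   assert(storeBuffer.count(&slot) == 1);
        heapSet(slot, &tenured); assert(storeBuffer.count(&slot) == 0);
    }
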
--- a/js/public/Value.h
+++ b/js/public/Value.h
@@ -1624,35 +1624,38 @@ SameType(const Value& lhs, const Value& 
     return JSVAL_SAME_TYPE_IMPL(lhs.data, rhs.data);
 }
 
 } // namespace JS
 
 /************************************************************************/
 
 namespace JS {
-JS_PUBLIC_API(void) HeapValuePostBarrier(Value* valuep, const Value& prev, const Value& next);
+JS_PUBLIC_API(void) HeapValuePostBarrier(Value* valuep);
+JS_PUBLIC_API(void) HeapValueRelocate(Value* valuep);
 }
 
 namespace js {
 
 template <> struct GCMethods<const JS::Value>
 {
     static JS::Value initial() { return JS::UndefinedValue(); }
 };
 
 template <> struct GCMethods<JS::Value>
 {
     static JS::Value initial() { return JS::UndefinedValue(); }
     static gc::Cell* asGCThingOrNull(const JS::Value& v) {
         return v.isMarkable() ? v.toGCThing() : nullptr;
     }
-    static void postBarrier(JS::Value* v, const JS::Value& prev, const JS::Value& next) {
-        JS::HeapValuePostBarrier(v, prev, next);
+    static bool needsPostBarrier(const JS::Value& v) {
+        return v.isObject() && gc::IsInsideNursery(reinterpret_cast<gc::Cell*>(&v.toObject()));
     }
+    static void postBarrier(JS::Value* v) { JS::HeapValuePostBarrier(v); }
+    static void relocate(JS::Value* v) { JS::HeapValueRelocate(v); }
 };
 
 template <class Outer> class MutableValueOperations;
 
 /*
  * A class designed for CRTP use in implementing the non-mutating parts of the
  * Value interface in Value-like classes.  Outer must be a class inheriting
  * ValueOperations<Outer> with a visible extract() method returning the
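
needsPostBarrier for Value above relies on the fact that, in this era of SpiderMonkey, only objects could live in the nursery (strings and symbols were always tenured), so only object-tagged values can ever need a store-buffer entry. A minimal tagged-value model of the same check:

    struct ToyCell { bool inNursery; };

    enum class Tag { Double, Int32, Boolean, Object, String };
    struct ToyValue { Tag tag; ToyCell* payload; };

    // Stand-in for gc::IsInsideNursery.
    static bool IsInsideNursery(const ToyCell* cell) { return cell->inNursery; }

    static bool needsPostBarrier(const ToyValue& v) {
        return v.tag == Tag::Object && IsInsideNursery(v.payload);
    }
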
--- a/js/src/gc/Barrier.cpp
+++ b/js/src/gc/Barrier.cpp
@@ -85,20 +85,36 @@ js::PreBarrierFunctor<S>::operator()(T* 
 }
 template void js::PreBarrierFunctor<JS::Value>::operator()<JS::Symbol>(JS::Symbol*);
 template void js::PreBarrierFunctor<JS::Value>::operator()<JSObject>(JSObject*);
 template void js::PreBarrierFunctor<JS::Value>::operator()<JSString>(JSString*);
 template void js::PreBarrierFunctor<jsid>::operator()<JS::Symbol>(JS::Symbol*);
 template void js::PreBarrierFunctor<jsid>::operator()<JSString>(JSString*);
 
 JS_PUBLIC_API(void)
-JS::HeapObjectPostBarrier(JSObject** objp, JSObject* prev, JSObject* next)
+JS::HeapObjectPostBarrier(JSObject** objp)
 {
     MOZ_ASSERT(objp);
-    js::InternalGCMethods<JSObject*>::postBarrier(objp, prev, next);
+    MOZ_ASSERT(*objp);
+    js::InternalGCMethods<JSObject*>::postBarrierRelocate(objp);
 }
 
 JS_PUBLIC_API(void)
-JS::HeapValuePostBarrier(JS::Value* valuep, const Value& prev, const Value& next)
+JS::HeapObjectRelocate(JSObject** objp)
+{
+    MOZ_ASSERT(objp);
+    MOZ_ASSERT(*objp);
+    js::InternalGCMethods<JSObject*>::postBarrierRemove(objp);
+}
+
+JS_PUBLIC_API(void)
+JS::HeapValuePostBarrier(JS::Value* valuep)
 {
     MOZ_ASSERT(valuep);
-    js::InternalGCMethods<JS::Value>::postBarrier(valuep, prev, next);
+    js::InternalGCMethods<JS::Value>::postBarrierRelocate(valuep);
 }
+
+JS_PUBLIC_API(void)
+JS::HeapValueRelocate(JS::Value* valuep)
+{
+    MOZ_ASSERT(valuep);
+    js::InternalGCMethods<JS::Value>::postBarrierRemove(valuep);
+}
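
Note the naming across the API layers restored here: the public JS::Heap*PostBarrier entry points forward to the internal postBarrierRelocate (which inserts a store-buffer edge), while the public *Relocate entry points forward to postBarrierRemove (which deletes one). The call chains, summarized from the hunks in this patch:

    Heap<T>::post()     -> GCMethods<JSObject*>::postBarrier
                        -> JS::HeapObjectPostBarrier
                        -> InternalGCMethods<JSObject*>::postBarrierRelocate
                        -> JSObject::writeBarrierPostRelocate
                        -> StoreBuffer::putCellFromAnyThread      (adds the edge)

    Heap<T>::relocate() -> GCMethods<JSObject*>::relocate
                        -> JS::HeapObjectRelocate
                        -> InternalGCMethods<JSObject*>::postBarrierRemove
                        -> JSObject::writeBarrierPostRemove
                        -> StoreBuffer::unputCellFromAnyThread    (removes the edge)
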
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -234,19 +234,19 @@ struct InternalGCMethods {};
 
 template <typename T>
 struct InternalGCMethods<T*>
 {
     static bool isMarkable(T* v) { return v != nullptr; }
 
     static void preBarrier(T* v) { T::writeBarrierPre(v); }
 
-    static void postBarrier(T** vp, T* prior, T* next) {
-        return T::writeBarrierPost(vp, prior, next);
-    }
+    static void postBarrier(T** vp) { T::writeBarrierPost(*vp, vp); }
+    static void postBarrierRelocate(T** vp) { T::writeBarrierPostRelocate(*vp, vp); }
+    static void postBarrierRemove(T** vp) { T::writeBarrierPostRemove(*vp, vp); }
 
     static void readBarrier(T* v) { T::readBarrier(v); }
 };
 
 template <typename S> struct PreBarrierFunctor : VoidDefaultAdaptor<S> {
     template <typename T> void operator()(T* t);
 };
 
@@ -258,49 +258,57 @@ template <>
 struct InternalGCMethods<Value>
 {
     static bool isMarkable(Value v) { return v.isMarkable(); }
 
     static void preBarrier(Value v) {
         DispatchValueTyped(PreBarrierFunctor<Value>(), v);
     }
 
-    static void postBarrier(Value* vp, const Value& prev, const Value& next) {
+    static void postBarrier(Value* vp) {
         MOZ_ASSERT(!CurrentThreadIsIonCompiling());
-        MOZ_ASSERT(vp);
+        if (vp->isObject()) {
+            gc::StoreBuffer* sb = reinterpret_cast<gc::Cell*>(&vp->toObject())->storeBuffer();
+            if (sb)
+                sb->putValueFromAnyThread(vp);
+        }
+    }
 
-        // If the target needs an entry, add it.
-        js::gc::StoreBuffer* sb;
-        if (next.isObject() && (sb = reinterpret_cast<gc::Cell*>(&next.toObject())->storeBuffer())) {
-            // If we know that the prev has already inserted an entry, we can skip
-            // doing the lookup to add the new entry.
-            if (prev.isObject() && reinterpret_cast<gc::Cell*>(&prev.toObject())->storeBuffer()) {
-                sb->assertHasValueEdge(vp);
-                return;
-            }
-            sb->putValueFromAnyThread(vp);
-            return;
+    static void postBarrierRelocate(Value* vp) {
+        MOZ_ASSERT(!CurrentThreadIsIonCompiling());
+        if (vp->isObject()) {
+            gc::StoreBuffer* sb = reinterpret_cast<gc::Cell*>(&vp->toObject())->storeBuffer();
+            if (sb)
+                sb->putValueFromAnyThread(vp);
         }
-        // Remove the prev entry if the new value does not need it.
-        if (prev.isObject() && (sb = reinterpret_cast<gc::Cell*>(&prev.toObject())->storeBuffer()))
-            sb->unputValueFromAnyThread(vp);
+    }
+
+    static void postBarrierRemove(Value* vp) {
+        MOZ_ASSERT(vp);
+        MOZ_ASSERT(vp->isMarkable());
+        MOZ_ASSERT(!CurrentThreadIsIonCompiling());
+        JSRuntime* rt = static_cast<js::gc::Cell*>(vp->toGCThing())->runtimeFromAnyThread();
+        JS::shadow::Runtime* shadowRuntime = JS::shadow::Runtime::asShadowRuntime(rt);
+        shadowRuntime->gcStoreBufferPtr()->unputValueFromAnyThread(vp);
     }
 
     static void readBarrier(const Value& v) {
         DispatchValueTyped(ReadBarrierFunctor<Value>(), v);
     }
 };
 
 template <>
 struct InternalGCMethods<jsid>
 {
     static bool isMarkable(jsid id) { return JSID_IS_STRING(id) || JSID_IS_SYMBOL(id); }
 
     static void preBarrier(jsid id) { DispatchIdTyped(PreBarrierFunctor<jsid>(), id); }
-    static void postBarrier(jsid* idp, jsid prev, jsid next) {}
+    static void postBarrier(jsid* idp) {}
+    static void postBarrierRelocate(jsid* idp) {}
+    static void postBarrierRemove(jsid* idp) {}
 };
 
 template <typename T>
 class BarrieredBaseMixins {};
 
 /*
  * Base class for barriered pointer types.
  */
@@ -328,16 +336,17 @@ class BarrieredBase : public BarrieredBa
      * Obviously this is dangerous unless you know the barrier is not needed.
      */
     T* unsafeGet() { return &value; }
     const T* unsafeGet() const { return &value; }
     void unsafeSet(T v) { value = v; }
 
     /* For users who need to manually barrier the raw types. */
     static void writeBarrierPre(const T& v) { InternalGCMethods<T>::preBarrier(v); }
+    static void writeBarrierPost(const T& v, T* vp) { InternalGCMethods<T>::postBarrier(vp); }
 
   protected:
     void pre() { InternalGCMethods<T>::preBarrier(value); }
 };
 
 template <>
 class BarrieredBaseMixins<JS::Value> : public ValueOperations<BarrieredBase<JS::Value> >
 {
@@ -395,42 +404,41 @@ class PreBarriered : public BarrieredBas
  * implemented by RelocatablePtr<T> or JS::Heap<T> at the cost of not
  * automatically handling deletion or movement.
  */
 template <class T>
 class HeapPtr : public BarrieredBase<T>
 {
   public:
     HeapPtr() : BarrieredBase<T>(GCMethods<T>::initial()) {}
-    explicit HeapPtr(T v) : BarrieredBase<T>(v) { post(GCMethods<T>::initial(), v); }
-    explicit HeapPtr(const HeapPtr<T>& v) : BarrieredBase<T>(v) { post(GCMethods<T>::initial(), v); }
+    explicit HeapPtr(T v) : BarrieredBase<T>(v) { post(); }
+    explicit HeapPtr(const HeapPtr<T>& v) : BarrieredBase<T>(v) { post(); }
 #ifdef DEBUG
     ~HeapPtr() {
         // No prebarrier necessary as this only happens when we are sweeping or
        // before the containing object becomes part of the GC graph.
         MOZ_ASSERT(CurrentThreadIsGCSweeping() || CurrentThreadIsHandlingInitFailure());
     }
 #endif
 
     void init(T v) {
         this->value = v;
-        post(GCMethods<T>::initial(), v);
+        post();
     }
 
     DECLARE_POINTER_ASSIGN_OPS(HeapPtr, T);
 
   protected:
-    void post(T prev, T next) { InternalGCMethods<T>::postBarrier(&this->value, prev, next); }
+    void post() { InternalGCMethods<T>::postBarrier(&this->value); }
 
   private:
     void set(const T& v) {
         this->pre();
-        T tmp = this->value;
         this->value = v;
-        post(tmp, this->value);
+        post();
     }
 
     /*
      * Unlike RelocatablePtr<T>, HeapPtr<T> must be managed with GC lifetimes.
      * Specifically, the memory used by the pointer itself must be live until
      * at least the next minor GC. For that reason, move semantics are invalid
      * and are deleted here. Please note that not all containers support move
      * semantics, so this does not completely prevent invalid uses.
@@ -481,32 +489,35 @@ class ImmutableTenuredPtr
  * used in contexts where this ability is necessary.
  */
 template <class T>
 class RelocatablePtr : public BarrieredBase<T>
 {
   public:
     RelocatablePtr() : BarrieredBase<T>(GCMethods<T>::initial()) {}
     explicit RelocatablePtr(T v) : BarrieredBase<T>(v) {
-        post(GCMethods<T>::initial(), this->value);
+        if (GCMethods<T>::needsPostBarrier(v))
+            post();
     }
 
     /*
      * For RelocatablePtr, move semantics are equivalent to copy semantics. In
      * C++, a copy constructor taking const-ref is the way to get a single
      * function that will be used for both lvalue and rvalue copies, so we can
      * simply omit the rvalue variant.
      */
     RelocatablePtr(const RelocatablePtr<T>& v) : BarrieredBase<T>(v) {
-        post(GCMethods<T>::initial(), this->value);
+        if (GCMethods<T>::needsPostBarrier(this->value))
+            post();
     }
 
     ~RelocatablePtr() {
         this->pre();
-        post(this->value, GCMethods<T>::initial());
+        if (GCMethods<T>::needsPostBarrier(this->value))
+            relocate();
     }
 
     DECLARE_POINTER_ASSIGN_OPS(RelocatablePtr, T);
 
     /* Make this friend so it can access pre() and post(). */
     template <class T1, class T2>
     friend inline void
     BarrieredSetPair(Zone* zone,
@@ -515,23 +526,35 @@ class RelocatablePtr : public BarrieredB
 
   protected:
     void set(const T& v) {
         this->pre();
         postBarrieredSet(v);
     }
 
     void postBarrieredSet(const T& v) {
-        T tmp = this->value;
-        this->value = v;
-        post(tmp, this->value);
+        if (GCMethods<T>::needsPostBarrier(v)) {
+            this->value = v;
+            post();
+        } else if (GCMethods<T>::needsPostBarrier(this->value)) {
+            relocate();
+            this->value = v;
+        } else {
+            this->value = v;
+        }
     }
 
-    void post(T prev, T next) {
-        InternalGCMethods<T>::postBarrier(&this->value, prev, next);
+    void post() {
+        MOZ_ASSERT(GCMethods<T>::needsPostBarrier(this->value));
+        InternalGCMethods<T>::postBarrierRelocate(&this->value);
+    }
+
+    void relocate() {
+        MOZ_ASSERT(GCMethods<T>::needsPostBarrier(this->value));
+        InternalGCMethods<T>::postBarrierRemove(&this->value);
     }
 };
 
 /*
  * This is a hack for RegExpStatics::updateFromMatch. It allows us to do two
  * barriers with only one branch to check if we're in an incremental GC.
  */
 template <class T1, class T2>
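
The two owner classes above differ in who is responsible for edge removal. HeapPtr may only live in memory with GC lifetime, so the slot address stays valid through the next minor GC and its destructor never removes an edge; RelocatablePtr can live in arbitrarily managed memory, so its destructor must drop its store-buffer edge before that memory disappears. A compact standalone sketch of the RelocatablePtr obligation (toy types, not the real classes):

    #include <set>

    struct ToyObj { bool inNursery; };

    static std::set<ToyObj**> remembered;                     // toy store buffer
    static bool young(ToyObj* v)      { return v && v->inNursery; }
    static void post(ToyObj** vp)     { remembered.insert(vp); }
    static void relocate(ToyObj** vp) { remembered.erase(vp); }

    struct RelocatableSlot {
        ToyObj* value;
        explicit RelocatableSlot(ToyObj* v) : value(v) {
            if (young(value))
                post(&value);
        }
        ~RelocatableSlot() {
            if (young(value))
                relocate(&value);  // the edge must not outlive this memory
        }
    };
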
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -280,18 +280,19 @@ class TenuredCell : public Cell
     }
     MOZ_ALWAYS_INLINE JS::shadow::Zone* shadowZoneFromAnyThread() const {
         return JS::shadow::Zone::asShadowZone(zoneFromAnyThread());
     }
 
     static MOZ_ALWAYS_INLINE void readBarrier(TenuredCell* thing);
     static MOZ_ALWAYS_INLINE void writeBarrierPre(TenuredCell* thing);
 
-    static MOZ_ALWAYS_INLINE void writeBarrierPost(void* cellp, TenuredCell* prior,
-                                                   TenuredCell* next);
+    static MOZ_ALWAYS_INLINE void writeBarrierPost(TenuredCell* thing, void* cellp);
+    static MOZ_ALWAYS_INLINE void writeBarrierPostRelocate(TenuredCell* thing, void* cellp);
+    static MOZ_ALWAYS_INLINE void writeBarrierPostRemove(TenuredCell* thing, void* cellp);
 
 #ifdef DEBUG
     inline bool isAligned() const;
 #endif
 };
 
 /*
  * The mark bitmap has one bit per each GC cell. For multi-cell GC things this
@@ -1464,19 +1465,31 @@ TenuredCell::writeBarrierPre(TenuredCell
 static MOZ_ALWAYS_INLINE void
 AssertValidToSkipBarrier(TenuredCell* thing)
 {
     MOZ_ASSERT(!IsInsideNursery(thing));
     MOZ_ASSERT_IF(thing, MapAllocToTraceKind(thing->getAllocKind()) != JS::TraceKind::Object);
 }
 
 /* static */ MOZ_ALWAYS_INLINE void
-TenuredCell::writeBarrierPost(void* cellp, TenuredCell* prior, TenuredCell* next)
+TenuredCell::writeBarrierPost(TenuredCell* thing, void* cellp)
+{
+    AssertValidToSkipBarrier(thing);
+}
+
+/* static */ MOZ_ALWAYS_INLINE void
+TenuredCell::writeBarrierPostRelocate(TenuredCell* thing, void* cellp)
 {
-    AssertValidToSkipBarrier(next);
+    AssertValidToSkipBarrier(thing);
+}
+
+/* static */ MOZ_ALWAYS_INLINE void
+TenuredCell::writeBarrierPostRemove(TenuredCell* thing, void* cellp)
+{
+    AssertValidToSkipBarrier(thing);
 }
 
 #ifdef DEBUG
 bool
 Cell::isAligned() const
 {
     if (!isTenured())
         return true;
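
The three TenuredCell variants above reduce to assertions because a post barrier exists only to record edges into the nursery, and a TenuredCell by definition is not there; a debug build still checks that nothing nursery-allocated slipped through. The toy equivalent:

    #include <cassert>

    struct ToyTenured { bool inNursery() const { return false; } };

    static void writeBarrierPost(ToyTenured* thing, void* /* cellp */) {
        assert(!thing || !thing->inNursery());  // nothing to record for tenured cells
    }
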
--- a/js/src/gc/StoreBuffer.h
+++ b/js/src/gc/StoreBuffer.h
@@ -86,20 +86,16 @@ class StoreBuffer
                 owner->setAboutToOverflow();
         }
 
         /* Remove an item from the store buffer. */
         void unput(StoreBuffer* owner, const T& v) {
             stores_.remove(v);
         }
 
-        bool has(const T& v) const {
-            return stores_.has(v);
-        }
-
         /* Trace the source of all edges in the store buffer. */
         void trace(StoreBuffer* owner, TenuringTracer& mover);
 
         size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) {
             return stores_.sizeOfExcludingThis(mallocSizeOf);
         }
 
       private:
@@ -402,19 +398,16 @@ class StoreBuffer
     void putGeneric(const T& t) { putFromAnyThread(bufferGeneric, t);}
 
     /* Insert or update a callback entry. */
     template <typename Key>
     void putCallback(void (*callback)(JSTracer* trc, Key* key, void* data), Key* key, void* data) {
         putFromAnyThread(bufferGeneric, CallbackRef<Key>(callback, key, data));
     }
 
-    void assertHasCellEdge(Cell** cellp) const { MOZ_ASSERT(bufferCell.has(CellPtrEdge(cellp))); }
-    void assertHasValueEdge(Value* vp) const { MOZ_ASSERT(bufferVal.has(ValueEdge(vp))); }
-
     void setShouldCancelIonCompilations() {
         cancelIonCompilations_ = true;
     }
 
     /* Methods to trace the source of all edges in the store buffer. */
     void traceValues(TenuringTracer& mover)            { bufferVal.trace(this, mover); }
     void traceCells(TenuringTracer& mover)             { bufferCell.trace(this, mover); }
     void traceSlots(TenuringTracer& mover)             { bufferSlot.trace(this, mover); }
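
With the backout, the barriers go back to unconditional put/unput calls, so the has()/assertHas*Edge debugging hooks removed above lose their only callers (they existed to validate the prev/next optimization that skipped redundant inserts). A toy model of the put/unput pair the restored code relies on:

    #include <unordered_set>

    // Stand-in for StoreBuffer::MonoTypeBuffer; Edge is assumed hashable
    // (e.g. a raw slot address).
    template <typename Edge>
    struct ToyMonoTypeBuffer {
        std::unordered_set<Edge> stores_;
        void put(const Edge& e)   { stores_.insert(e); }  // duplicate puts are harmless
        void unput(const Edge& e) { stores_.erase(e); }   // removing a missing edge is a no-op
    };
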
--- a/js/src/jsobj.h
+++ b/js/src/jsobj.h
@@ -305,17 +305,19 @@ class JSObject : public js::gc::Cell
     MOZ_ALWAYS_INLINE JS::Zone* zoneFromAnyThread() const {
         return group_->zoneFromAnyThread();
     }
     MOZ_ALWAYS_INLINE JS::shadow::Zone* shadowZoneFromAnyThread() const {
         return JS::shadow::Zone::asShadowZone(zoneFromAnyThread());
     }
     static MOZ_ALWAYS_INLINE void readBarrier(JSObject* obj);
     static MOZ_ALWAYS_INLINE void writeBarrierPre(JSObject* obj);
-    static MOZ_ALWAYS_INLINE void writeBarrierPost(void* cellp, JSObject* prev, JSObject* next);
+    static MOZ_ALWAYS_INLINE void writeBarrierPost(JSObject* obj, void* cellp);
+    static MOZ_ALWAYS_INLINE void writeBarrierPostRelocate(JSObject* obj, void* cellp);
+    static MOZ_ALWAYS_INLINE void writeBarrierPostRemove(JSObject* obj, void* cellp);
 
     /* Return the allocKind we would use if we were to tenure this object. */
     js::gc::AllocKind allocKindForTenure(const js::Nursery& nursery) const;
 
     size_t tenuredSizeOfThis() const {
         MOZ_ASSERT(isTenured());
         return js::gc::Arena::thingSize(asTenured().getAllocKind());
     }
@@ -623,36 +625,46 @@ JSObject::readBarrier(JSObject* obj)
 /* static */ MOZ_ALWAYS_INLINE void
 JSObject::writeBarrierPre(JSObject* obj)
 {
     if (!isNullLike(obj) && obj->isTenured())
         obj->asTenured().writeBarrierPre(&obj->asTenured());
 }
 
 /* static */ MOZ_ALWAYS_INLINE void
-JSObject::writeBarrierPost(void* cellp, JSObject* prev, JSObject* next)
+JSObject::writeBarrierPost(JSObject* obj, void* cellp)
+{
+    MOZ_ASSERT(cellp);
+    if (IsNullTaggedPointer(obj))
+        return;
+    MOZ_ASSERT(obj == *static_cast<JSObject**>(cellp));
+    js::gc::StoreBuffer* storeBuffer = obj->storeBuffer();
+    if (storeBuffer)
+        storeBuffer->putCellFromAnyThread(static_cast<js::gc::Cell**>(cellp));
+}
+
+/* static */ MOZ_ALWAYS_INLINE void
+JSObject::writeBarrierPostRelocate(JSObject* obj, void* cellp)
 {
     MOZ_ASSERT(cellp);
+    MOZ_ASSERT(obj);
+    MOZ_ASSERT(obj == *static_cast<JSObject**>(cellp));
+    js::gc::StoreBuffer* storeBuffer = obj->storeBuffer();
+    if (storeBuffer)
+        storeBuffer->putCellFromAnyThread(static_cast<js::gc::Cell**>(cellp));
+}
 
-    // If the target needs an entry, add it.
-    js::gc::StoreBuffer* buffer;
-    if (!IsNullTaggedPointer(next) && (buffer = next->storeBuffer())) {
-        // If we know that the prev has already inserted an entry, we can skip
-        // doing the lookup to add the new entry.
-        if (!IsNullTaggedPointer(prev) && prev->storeBuffer()) {
-            buffer->assertHasCellEdge(static_cast<js::gc::Cell**>(cellp));
-            return;
-        }
-        buffer->putCellFromAnyThread(static_cast<js::gc::Cell**>(cellp));
-        return;
-    }
-
-    // Remove the prev entry if the new value does not need it.
-    if (!IsNullTaggedPointer(prev) && (buffer = prev->storeBuffer()))
-        buffer->unputCellFromAnyThread(static_cast<js::gc::Cell**>(cellp));
+/* static */ MOZ_ALWAYS_INLINE void
+JSObject::writeBarrierPostRemove(JSObject* obj, void* cellp)
+{
+    MOZ_ASSERT(cellp);
+    MOZ_ASSERT(obj);
+    MOZ_ASSERT(obj == *static_cast<JSObject**>(cellp));
+    obj->shadowRuntimeFromAnyThread()->gcStoreBufferPtr()->unputCellFromAnyThread(
+        static_cast<js::gc::Cell**>(cellp));
 }
 
 namespace js {
 
 inline bool
 IsCallable(const Value& v)
 {
     return v.isObject() && v.toObject().isCallable();
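
writeBarrierPost above folds the nursery check into the buffer lookup: in this version of the engine, Cell::storeBuffer() returns a non-null pointer exactly when the cell lives in a nursery chunk, so "fetch the buffer, put the edge if non-null" is both the test and the action. A simplified model:

    struct ToyStoreBuffer {
        void putCell(void** cellp) { /* record the edge */ }
    };

    struct ToyCell {
        ToyStoreBuffer* buffer;  // non-null only for nursery-allocated cells
        ToyStoreBuffer* storeBuffer() const { return buffer; }
    };

    static void writeBarrierPost(ToyCell* obj, void** cellp) {
        if (!obj)
            return;
        if (ToyStoreBuffer* sb = obj->storeBuffer())
            sb->putCell(cellp);  // tenured target: sb is null, nothing to do
    }
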
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -342,18 +342,18 @@ class NewObjectCache
         entry->kind = kind;
 
         entry->nbytes = gc::Arena::thingSize(kind);
         js_memcpy(&entry->templateObject, obj, entry->nbytes);
     }
 
     static void copyCachedToObject(NativeObject* dst, NativeObject* src, gc::AllocKind kind) {
         js_memcpy(dst, src, gc::Arena::thingSize(kind));
-        Shape::writeBarrierPost(&dst->shape_, nullptr, dst->shape_);
-        ObjectGroup::writeBarrierPost(&dst->group_, nullptr, dst->group_);
+        Shape::writeBarrierPost(dst->shape_, &dst->shape_);
+        ObjectGroup::writeBarrierPost(dst->group_, &dst->group_);
     }
 };
 
 /*
  * A FreeOp can do one thing: free memory. For convenience, it has delete_
  * convenience methods that also call destructors.
  *
  * FreeOp is passed to finalizers and other sweep-phase hooks so that we do not
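
copyCachedToObject above uses js_memcpy, which bypasses the barriered setters; any GC pointers inside the copied bytes therefore need their post barriers run by hand afterwards, now with the restored (thing, address) argument order. A toy version of the pattern:

    #include <cstring>

    struct ToyShape {};
    struct ToyGroup {};
    struct ToyObject { ToyShape* shape_; ToyGroup* group_; };

    // Stand-in for Shape::writeBarrierPost / ObjectGroup::writeBarrierPost.
    template <typename T>
    static void writeBarrierPost(T* /* thing */, T** /* addr */) { /* record edge */ }

    static void copyCachedToObject(ToyObject* dst, const ToyObject* src) {
        std::memcpy(dst, src, sizeof(ToyObject));     // raw copy runs no barriers
        writeBarrierPost(dst->shape_, &dst->shape_);  // so re-run them by hand
        writeBarrierPost(dst->group_, &dst->group_);
    }
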
--- a/js/src/vm/Stack.cpp
+++ b/js/src/vm/Stack.cpp
@@ -102,16 +102,35 @@ InterpreterFrame::initExecuteFrame(JSCon
     if (script->isDebuggee())
         setIsDebuggee();
 
 #ifdef DEBUG
     Debug_SetValueRangeToCrashOnTouch(&rval_, 1);
 #endif
 }
 
+void
+InterpreterFrame::writeBarrierPost()
+{
+    /* This needs to follow the same rules as in InterpreterFrame::mark. */
+    if (scopeChain_)
+        JSObject::writeBarrierPost(scopeChain_, &scopeChain_);
+    if (flags_ & HAS_ARGS_OBJ)
+        JSObject::writeBarrierPost(argsObj_, &argsObj_);
+    if (isFunctionFrame()) {
+        JSFunction::writeBarrierPost(exec.fun, &exec.fun);
+        if (isEvalFrame())
+            JSScript::writeBarrierPost(u.evalScript, &u.evalScript);
+    } else {
+        JSScript::writeBarrierPost(exec.script, &exec.script);
+    }
+    if (hasReturnValue())
+        HeapValue::writeBarrierPost(rval_, &rval_);
+}
+
 bool
 InterpreterFrame::copyRawFrameSlots(AutoValueVector* vec)
 {
     if (!vec->resize(numFormalArgs() + script()->nfixed()))
         return false;
     PodCopy(vec->begin(), argv(), numFormalArgs());
     PodCopy(vec->begin() + numFormalArgs(), slots(), script()->nfixed());
     return true;
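
InterpreterFrame is not itself a GC cell, so the new writeBarrierPost above barriers each GC pointer the frame holds individually, and its comment ties the field list to InterpreterFrame::mark so the two stay in sync. A simplified sketch of the shape of that method (toy types; the real frame barriers scopeChain_, argsObj_, exec, and rval_ as shown above):

    struct ToyThing { bool inNursery; };

    // Stand-in for the per-type writeBarrierPost(thing, address) entry points.
    static void postBarrier(ToyThing* thing, ToyThing** /* addr */) {
        if (thing && thing->inNursery) { /* record the edge */ }
    }

    struct ToyFrame {
        ToyThing* scopeChain = nullptr;
        ToyThing* callee = nullptr;   // function frames only
        bool isFunctionFrame = false;

        void writeBarrierPost() {
            if (scopeChain)
                postBarrier(scopeChain, &scopeChain);
            if (isFunctionFrame && callee)
                postBarrier(callee, &callee);
        }
    };
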
--- a/js/src/vm/Stack.h
+++ b/js/src/vm/Stack.h
@@ -376,16 +376,18 @@ class InterpreterFrame
     Value*              argv_;         /* If hasArgs(), points to frame's arguments. */
     LifoAlloc::Mark     mark_;          /* Used to release memory for this frame. */
 
     static void staticAsserts() {
         JS_STATIC_ASSERT(offsetof(InterpreterFrame, rval_) % sizeof(Value) == 0);
         JS_STATIC_ASSERT(sizeof(InterpreterFrame) % sizeof(Value) == 0);
     }
 
+    void writeBarrierPost();
+
     /*
      * The utilities are private since they are not able to assert that only
      * unaliased vars/formals are accessed. Normal code should prefer the
      * InterpreterFrame::unaliased* members (or InterpreterRegs::stackDepth for
      * the usual "depth is at least" assertions).
      */
     Value* slots() const { return (Value*)(this + 1); }
     Value* base() const { return slots() + script()->nfixed(); }