Bug 1175642 - Fix the interface that RelocatablePtr uses to interact with the StoreBuffer; r=jonco
authorTerrence Cole <terrence@mozilla.com>
Thu, 18 Jun 2015 10:23:49 -0700
changeset 282715 3c61b61ea4a2541ade243726a967163d849f264a
parent 282714 a067d2f467ea0dbe4a8f826fa6a15a66c5841e7b
child 282716 85ce4ef19b930ca33d29d42e7e417c25cf571866
push idunknown
push userunknown
push dateunknown
reviewersjonco
bugs1175642, 1175466
milestone42.0a1
Bug 1175642 - Fix the interface that RelocatablePtr uses to interact with the StoreBuffer; r=jonco * * * Bug 1175466 - "Allocate arguments objects in the nursery" [r=terrence]
dom/xbl/nsXBLMaybeCompiled.h
js/public/Id.h
js/public/RootingAPI.h
js/public/Value.h
js/src/gc/Barrier.cpp
js/src/gc/Barrier.h
js/src/gc/Heap.h
js/src/gc/StoreBuffer.h
js/src/jsobj.h
js/src/vm/Runtime.h
--- a/dom/xbl/nsXBLMaybeCompiled.h
+++ b/dom/xbl/nsXBLMaybeCompiled.h
@@ -87,29 +87,29 @@ namespace js {
 
 template <class UncompiledT>
 struct GCMethods<nsXBLMaybeCompiled<UncompiledT> >
 {
   typedef struct GCMethods<JSObject *> Base;
 
   static nsXBLMaybeCompiled<UncompiledT> initial() { return nsXBLMaybeCompiled<UncompiledT>(); }
 
-  static bool needsPostBarrier(nsXBLMaybeCompiled<UncompiledT> function)
-  {
-    return function.IsCompiled() && Base::needsPostBarrier(function.GetJSFunction());
-  }
-
-  static void postBarrier(nsXBLMaybeCompiled<UncompiledT>* functionp)
+  static void postBarrier(nsXBLMaybeCompiled<UncompiledT>* functionp,
+                          nsXBLMaybeCompiled<UncompiledT> prev,
+                          nsXBLMaybeCompiled<UncompiledT> next)
   {
-    Base::postBarrier(&functionp->UnsafeGetJSFunction());
-  }
-
-  static void relocate(nsXBLMaybeCompiled<UncompiledT>* functionp)
-  {
-    Base::relocate(&functionp->UnsafeGetJSFunction());
+    if (next.IsCompiled()) {
+      Base::postBarrier(&functionp->UnsafeGetJSFunction(),
+                        prev.IsCompiled() ? prev.UnsafeGetJSFunction() : nullptr,
+                        next.UnsafeGetJSFunction());
+    } else if (prev.IsCompiled()) {
+      Base::postBarrier(&prev.UnsafeGetJSFunction(),
+                        prev.UnsafeGetJSFunction(),
+                        nullptr);
+    }
   }
 };
 
 template <class UncompiledT>
 class HeapBase<nsXBLMaybeCompiled<UncompiledT> >
 {
   const JS::Heap<nsXBLMaybeCompiled<UncompiledT> >& wrapper() const {
     return *static_cast<const JS::Heap<nsXBLMaybeCompiled<UncompiledT> >*>(this);
--- a/js/public/Id.h
+++ b/js/public/Id.h
@@ -166,19 +166,17 @@ extern JS_PUBLIC_DATA(const jsid) JSID_E
 extern JS_PUBLIC_DATA(const JS::HandleId) JSID_VOIDHANDLE;
 extern JS_PUBLIC_DATA(const JS::HandleId) JSID_EMPTYHANDLE;
 
 namespace js {
 
 template <> struct GCMethods<jsid>
 {
     static jsid initial() { return JSID_VOID; }
-    static bool needsPostBarrier(jsid id) { return false; }
-    static void postBarrier(jsid* idp) {}
-    static void relocate(jsid* idp) {}
+    static void postBarrier(jsid* idp, jsid prev, jsid next) {}
 };
 
 // If the jsid is a GC pointer type, convert to that type and call |f| with
 // the pointer. If the jsid is not a GC type, calls F::defaultValue.
 template <typename F, typename... Args>
 auto
 DispatchIdTyped(F f, jsid& id, Args&&... args)
   -> decltype(f(static_cast<JSString*>(nullptr), mozilla::Forward<Args>(args)...))
--- a/js/public/RootingAPI.h
+++ b/js/public/RootingAPI.h
@@ -169,18 +169,17 @@ struct PersistentRootedMarker;
 namespace JS {
 
 template <typename T> class Rooted;
 template <typename T> class PersistentRooted;
 
 /* This is exposing internal state of the GC for inlining purposes. */
 JS_FRIEND_API(bool) isGCEnabled();
 
-JS_FRIEND_API(void) HeapObjectPostBarrier(JSObject** objp);
-JS_FRIEND_API(void) HeapObjectRelocate(JSObject** objp);
+JS_FRIEND_API(void) HeapObjectPostBarrier(JSObject** objp, JSObject* prev, JSObject* next);
 
 #ifdef JS_DEBUG
 /*
  * For generational GC, assert that an object is in the tenured generation as
  * opposed to being in the nursery.
  */
 extern JS_FRIEND_API(void)
 AssertGCThingMustBeTenured(JSObject* obj);
@@ -227,18 +226,17 @@ class Heap : public js::HeapBase<T>
      * For Heap, move semantics are equivalent to copy semantics. In C++, a
      * copy constructor taking const-ref is the way to get a single function
      * that will be used for both lvalue and rvalue copies, so we can simply
      * omit the rvalue variant.
      */
     explicit Heap(const Heap<T>& p) { init(p.ptr); }
 
     ~Heap() {
-        if (js::GCMethods<T>::needsPostBarrier(ptr))
-            relocate();
+        post(ptr, js::GCMethods<T>::initial());
     }
 
     DECLARE_POINTER_CONSTREF_OPS(T);
     DECLARE_POINTER_ASSIGN_OPS(Heap, T);
     DECLARE_NONPOINTER_ACCESSOR_METHODS(ptr);
 
     T* unsafeGet() { return &ptr; }
 
@@ -252,39 +250,27 @@ class Heap : public js::HeapBase<T>
 
     bool isSetToCrashOnTouch() {
         return ptr == crashOnTouchPointer;
     }
 
   private:
     void init(T newPtr) {
         ptr = newPtr;
-        if (js::GCMethods<T>::needsPostBarrier(ptr))
-            post();
+        post(js::GCMethods<T>::initial(), ptr);
     }
 
     void set(T newPtr) {
-        if (js::GCMethods<T>::needsPostBarrier(newPtr)) {
-            ptr = newPtr;
-            post();
-        } else if (js::GCMethods<T>::needsPostBarrier(ptr)) {
-            relocate();  /* Called before overwriting ptr. */
-            ptr = newPtr;
-        } else {
-            ptr = newPtr;
-        }
+        T tmp = ptr;
+        ptr = newPtr;
+        post(tmp, ptr);
     }
 
-    void post() {
-        MOZ_ASSERT(js::GCMethods<T>::needsPostBarrier(ptr));
-        js::GCMethods<T>::postBarrier(&ptr);
-    }
-
-    void relocate() {
-        js::GCMethods<T>::relocate(&ptr);
+    void post(const T& prev, const T& next) {
+        js::GCMethods<T>::postBarrier(&ptr, prev, next);
     }
 
     enum {
         crashOnTouchPointer = 1
     };
 
     T ptr;
 };
@@ -599,57 +585,46 @@ struct RootKind<T*>
 {
     static ThingRootKind rootKind() { return T::rootKind(); }
 };
 
 template <typename T>
 struct GCMethods<T*>
 {
     static T* initial() { return nullptr; }
-    static bool needsPostBarrier(T* v) { return false; }
-    static void postBarrier(T** vp) {
-        if (vp)
-            JS::AssertGCThingIsNotAnObjectSubclass(reinterpret_cast<js::gc::Cell*>(vp));
+    static void postBarrier(T** vp, T* prev, T* next) {
+        if (next)
+            JS::AssertGCThingIsNotAnObjectSubclass(reinterpret_cast<js::gc::Cell*>(next));
     }
     static void relocate(T** vp) {}
 };
 
 template <>
 struct GCMethods<JSObject*>
 {
     static JSObject* initial() { return nullptr; }
     static gc::Cell* asGCThingOrNull(JSObject* v) {
         if (!v)
             return nullptr;
         MOZ_ASSERT(uintptr_t(v) > 32);
         return reinterpret_cast<gc::Cell*>(v);
     }
-    static bool needsPostBarrier(JSObject* v) {
-        return v != nullptr && gc::IsInsideNursery(reinterpret_cast<gc::Cell*>(v));
-    }
-    static void postBarrier(JSObject** vp) {
-        JS::HeapObjectPostBarrier(vp);
-    }
-    static void relocate(JSObject** vp) {
-        JS::HeapObjectRelocate(vp);
+    static void postBarrier(JSObject** vp, JSObject* prev, JSObject* next) {
+        JS::HeapObjectPostBarrier(vp, prev, next);
     }
 };
 
 template <>
 struct GCMethods<JSFunction*>
 {
     static JSFunction* initial() { return nullptr; }
-    static bool needsPostBarrier(JSFunction* v) {
-        return v != nullptr && gc::IsInsideNursery(reinterpret_cast<gc::Cell*>(v));
-    }
-    static void postBarrier(JSFunction** vp) {
-        JS::HeapObjectPostBarrier(reinterpret_cast<JSObject**>(vp));
-    }
-    static void relocate(JSFunction** vp) {
-        JS::HeapObjectRelocate(reinterpret_cast<JSObject**>(vp));
+    static void postBarrier(JSFunction** vp, JSFunction* prev, JSFunction* next) {
+        JS::HeapObjectPostBarrier(reinterpret_cast<JSObject**>(vp),
+                                  reinterpret_cast<JSObject*>(prev),
+                                  reinterpret_cast<JSObject*>(next));
     }
 };
 
 } /* namespace js */
 
 namespace JS {
 
 /*
--- a/js/public/Value.h
+++ b/js/public/Value.h
@@ -1622,38 +1622,35 @@ SameType(const Value& lhs, const Value& 
     return JSVAL_SAME_TYPE_IMPL(lhs.data, rhs.data);
 }
 
 } // namespace JS
 
 /************************************************************************/
 
 namespace JS {
-JS_PUBLIC_API(void) HeapValuePostBarrier(Value* valuep);
-JS_PUBLIC_API(void) HeapValueRelocate(Value* valuep);
+JS_PUBLIC_API(void) HeapValuePostBarrier(Value* valuep, const Value& prev, const Value& next);
 }
 
 namespace js {
 
 template <> struct GCMethods<const JS::Value>
 {
     static JS::Value initial() { return JS::UndefinedValue(); }
 };
 
 template <> struct GCMethods<JS::Value>
 {
     static JS::Value initial() { return JS::UndefinedValue(); }
     static gc::Cell* asGCThingOrNull(const JS::Value& v) {
         return v.isMarkable() ? v.toGCThing() : nullptr;
     }
-    static bool needsPostBarrier(const JS::Value& v) {
-        return v.isObject() && gc::IsInsideNursery(reinterpret_cast<gc::Cell*>(&v.toObject()));
+    static void postBarrier(JS::Value* v, const JS::Value& prev, const JS::Value& next) {
+        JS::HeapValuePostBarrier(v, prev, next);
     }
-    static void postBarrier(JS::Value* v) { JS::HeapValuePostBarrier(v); }
-    static void relocate(JS::Value* v) { JS::HeapValueRelocate(v); }
 };
 
 template <class Outer> class MutableValueOperations;
 
 /*
  * A class designed for CRTP use in implementing the non-mutating parts of the
  * Value interface in Value-like classes.  Outer must be a class inheriting
  * ValueOperations<Outer> with a visible extract() method returning the
--- a/js/src/gc/Barrier.cpp
+++ b/js/src/gc/Barrier.cpp
@@ -85,36 +85,20 @@ js::PreBarrierFunctor<S>::operator()(T* 
 }
 template void js::PreBarrierFunctor<JS::Value>::operator()<JS::Symbol>(JS::Symbol*);
 template void js::PreBarrierFunctor<JS::Value>::operator()<JSObject>(JSObject*);
 template void js::PreBarrierFunctor<JS::Value>::operator()<JSString>(JSString*);
 template void js::PreBarrierFunctor<jsid>::operator()<JS::Symbol>(JS::Symbol*);
 template void js::PreBarrierFunctor<jsid>::operator()<JSString>(JSString*);
 
 JS_PUBLIC_API(void)
-JS::HeapObjectPostBarrier(JSObject** objp)
+JS::HeapObjectPostBarrier(JSObject** objp, JSObject* prev, JSObject* next)
 {
     MOZ_ASSERT(objp);
-    MOZ_ASSERT(*objp);
-    js::InternalGCMethods<JSObject*>::postBarrierRelocate(objp);
+    js::InternalGCMethods<JSObject*>::postBarrier(objp, prev, next);
 }
 
 JS_PUBLIC_API(void)
-JS::HeapObjectRelocate(JSObject** objp)
-{
-    MOZ_ASSERT(objp);
-    MOZ_ASSERT(*objp);
-    js::InternalGCMethods<JSObject*>::postBarrierRemove(objp);
-}
-
-JS_PUBLIC_API(void)
-JS::HeapValuePostBarrier(JS::Value* valuep)
+JS::HeapValuePostBarrier(JS::Value* valuep, const Value& prev, const Value& next)
 {
     MOZ_ASSERT(valuep);
-    js::InternalGCMethods<JS::Value>::postBarrierRelocate(valuep);
+    js::InternalGCMethods<JS::Value>::postBarrier(valuep, prev, next);
 }
-
-JS_PUBLIC_API(void)
-JS::HeapValueRelocate(JS::Value* valuep)
-{
-    MOZ_ASSERT(valuep);
-    js::InternalGCMethods<JS::Value>::postBarrierRemove(valuep);
-}
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -234,19 +234,17 @@ struct InternalGCMethods {};
 
 template <typename T>
 struct InternalGCMethods<T*>
 {
     static bool isMarkable(T* v) { return v != nullptr; }
 
     static void preBarrier(T* v) { T::writeBarrierPre(v); }
 
-    static void postBarrier(T** vp) { T::writeBarrierPost(*vp, vp); }
-    static void postBarrierRelocate(T** vp) { T::writeBarrierPostRelocate(*vp, vp); }
-    static void postBarrierRemove(T** vp) { T::writeBarrierPostRemove(*vp, vp); }
+    static void postBarrier(T** vp, T* prev, T* next) { T::writeBarrierPost(vp, prev, next); }
 
     static void readBarrier(T* v) { T::readBarrier(v); }
 };
 
 template <typename S> struct PreBarrierFunctor : VoidDefaultAdaptor<S> {
     template <typename T> void operator()(T* t);
 };
 
@@ -258,57 +256,49 @@ template <>
 struct InternalGCMethods<Value>
 {
     static bool isMarkable(Value v) { return v.isMarkable(); }
 
     static void preBarrier(Value v) {
         DispatchValueTyped(PreBarrierFunctor<Value>(), v);
     }
 
-    static void postBarrier(Value* vp) {
-        MOZ_ASSERT(!CurrentThreadIsIonCompiling());
-        if (vp->isObject()) {
-            gc::StoreBuffer* sb = reinterpret_cast<gc::Cell*>(&vp->toObject())->storeBuffer();
-            if (sb)
-                sb->putValueFromAnyThread(vp);
-        }
-    }
-
-    static void postBarrierRelocate(Value* vp) {
+    static void postBarrier(Value* vp, const Value& prev, const Value& next) {
         MOZ_ASSERT(!CurrentThreadIsIonCompiling());
-        if (vp->isObject()) {
-            gc::StoreBuffer* sb = reinterpret_cast<gc::Cell*>(&vp->toObject())->storeBuffer();
-            if (sb)
-                sb->putValueFromAnyThread(vp);
-        }
-    }
+        MOZ_ASSERT(vp);
 
-    static void postBarrierRemove(Value* vp) {
-        MOZ_ASSERT(vp);
-        MOZ_ASSERT(vp->isMarkable());
-        MOZ_ASSERT(!CurrentThreadIsIonCompiling());
-        JSRuntime* rt = static_cast<js::gc::Cell*>(vp->toGCThing())->runtimeFromAnyThread();
-        JS::shadow::Runtime* shadowRuntime = JS::shadow::Runtime::asShadowRuntime(rt);
-        shadowRuntime->gcStoreBufferPtr()->unputValueFromAnyThread(vp);
+        // If the target needs an entry, add it.
+        js::gc::StoreBuffer* sb;
+        if (next.isObject() && (sb = reinterpret_cast<gc::Cell*>(&next.toObject())->storeBuffer())) {
+            // If we know that the prev has already inserted an entry, we can skip
+            // doing the lookup to add the new entry.
+            if (prev.isObject() && reinterpret_cast<gc::Cell*>(&prev.toObject())->storeBuffer()) {
+                sb->assertHasValueEdge(vp);
+                return;
+            }
+            sb->putValueFromAnyThread(vp);
+            return;
+        }
+        // Remove the prev entry if the new value does not need it.
+        if (prev.isObject() && (sb = reinterpret_cast<gc::Cell*>(&prev.toObject())->storeBuffer()))
+            sb->unputValueFromAnyThread(vp);
     }
 
     static void readBarrier(const Value& v) {
         DispatchValueTyped(ReadBarrierFunctor<Value>(), v);
     }
 };
 
 template <>
 struct InternalGCMethods<jsid>
 {
     static bool isMarkable(jsid id) { return JSID_IS_STRING(id) || JSID_IS_SYMBOL(id); }
 
     static void preBarrier(jsid id) { DispatchIdTyped(PreBarrierFunctor<jsid>(), id); }
-    static void postBarrier(jsid* idp) {}
-    static void postBarrierRelocate(jsid* idp) {}
-    static void postBarrierRemove(jsid* idp) {}
+    static void postBarrier(jsid* idp, jsid prev, jsid next) {}
 };
 
 template <typename T>
 class BarrieredBaseMixins {};
 
 /*
  * Base class for barriered pointer types.
  */
@@ -336,17 +326,16 @@ class BarrieredBase : public BarrieredBa
      * Obviously this is dangerous unless you know the barrier is not needed.
      */
     T* unsafeGet() { return &value; }
     const T* unsafeGet() const { return &value; }
     void unsafeSet(T v) { value = v; }
 
     /* For users who need to manually barrier the raw types. */
     static void writeBarrierPre(const T& v) { InternalGCMethods<T>::preBarrier(v); }
-    static void writeBarrierPost(const T& v, T* vp) { InternalGCMethods<T>::postBarrier(vp); }
 
   protected:
     void pre() { InternalGCMethods<T>::preBarrier(value); }
 };
 
 template <>
 class BarrieredBaseMixins<JS::Value> : public ValueOperations<BarrieredBase<JS::Value> >
 {
@@ -404,41 +393,42 @@ class PreBarriered : public BarrieredBas
  * implemented by RelocatablePtr<T> or JS::Heap<T> at the cost of not
  * automatically handling deletion or movement.
  */
 template <class T>
 class HeapPtr : public BarrieredBase<T>
 {
   public:
     HeapPtr() : BarrieredBase<T>(GCMethods<T>::initial()) {}
-    explicit HeapPtr(T v) : BarrieredBase<T>(v) { post(); }
-    explicit HeapPtr(const HeapPtr<T>& v) : BarrieredBase<T>(v) { post(); }
+    explicit HeapPtr(T v) : BarrieredBase<T>(v) { post(GCMethods<T>::initial(), v); }
+    explicit HeapPtr(const HeapPtr<T>& v) : BarrieredBase<T>(v) { post(GCMethods<T>::initial(), v); }
 #ifdef DEBUG
     ~HeapPtr() {
         // No prebarrier necessary as this only happens when we are sweeping or
         // before the containing obect becomes part of the GC graph.
         MOZ_ASSERT(CurrentThreadIsGCSweeping() || CurrentThreadIsHandlingInitFailure());
     }
 #endif
 
     void init(T v) {
         this->value = v;
-        post();
+        post(GCMethods<T>::initial(), v);
     }
 
     DECLARE_POINTER_ASSIGN_OPS(HeapPtr, T);
 
   protected:
-    void post() { InternalGCMethods<T>::postBarrier(&this->value); }
+    void post(T prev, T next) { InternalGCMethods<T>::postBarrier(&this->value, prev, next); }
 
   private:
     void set(const T& v) {
         this->pre();
+        T tmp = this->value;
         this->value = v;
-        post();
+        post(tmp, this->value);
     }
 
     /*
      * Unlike RelocatablePtr<T>, HeapPtr<T> must be managed with GC lifetimes.
      * Specifically, the memory used by the pointer itself must be live until
      * at least the next minor GC. For that reason, move semantics are invalid
      * and are deleted here. Please note that not all containers support move
      * semantics, so this does not completely prevent invalid uses.
@@ -489,35 +479,32 @@ class ImmutableTenuredPtr
  * used in contexts where this ability is necessary.
  */
 template <class T>
 class RelocatablePtr : public BarrieredBase<T>
 {
   public:
     RelocatablePtr() : BarrieredBase<T>(GCMethods<T>::initial()) {}
     explicit RelocatablePtr(T v) : BarrieredBase<T>(v) {
-        if (GCMethods<T>::needsPostBarrier(v))
-            post();
+        post(GCMethods<T>::initial(), this->value);
     }
 
     /*
      * For RelocatablePtr, move semantics are equivalent to copy semantics. In
      * C++, a copy constructor taking const-ref is the way to get a single
      * function that will be used for both lvalue and rvalue copies, so we can
      * simply omit the rvalue variant.
      */
     RelocatablePtr(const RelocatablePtr<T>& v) : BarrieredBase<T>(v) {
-        if (GCMethods<T>::needsPostBarrier(this->value))
-            post();
+        post(GCMethods<T>::initial(), this->value);
     }
 
     ~RelocatablePtr() {
         this->pre();
-        if (GCMethods<T>::needsPostBarrier(this->value))
-            relocate();
+        post(this->value, GCMethods<T>::initial());
     }
 
     DECLARE_POINTER_ASSIGN_OPS(RelocatablePtr, T);
 
     /* Make this friend so it can access pre() and post(). */
     template <class T1, class T2>
     friend inline void
     BarrieredSetPair(Zone* zone,
@@ -526,35 +513,23 @@ class RelocatablePtr : public BarrieredB
 
   protected:
     void set(const T& v) {
         this->pre();
         postBarrieredSet(v);
     }
 
     void postBarrieredSet(const T& v) {
-        if (GCMethods<T>::needsPostBarrier(v)) {
-            this->value = v;
-            post();
-        } else if (GCMethods<T>::needsPostBarrier(this->value)) {
-            relocate();
-            this->value = v;
-        } else {
-            this->value = v;
-        }
+        T tmp = this->value;
+        this->value = v;
+        post(tmp, this->value);
     }
 
-    void post() {
-        MOZ_ASSERT(GCMethods<T>::needsPostBarrier(this->value));
-        InternalGCMethods<T>::postBarrierRelocate(&this->value);
-    }
-
-    void relocate() {
-        MOZ_ASSERT(GCMethods<T>::needsPostBarrier(this->value));
-        InternalGCMethods<T>::postBarrierRemove(&this->value);
+    void post(T prev, T next) {
+        InternalGCMethods<T>::postBarrier(&this->value, prev, next);
     }
 };
 
 /*
  * This is a hack for RegExpStatics::updateFromMatch. It allows us to do two
  * barriers with only one branch to check if we're in an incremental GC.
  */
 template <class T1, class T2>
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -280,19 +280,18 @@ class TenuredCell : public Cell
     }
     MOZ_ALWAYS_INLINE JS::shadow::Zone* shadowZoneFromAnyThread() const {
         return JS::shadow::Zone::asShadowZone(zoneFromAnyThread());
     }
 
     static MOZ_ALWAYS_INLINE void readBarrier(TenuredCell* thing);
     static MOZ_ALWAYS_INLINE void writeBarrierPre(TenuredCell* thing);
 
-    static MOZ_ALWAYS_INLINE void writeBarrierPost(TenuredCell* thing, void* cellp);
-    static MOZ_ALWAYS_INLINE void writeBarrierPostRelocate(TenuredCell* thing, void* cellp);
-    static MOZ_ALWAYS_INLINE void writeBarrierPostRemove(TenuredCell* thing, void* cellp);
+    static MOZ_ALWAYS_INLINE void writeBarrierPost(void* cellp, TenuredCell* prior,
+                                                   TenuredCell* next);
 
 #ifdef DEBUG
     inline bool isAligned() const;
 #endif
 };
 
 /*
  * The mark bitmap has one bit per each GC cell. For multi-cell GC things this
@@ -1465,31 +1464,19 @@ TenuredCell::writeBarrierPre(TenuredCell
 static MOZ_ALWAYS_INLINE void
 AssertValidToSkipBarrier(TenuredCell* thing)
 {
     MOZ_ASSERT(!IsInsideNursery(thing));
     MOZ_ASSERT_IF(thing, MapAllocToTraceKind(thing->getAllocKind()) != JS::TraceKind::Object);
 }
 
 /* static */ MOZ_ALWAYS_INLINE void
-TenuredCell::writeBarrierPost(TenuredCell* thing, void* cellp)
-{
-    AssertValidToSkipBarrier(thing);
-}
-
-/* static */ MOZ_ALWAYS_INLINE void
-TenuredCell::writeBarrierPostRelocate(TenuredCell* thing, void* cellp)
+TenuredCell::writeBarrierPost(void* cellp, TenuredCell* prior, TenuredCell* next)
 {
-    AssertValidToSkipBarrier(thing);
-}
-
-/* static */ MOZ_ALWAYS_INLINE void
-TenuredCell::writeBarrierPostRemove(TenuredCell* thing, void* cellp)
-{
-    AssertValidToSkipBarrier(thing);
+    AssertValidToSkipBarrier(next);
 }
 
 #ifdef DEBUG
 bool
 Cell::isAligned() const
 {
     if (!isTenured())
         return true;
--- a/js/src/gc/StoreBuffer.h
+++ b/js/src/gc/StoreBuffer.h
@@ -116,16 +116,21 @@ class StoreBuffer
         }
 
         /* Remove an item from the store buffer. */
         void unput(StoreBuffer* owner, const T& v) {
             sinkStores(owner);
             stores_.remove(v);
         }
 
+        bool has(StoreBuffer* owner, const T& v) {
+            sinkStores(owner);
+            return stores_.has(v);
+        }
+
         /* Trace the source of all edges in the store buffer. */
         void trace(StoreBuffer* owner, TenuringTracer& mover);
 
         size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) {
             return stores_.sizeOfExcludingThis(mallocSizeOf);
         }
 
       private:
@@ -428,16 +433,29 @@ class StoreBuffer
     void putGeneric(const T& t) { putFromAnyThread(bufferGeneric, t);}
 
     /* Insert or update a callback entry. */
     template <typename Key>
     void putCallback(void (*callback)(JSTracer* trc, Key* key, void* data), Key* key, void* data) {
         putFromAnyThread(bufferGeneric, CallbackRef<Key>(callback, key, data));
     }
 
+    void assertHasCellEdge(Cell** cellp) {
+        CellPtrEdge cpe(cellp);
+
+        MOZ_ASSERT(bufferCell.has(this, cpe) ||
+                   !cpe.maybeInRememberedSet(nursery_));
+
+    }
+
+    void assertHasValueEdge(Value* vp) {
+        MOZ_ASSERT(bufferVal.has(this, ValueEdge(vp)) ||
+                   !ValueEdge(vp).maybeInRememberedSet(nursery_));
+    }
+
     void setShouldCancelIonCompilations() {
         cancelIonCompilations_ = true;
     }
 
     /* Methods to trace the source of all edges in the store buffer. */
     void traceValues(TenuringTracer& mover)            { bufferVal.trace(this, mover); }
     void traceCells(TenuringTracer& mover)             { bufferCell.trace(this, mover); }
     void traceSlots(TenuringTracer& mover)             { bufferSlot.trace(this, mover); }
--- a/js/src/jsobj.h
+++ b/js/src/jsobj.h
@@ -305,19 +305,17 @@ class JSObject : public js::gc::Cell
     MOZ_ALWAYS_INLINE JS::Zone* zoneFromAnyThread() const {
         return group_->zoneFromAnyThread();
     }
     MOZ_ALWAYS_INLINE JS::shadow::Zone* shadowZoneFromAnyThread() const {
         return JS::shadow::Zone::asShadowZone(zoneFromAnyThread());
     }
     static MOZ_ALWAYS_INLINE void readBarrier(JSObject* obj);
     static MOZ_ALWAYS_INLINE void writeBarrierPre(JSObject* obj);
-    static MOZ_ALWAYS_INLINE void writeBarrierPost(JSObject* obj, void* cellp);
-    static MOZ_ALWAYS_INLINE void writeBarrierPostRelocate(JSObject* obj, void* cellp);
-    static MOZ_ALWAYS_INLINE void writeBarrierPostRemove(JSObject* obj, void* cellp);
+    static MOZ_ALWAYS_INLINE void writeBarrierPost(void* cellp, JSObject* prev, JSObject* next);
 
     /* Return the allocKind we would use if we were to tenure this object. */
     js::gc::AllocKind allocKindForTenure(const js::Nursery& nursery) const;
 
     size_t tenuredSizeOfThis() const {
         MOZ_ASSERT(isTenured());
         return js::gc::Arena::thingSize(asTenured().getAllocKind());
     }
@@ -625,46 +623,36 @@ JSObject::readBarrier(JSObject* obj)
 /* static */ MOZ_ALWAYS_INLINE void
 JSObject::writeBarrierPre(JSObject* obj)
 {
     if (!isNullLike(obj) && obj->isTenured())
         obj->asTenured().writeBarrierPre(&obj->asTenured());
 }
 
 /* static */ MOZ_ALWAYS_INLINE void
-JSObject::writeBarrierPost(JSObject* obj, void* cellp)
-{
-    MOZ_ASSERT(cellp);
-    if (IsNullTaggedPointer(obj))
-        return;
-    MOZ_ASSERT(obj == *static_cast<JSObject**>(cellp));
-    js::gc::StoreBuffer* storeBuffer = obj->storeBuffer();
-    if (storeBuffer)
-        storeBuffer->putCellFromAnyThread(static_cast<js::gc::Cell**>(cellp));
-}
-
-/* static */ MOZ_ALWAYS_INLINE void
-JSObject::writeBarrierPostRelocate(JSObject* obj, void* cellp)
+JSObject::writeBarrierPost(void* cellp, JSObject* prev, JSObject* next)
 {
     MOZ_ASSERT(cellp);
-    MOZ_ASSERT(obj);
-    MOZ_ASSERT(obj == *static_cast<JSObject**>(cellp));
-    js::gc::StoreBuffer* storeBuffer = obj->storeBuffer();
-    if (storeBuffer)
-        storeBuffer->putCellFromAnyThread(static_cast<js::gc::Cell**>(cellp));
-}
 
-/* static */ MOZ_ALWAYS_INLINE void
-JSObject::writeBarrierPostRemove(JSObject* obj, void* cellp)
-{
-    MOZ_ASSERT(cellp);
-    MOZ_ASSERT(obj);
-    MOZ_ASSERT(obj == *static_cast<JSObject**>(cellp));
-    obj->shadowRuntimeFromAnyThread()->gcStoreBufferPtr()->unputCellFromAnyThread(
-        static_cast<js::gc::Cell**>(cellp));
+    // If the target needs an entry, add it.
+    js::gc::StoreBuffer* buffer;
+    if (!IsNullTaggedPointer(next) && (buffer = next->storeBuffer())) {
+        // If we know that the prev has already inserted an entry, we can skip
+        // doing the lookup to add the new entry.
+        if (!IsNullTaggedPointer(prev) && prev->storeBuffer()) {
+            buffer->assertHasCellEdge(static_cast<js::gc::Cell**>(cellp));
+            return;
+        }
+        buffer->putCellFromAnyThread(static_cast<js::gc::Cell**>(cellp));
+        return;
+    }
+
+    // Remove the prev entry if the new value does not need it.
+    if (!IsNullTaggedPointer(prev) && (buffer = prev->storeBuffer()))
+        buffer->unputCellFromAnyThread(static_cast<js::gc::Cell**>(cellp));
 }
 
 namespace js {
 
 inline bool
 IsCallable(const Value& v)
 {
     return v.isObject() && v.toObject().isCallable();
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -353,18 +353,18 @@ class NewObjectCache
         entry->kind = kind;
 
         entry->nbytes = gc::Arena::thingSize(kind);
         js_memcpy(&entry->templateObject, obj, entry->nbytes);
     }
 
     static void copyCachedToObject(NativeObject* dst, NativeObject* src, gc::AllocKind kind) {
         js_memcpy(dst, src, gc::Arena::thingSize(kind));
-        Shape::writeBarrierPost(dst->shape_, &dst->shape_);
-        ObjectGroup::writeBarrierPost(dst->group_, &dst->group_);
+        Shape::writeBarrierPost(&dst->shape_, nullptr, dst->shape_);
+        ObjectGroup::writeBarrierPost(&dst->group_, nullptr, dst->group_);
     }
 };
 
 /*
  * A FreeOp can do one thing: free memory. For convenience, it has delete_
  * convenience methods that also call destructors.
  *
  * FreeOp is passed to finalizers and other sweep-phase hooks so that we do not