Bug 1260785 - Make hashcode generation fallible for cell pointers that can be moved by GC r=terrence a=gchang
author: Jon Coppeard <jcoppeard@mozilla.com>
Wed, 13 Jul 2016 10:20:00 +0100
changeset 342045 7431edf0d97c7d93be05bd7d6b0c91474925861d
parent 342044 b95e8981e41800c3b202ea73d096e3e3e184d70d
child 342046 517663b29d7ffc2ce91f90dc7beaac4fa35e52e5
push id: 1183
push user: raliiev@mozilla.com
push date: Mon, 05 Sep 2016 20:01:49 +0000
treeherder: mozilla-release@3148731bed45 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: terrence, gchang
bugs: 1260785
milestone: 49.0a2
Bug 1260785 - Make hashcode generation fallible for cell pointers that can be moved by GC r=terrence a=gchang
js/public/HashTable.h
js/public/Id.h
js/public/RootingAPI.h
js/src/gc/Barrier.cpp
js/src/gc/Barrier.h
js/src/gc/Zone.h
js/src/jsapi-tests/testGCWeakCache.cpp
js/src/jscntxt.cpp
js/src/jscntxt.h
js/src/json.cpp
js/src/vm/StructuredClone.cpp
--- a/js/public/HashTable.h
+++ b/js/public/HashTable.h
@@ -57,16 +57,17 @@ template <class Key,
           class HashPolicy = DefaultHasher<Key>,
           class AllocPolicy = TempAllocPolicy>
 class HashMap
 {
     typedef HashMapEntry<Key, Value> TableEntry;
 
     struct MapHashPolicy : HashPolicy
     {
+        using Base = HashPolicy;
         typedef Key KeyType;
         static const Key& getKey(TableEntry& e) { return e.key(); }
         static void setKey(TableEntry& e, Key& k) { HashPolicy::rekey(e.mutableKey(), k); }
     };
 
     typedef detail::HashTable<TableEntry, MapHashPolicy, AllocPolicy> Impl;
     Impl impl;
 
@@ -311,16 +312,17 @@ class HashMap
 // - Due to the lack of exception handling, the user must call |init()|.
 template <class T,
           class HashPolicy = DefaultHasher<T>,
           class AllocPolicy = TempAllocPolicy>
 class HashSet
 {
     struct SetOps : HashPolicy
     {
+        using Base = HashPolicy;
         typedef T KeyType;
         static const KeyType& getKey(const T& t) { return t; }
         static void setKey(T& t, KeyType& k) { HashPolicy::rekey(t, k); }
     };
 
     typedef detail::HashTable<const T, SetOps, AllocPolicy> Impl;
     Impl impl;
 
@@ -670,16 +672,48 @@ struct CStringHasher
     static js::HashNumber hash(Lookup l) {
         return mozilla::HashString(l);
     }
     static bool match(const char* key, Lookup lookup) {
         return strcmp(key, lookup) == 0;
     }
 };
 
+// Fallible hashing interface.
+//
+// Most of the time generating a hash code is infallible so this class provides
+// default methods that always succeed.  Specialize this class for your own hash
+// policy to provide fallible hashing.
+//
+// This is used by MovableCellHasher to handle the fact that generating a unique
+// ID for a cell pointer may fail due to OOM.
+template <typename HashPolicy>
+struct FallibleHashMethods
+{
+    // Return true if a hashcode is already available for its argument.  Once
+    // this returns true for a specific argument it must continue to do so.
+    template <typename Lookup> static bool hasHash(Lookup&& l) { return true; }
+
+    // Fallible method to ensure a hashcode exists for its argument and create
+    // one if not.  Returns false on error, e.g. out of memory.
+    template <typename Lookup> static bool ensureHash(Lookup&& l) { return true; }
+};
+
+template <typename HashPolicy, typename Lookup>
+static bool
+HasHash(Lookup&& l) {
+    return FallibleHashMethods<typename HashPolicy::Base>::hasHash(mozilla::Forward<Lookup>(l));
+}
+
+template <typename HashPolicy, typename Lookup>
+static bool
+EnsureHash(Lookup&& l) {
+    return FallibleHashMethods<typename HashPolicy::Base>::ensureHash(mozilla::Forward<Lookup>(l));
+}
+
 /*****************************************************************************/
 
 // Both HashMap and HashSet are implemented by a single HashTable that is even
 // more heavily parameterized than the other two. This leaves HashTable gnarly
 // and extremely coupled to HashMap and HashSet; thus code should not use
 // HashTable directly.
 
 template <class Key, class Value>
@@ -847,24 +881,31 @@ class HashTable : private AllocPolicy
           : entry_(&entry)
 #ifdef JS_DEBUG
           , table_(&tableArg)
           , generation(tableArg.generation())
 #endif
         {}
 
       public:
-        // Leaves Ptr uninitialized.
-        Ptr() {
+        Ptr()
+          : entry_(nullptr)
 #ifdef JS_DEBUG
-            entry_ = (Entry*)0xbad;
+          , table_(nullptr)
+          , generation(0)
 #endif
+        {}
+
+        bool isValid() const {
+            return !entry_;
         }
 
         bool found() const {
+            if (isValid())
+                return false;
 #ifdef JS_DEBUG
             MOZ_ASSERT(generation == table_->generation());
 #endif
             return entry_->isLive();
         }
 
         explicit operator bool() const {
             return found();
@@ -879,23 +920,25 @@ class HashTable : private AllocPolicy
 #ifdef JS_DEBUG
             MOZ_ASSERT(generation == table_->generation());
 #endif
             return !(*this == rhs);
         }
 
         T& operator*() const {
 #ifdef JS_DEBUG
+            MOZ_ASSERT(found());
             MOZ_ASSERT(generation == table_->generation());
 #endif
             return entry_->get();
         }
 
         T* operator->() const {
 #ifdef JS_DEBUG
+            MOZ_ASSERT(found());
             MOZ_ASSERT(generation == table_->generation());
 #endif
             return &entry_->get();
         }
     };
 
     // A Ptr that can be used to add a key after a failed lookup.
     class AddPtr : public Ptr
@@ -910,18 +953,17 @@ class HashTable : private AllocPolicy
           : Ptr(entry, tableArg)
           , keyHash(hn)
 #ifdef JS_DEBUG
           , mutationCount(tableArg.mutationCount)
 #endif
         {}
 
       public:
-        // Leaves AddPtr uninitialized.
-        AddPtr() {}
+        AddPtr() : keyHash(0) {}
     };
 
     // A collection of hash table entries. The collection is enumerated by
     // calling |front()| followed by |popFront()| as long as |!empty()|. As
     // with Ptr/AddPtr, Range objects must not be used after any mutating hash
     // table operation unless the |generation()| is tested.
     class Range
     {
@@ -1677,43 +1719,53 @@ class HashTable : private AllocPolicy
     size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const
     {
         return mallocSizeOf(this) + sizeOfExcludingThis(mallocSizeOf);
     }
 
     Ptr lookup(const Lookup& l) const
     {
         mozilla::ReentrancyGuard g(*this);
+        if (!HasHash<HashPolicy>(l))
+            return Ptr();
         HashNumber keyHash = prepareHash(l);
         return Ptr(lookup(l, keyHash, 0), *this);
     }
 
     Ptr readonlyThreadsafeLookup(const Lookup& l) const
     {
+        if (!HasHash<HashPolicy>(l))
+            return Ptr();
         HashNumber keyHash = prepareHash(l);
         return Ptr(lookup(l, keyHash, 0), *this);
     }
 
     AddPtr lookupForAdd(const Lookup& l) const
     {
         mozilla::ReentrancyGuard g(*this);
+        if (!EnsureHash<HashPolicy>(l))
+            return AddPtr();
         HashNumber keyHash = prepareHash(l);
         Entry& entry = lookup(l, keyHash, sCollisionBit);
         AddPtr p(entry, *this, keyHash);
         return p;
     }
 
     template <typename... Args>
     MOZ_MUST_USE bool add(AddPtr& p, Args&&... args)
     {
         mozilla::ReentrancyGuard g(*this);
         MOZ_ASSERT(table);
         MOZ_ASSERT(!p.found());
         MOZ_ASSERT(!(p.keyHash & sCollisionBit));
 
+        // Check for error from ensureHash() here.
+        if (p.isValid())
+            return false;
+
         // Changing an entry from removed to live does not affect whether we
         // are overloaded and can be handled separately.
         if (p.entry_->isRemoved()) {
             if (!this->checkSimulatedOOM())
                 return false;
             METER(stats.addOverRemoved++);
             removedCount--;
             p.keyHash |= sCollisionBit;
@@ -1751,28 +1803,35 @@ class HashTable : private AllocPolicy
     // Note: |l| may be alias arguments in |args|, so this function must take
     // care not to use |l| after moving |args|.
     template <typename... Args>
     MOZ_MUST_USE bool putNew(const Lookup& l, Args&&... args)
     {
         if (!this->checkSimulatedOOM())
             return false;
 
+        if (!EnsureHash<HashPolicy>(l))
+            return false;
+
         if (checkOverloaded() == RehashFailed)
             return false;
 
         putNewInfallible(l, mozilla::Forward<Args>(args)...);
         return true;
     }
 
     // Note: |l| may be a reference to a piece of |u|, so this function
     // must take care not to use |l| after moving |u|.
     template <typename... Args>
     MOZ_MUST_USE bool relookupOrAdd(AddPtr& p, const Lookup& l, Args&&... args)
     {
+        // Check for error from ensureHash() here.
+        if (p.isValid())
+            return false;
+
 #ifdef JS_DEBUG
         p.generation = generation();
         p.mutationCount = mutationCount;
 #endif
         {
             mozilla::ReentrancyGuard g(*this);
             MOZ_ASSERT(prepareHash(l) == p.keyHash); // l has not been destroyed
             p.entry_ = &lookup(l, p.keyHash, sCollisionBit);
--- a/js/public/Id.h
+++ b/js/public/Id.h
@@ -22,20 +22,16 @@
 
 #include "jstypes.h"
 
 #include "js/HeapAPI.h"
 #include "js/RootingAPI.h"
 #include "js/TypeDecls.h"
 #include "js/Utility.h"
 
-namespace js {
-template <typename T> struct DefaultHasher;
-} // namespace js
-
 struct jsid
 {
     size_t asBits;
     bool operator==(const jsid& rhs) const { return asBits == rhs.asBits; }
     bool operator!=(const jsid& rhs) const { return asBits != rhs.asBits; }
 } JS_HAZ_GC_POINTER;
 #define JSID_BITS(id) (id.asBits)
 
--- a/js/public/RootingAPI.h
+++ b/js/public/RootingAPI.h
@@ -559,32 +559,47 @@ struct BarrierMethods<JSFunction*>
 // aggregate Lookup kinds embed a JSObject* that is frequently null and do not
 // null test before dispatching to the hasher.
 template <typename T>
 struct JS_PUBLIC_API(MovableCellHasher)
 {
     using Key = T;
     using Lookup = T;
 
+    static bool hasHash(const Lookup& l);
+    static bool ensureHash(const Lookup& l);
     static HashNumber hash(const Lookup& l);
     static bool match(const Key& k, const Lookup& l);
     static void rekey(Key& k, const Key& newKey) { k = newKey; }
 };
 
 template <typename T>
 struct JS_PUBLIC_API(MovableCellHasher<JS::Heap<T>>)
 {
     using Key = JS::Heap<T>;
     using Lookup = T;
 
+    static bool hasHash(const Lookup& l) { return MovableCellHasher<T>::hasHash(l); }
+    static bool ensureHash(const Lookup& l) { return MovableCellHasher<T>::ensureHash(l); }
     static HashNumber hash(const Lookup& l) { return MovableCellHasher<T>::hash(l); }
     static bool match(const Key& k, const Lookup& l) { return MovableCellHasher<T>::match(k, l); }
     static void rekey(Key& k, const Key& newKey) { k.unsafeSet(newKey); }
 };
 
+template <typename T>
+struct FallibleHashMethods<MovableCellHasher<T>>
+{
+    template <typename Lookup> static bool hasHash(Lookup&& l) {
+        return MovableCellHasher<T>::hasHash(mozilla::Forward<Lookup>(l));
+    }
+    template <typename Lookup> static bool ensureHash(Lookup&& l) {
+        return MovableCellHasher<T>::ensureHash(mozilla::Forward<Lookup>(l));
+    }
+};
+
 } /* namespace js */
 
 namespace js {
 
 template <typename T>
 class DispatchWrapper
 {
     static_assert(JS::MapTypeToRootKind<T>::kind == JS::RootKind::Traceable,
--- a/js/src/gc/Barrier.cpp
+++ b/js/src/gc/Barrier.cpp
@@ -102,16 +102,37 @@ PreBarrierFunctor<S>::operator()(T* t)
 template void PreBarrierFunctor<JS::Value>::operator()<type>(type*);
 JS_FOR_EACH_TRACEKIND(JS_EXPAND_DEF);
 #undef JS_EXPAND_DEF
 
 template void PreBarrierFunctor<jsid>::operator()<JS::Symbol>(JS::Symbol*);
 template void PreBarrierFunctor<jsid>::operator()<JSString>(JSString*);
 
 template <typename T>
+/* static */ bool
+MovableCellHasher<T>::hasHash(const Lookup& l)
+{
+    if (!l)
+        return true;
+
+    return l->zoneFromAnyThread()->hasUniqueId(l);
+}
+
+template <typename T>
+/* static */ bool
+MovableCellHasher<T>::ensureHash(const Lookup& l)
+{
+    if (!l)
+        return true;
+
+    uint64_t unusedId;
+    return l->zoneFromAnyThread()->getUniqueId(l, &unusedId);
+}
+
+template <typename T>
 /* static */ HashNumber
 MovableCellHasher<T>::hash(const Lookup& l)
 {
     if (!l)
         return 0;
 
     // We have to access the zone from-any-thread here: a worker thread may be
     // cloning a self-hosted object from the main-thread-runtime-owned self-
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -796,38 +796,44 @@ class ImmutableTenuredPtr
 };
 
 template <typename T>
 struct MovableCellHasher<PreBarriered<T>>
 {
     using Key = PreBarriered<T>;
     using Lookup = T;
 
+    static bool hasHash(const Lookup& l) { return MovableCellHasher<T>::hasHash(l); }
+    static bool ensureHash(const Lookup& l) { return MovableCellHasher<T>::ensureHash(l); }
     static HashNumber hash(const Lookup& l) { return MovableCellHasher<T>::hash(l); }
     static bool match(const Key& k, const Lookup& l) { return MovableCellHasher<T>::match(k, l); }
     static void rekey(Key& k, const Key& newKey) { k.unsafeSet(newKey); }
 };
 
 template <typename T>
 struct MovableCellHasher<HeapPtr<T>>
 {
     using Key = HeapPtr<T>;
     using Lookup = T;
 
+    static bool hasHash(const Lookup& l) { return MovableCellHasher<T>::hasHash(l); }
+    static bool ensureHash(const Lookup& l) { return MovableCellHasher<T>::ensureHash(l); }
     static HashNumber hash(const Lookup& l) { return MovableCellHasher<T>::hash(l); }
     static bool match(const Key& k, const Lookup& l) { return MovableCellHasher<T>::match(k, l); }
     static void rekey(Key& k, const Key& newKey) { k.unsafeSet(newKey); }
 };
 
 template <typename T>
 struct MovableCellHasher<ReadBarriered<T>>
 {
     using Key = ReadBarriered<T>;
     using Lookup = T;
 
+    static bool hasHash(const Lookup& l) { return MovableCellHasher<T>::hasHash(l); }
+    static bool ensureHash(const Lookup& l) { return MovableCellHasher<T>::ensureHash(l); }
     static HashNumber hash(const Lookup& l) { return MovableCellHasher<T>::hash(l); }
     static bool match(const Key& k, const Lookup& l) {
         return MovableCellHasher<T>::match(k.unbarrieredGet(), l);
     }
     static void rekey(Key& k, const Key& newKey) { k.unsafeSet(newKey); }
 };
 
 /* Useful for hashtables with a GCPtr as key. */
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -423,17 +423,22 @@ struct Zone : public JS::shadow::Zone,
         // Set a new uid on the cell.
         *uidp = js::gc::NextCellUniqueId(runtimeFromAnyThread());
         if (!uniqueIds_.add(p, cell, *uidp))
             return false;
 
         // If the cell was in the nursery, hopefully unlikely, then we need to
         // tell the nursery about it so that it can sweep the uid if the thing
         // does not get tenured.
-        return runtimeFromAnyThread()->gc.nursery.addedUniqueIdToCell(cell);
+        if (!runtimeFromAnyThread()->gc.nursery.addedUniqueIdToCell(cell)) {
+            uniqueIds_.remove(cell);
+            return false;
+        }
+
+        return true;
     }
 
     js::HashNumber getHashCodeInfallible(js::gc::Cell* cell) {
         return UniqueIdToHash(getUniqueIdInfallible(cell));
     }
 
     uint64_t getUniqueIdInfallible(js::gc::Cell* cell) {
         uint64_t uid;
--- a/js/src/jsapi-tests/testGCWeakCache.cpp
+++ b/js/src/jsapi-tests/testGCWeakCache.cpp
@@ -19,19 +19,21 @@ BEGIN_TEST(testWeakCacheSet)
     // this may fail and we'll get more tenured objects. That's fine:
     // the test will continue to work, it will just not test as much.
     JS::RootedObject tenured1(cx, JS_NewPlainObject(cx));
     JS::RootedObject tenured2(cx, JS_NewPlainObject(cx));
     JS_GC(rt);
     JS::RootedObject nursery1(cx, JS_NewPlainObject(cx));
     JS::RootedObject nursery2(cx, JS_NewPlainObject(cx));
 
-    using ObjectSet = js::GCHashSet<JS::Heap<JSObject*>, js::MovableCellHasher<JS::Heap<JSObject*>>>;
+    using ObjectSet = js::GCHashSet<JS::Heap<JSObject*>,
+                                    js::MovableCellHasher<JS::Heap<JSObject*>>,
+                                    js::SystemAllocPolicy>;
     using Cache = JS::WeakCache<ObjectSet>;
-    auto cache = Cache(JS::GetObjectZone(tenured1), ObjectSet(cx));
+    auto cache = Cache(JS::GetObjectZone(tenured1), ObjectSet());
     CHECK(cache.init());
 
     cache.put(tenured1);
     cache.put(tenured2);
     cache.put(nursery1);
     cache.put(nursery2);
 
     // Verify relocation and that we don't sweep too aggressively.
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -57,18 +57,20 @@ using mozilla::PodArrayZero;
 using mozilla::PointerRangeSize;
 
 bool
 js::AutoCycleDetector::init()
 {
     AutoCycleDetector::Set& set = cx->cycleDetectorSet;
     hashsetAddPointer = set.lookupForAdd(obj);
     if (!hashsetAddPointer) {
-        if (!set.add(hashsetAddPointer, obj))
+        if (!set.add(hashsetAddPointer, obj)) {
+            ReportOutOfMemory(cx);
             return false;
+        }
         cyclic = false;
         hashsetGenerationAtInit = set.generation();
     }
     return true;
 }
 
 js::AutoCycleDetector::~AutoCycleDetector()
 {
@@ -963,17 +965,16 @@ JSContext::JSContext(JSRuntime* rt)
     unwrappedException_(this),
     options_(),
     overRecursed_(false),
     propagatingForcedReturn_(false),
     liveVolatileJitFrameIterators_(nullptr),
     reportGranularity(JS_DEFAULT_JITREPORT_GRANULARITY),
     resolvingList(nullptr),
     generatingError(false),
-    cycleDetectorSet(this),
     data(nullptr),
     data2(nullptr),
     outstandingRequests(0),
     jitIsBroken(false)
 {
     MOZ_ASSERT(static_cast<ContextFriendFields*>(this) ==
                ContextFriendFields::get(this));
 }
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -30,17 +30,17 @@ class DebugModeOSRVolatileJitFrameIterat
 } // namespace jit
 
 typedef HashSet<Shape*> ShapeSet;
 
 /* Detects cycles when traversing an object graph. */
 class MOZ_RAII AutoCycleDetector
 {
   public:
-    using Set = HashSet<JSObject*, MovableCellHasher<JSObject*>>;
+    using Set = HashSet<JSObject*, MovableCellHasher<JSObject*>, SystemAllocPolicy>;
 
     AutoCycleDetector(JSContext* cx, HandleObject objArg
                       MOZ_GUARD_OBJECT_NOTIFIER_PARAM)
       : cx(cx), obj(cx, objArg), cyclic(true)
     {
         MOZ_GUARD_OBJECT_NOTIFIER_INIT;
     }
 
--- a/js/src/json.cpp
+++ b/js/src/json.cpp
@@ -123,42 +123,44 @@ Quote(JSContext* cx, StringBuffer& sb, J
 
     return linear->hasLatin1Chars()
            ? Quote<Latin1Char>(sb, linear)
            : Quote<char16_t>(sb, linear);
 }
 
 namespace {
 
+using ObjectSet = GCHashSet<JSObject*, MovableCellHasher<JSObject*>, SystemAllocPolicy>;
+
 class StringifyContext
 {
   public:
     StringifyContext(JSContext* cx, StringBuffer& sb, const StringBuffer& gap,
                      HandleObject replacer, const AutoIdVector& propertyList,
                      bool maybeSafely)
       : sb(sb),
         gap(gap),
         replacer(cx, replacer),
-        stack(cx, GCHashSet<JSObject*, MovableCellHasher<JSObject*>>(cx)),
+        stack(cx),
         propertyList(propertyList),
         depth(0),
         maybeSafely(maybeSafely)
     {
         MOZ_ASSERT_IF(maybeSafely, !replacer);
         MOZ_ASSERT_IF(maybeSafely, gap.empty());
     }
 
     bool init() {
         return stack.init(8);
     }
 
     StringBuffer& sb;
     const StringBuffer& gap;
     RootedObject replacer;
-    Rooted<GCHashSet<JSObject*, MovableCellHasher<JSObject*>>> stack;
+    Rooted<ObjectSet> stack;
     const AutoIdVector& propertyList;
     uint32_t depth;
     bool maybeSafely;
 };
 
 } /* anonymous namespace */
 
 static bool Str(JSContext* cx, const Value& v, StringifyContext* scx);
@@ -306,25 +308,29 @@ class CycleDetector
 
     bool foundCycle(JSContext* cx) {
         auto addPtr = stack.lookupForAdd(obj_);
         if (addPtr) {
             JS_ReportErrorNumber(cx, GetErrorMessage, nullptr, JSMSG_JSON_CYCLIC_VALUE,
                                  js_object_str);
             return false;
         }
-        return stack.add(addPtr, obj_);
+        if (!stack.add(addPtr, obj_)) {
+            ReportOutOfMemory(cx);
+            return false;
+        }
+        return true;
     }
 
     ~CycleDetector() {
         stack.remove(obj_);
     }
 
   private:
-    MutableHandle<GCHashSet<JSObject*, MovableCellHasher<JSObject*>>> stack;
+    MutableHandle<ObjectSet> stack;
     HandleObject obj_;
 };
 
 /* ES5 15.12.3 JO. */
 static bool
 JO(JSContext* cx, HandleObject obj, StringifyContext* scx)
 {
     /*
--- a/js/src/vm/StructuredClone.cpp
+++ b/js/src/vm/StructuredClone.cpp
@@ -272,24 +272,30 @@ struct JSStructuredCloneReader {
 struct JSStructuredCloneWriter {
   public:
     explicit JSStructuredCloneWriter(JSContext* cx,
                                      const JSStructuredCloneCallbacks* cb,
                                      void* cbClosure,
                                      Value tVal)
         : out(cx), objs(out.context()),
           counts(out.context()), entries(out.context()),
-          memory(out.context(), CloneMemory(out.context())), callbacks(cb),
+          memory(out.context()), callbacks(cb),
           closure(cbClosure), transferable(out.context(), tVal),
           transferableObjects(out.context(), GCHashSet<JSObject*>(cx))
     {}
 
     ~JSStructuredCloneWriter();
 
-    bool init() { return memory.init() && parseTransferable() && writeTransferMap(); }
+    bool init() {
+        if (!memory.init()) {
+            ReportOutOfMemory(context());
+            return false;
+        }
+        return parseTransferable() && writeTransferMap();
+    }
 
     bool write(HandleValue v);
 
     SCOutput& output() { return out; }
 
     bool extractBuffer(uint64_t** datap, size_t* sizep) {
         return out.extractBuffer(datap, sizep);
     }
@@ -334,20 +340,23 @@ struct JSStructuredCloneWriter {
     Vector<size_t> counts;
 
     // For JSObject: Property IDs as value
     // For Map: Key followed by value
     // For Set: Key
     // For SavedFrame: parent SavedFrame
     AutoValueVector entries;
 
-    // The "memory" list described in the HTML5 internal structured cloning algorithm.
-    // memory is a superset of objs; items are never removed from Memory
-    // until a serialization operation is finished
-    using CloneMemory = GCHashMap<JSObject*, uint32_t, MovableCellHasher<JSObject*>>;
+    // The "memory" list described in the HTML5 internal structured cloning
+    // algorithm.  memory is a superset of objs; items are never removed from
+    // Memory until a serialization operation is finished
+    using CloneMemory = GCHashMap<JSObject*,
+                                  uint32_t,
+                                  MovableCellHasher<JSObject*>,
+                                  SystemAllocPolicy>;
     Rooted<CloneMemory> memory;
 
     // The user defined callbacks that will be used for cloning.
     const JSStructuredCloneCallbacks* callbacks;
 
     // Any value passed to JS_WriteStructuredClone.
     void* closure;
 
@@ -961,18 +970,20 @@ JSStructuredCloneWriter::writeSharedArra
 
 bool
 JSStructuredCloneWriter::startObject(HandleObject obj, bool* backref)
 {
     /* Handle cycles in the object graph. */
     CloneMemory::AddPtr p = memory.lookupForAdd(obj);
     if ((*backref = p.found()))
         return out.writePair(SCTAG_BACK_REFERENCE_OBJECT, p->value());
-    if (!memory.add(p, obj, memory.count()))
+    if (!memory.add(p, obj, memory.count())) {
+        ReportOutOfMemory(context());
         return false;
+    }
 
     if (memory.count() == UINT32_MAX) {
         JS_ReportErrorNumber(context(), GetErrorMessage, nullptr,
                              JSMSG_NEED_DIET, "object graph to serialize");
         return false;
     }
 
     return true;
@@ -1261,18 +1272,20 @@ JSStructuredCloneWriter::writeTransferMa
 
     if (!out.write(transferableObjects.count()))
         return false;
 
     RootedObject obj(context());
     for (auto tr = transferableObjects.all(); !tr.empty(); tr.popFront()) {
         obj = tr.front();
 
-        if (!memory.put(obj, memory.count()))
+        if (!memory.put(obj, memory.count())) {
+            ReportOutOfMemory(context());
             return false;
+        }
 
         // Emit a placeholder pointer.  We defer stealing the data until later
         // (and, if necessary, detaching this object if it's an ArrayBuffer).
         if (!out.writePair(SCTAG_TRANSFER_MAP_PENDING_ENTRY, JS::SCTAG_TMO_UNFILLED))
             return false;
         if (!out.writePtr(nullptr)) // Pointer to ArrayBuffer contents or to SharedArrayRawBuffer.
             return false;
         if (!out.write(0)) // extraData