Bug 650161 - Fix test failures with compacting GC enabled r=terrence
☠☠ backed out by da9a761dc078 ☠ ☠
author: Jon Coppeard <jcoppeard@mozilla.com>
Thu, 14 Aug 2014 11:52:33 +0100
changeset 199452 8d1e96d1eb31a7d1dfd107764ac32e75f913a52a
parent 199451 bdcd6002052392cc8a73a8ef3d2c2de05c89eef7
child 199453 55126a7a74479bfcf90d50cbb44503c00f6bbd8f
push id: 47653
push user: jcoppeard@mozilla.com
push date: Thu, 14 Aug 2014 11:02:51 +0000
treeherder: mozilla-inbound@8d1e96d1eb31 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: terrence
bugs: 650161
milestone: 34.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 650161 - Fix test failures with compacting GC enabled r=terrence
js/src/gc/Barrier.h
js/src/gc/GCRuntime.h
js/src/gc/Marking.cpp
js/src/gc/Marking.h
js/src/gc/Tracer.cpp
js/src/gc/Tracer.h
js/src/jscompartment.cpp
js/src/jscompartment.h
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jshashutil.h
js/src/jsweakmap.cpp
js/src/jsweakmap.h
js/src/shell/jsheaptools.cpp
js/src/vm/ArrayBufferObject.cpp
js/src/vm/ArrayBufferObject.h
js/src/vm/Debugger.cpp
js/src/vm/SavedStacks.cpp
js/src/vm/ScopeObject.cpp
js/src/vm/ScopeObject.h
js/src/vm/Shape.cpp
js/src/vm/TypedArrayObject.h
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -164,16 +164,17 @@ namespace js {
 class ArgumentsObject;
 class ArrayBufferObject;
 class ArrayBufferViewObject;
 class SharedArrayBufferObject;
 class BaseShape;
 class DebugScopeObject;
 class GlobalObject;
 class LazyScript;
+class NestedScopeObject;
 class Nursery;
 class ObjectImpl;
 class PropertyName;
 class SavedFrame;
 class ScopeObject;
 class ScriptSourceObject;
 class Shape;
 class UnownedBaseShape;
@@ -212,16 +213,17 @@ template <> struct MapTypeToTraceKind<JS
 template <> struct MapTypeToTraceKind<JSAtom>           { static const JSGCTraceKind kind = JSTRACE_STRING; };
 template <> struct MapTypeToTraceKind<JSFlatString>     { static const JSGCTraceKind kind = JSTRACE_STRING; };
 template <> struct MapTypeToTraceKind<JSFunction>       { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
 template <> struct MapTypeToTraceKind<JSLinearString>   { static const JSGCTraceKind kind = JSTRACE_STRING; };
 template <> struct MapTypeToTraceKind<JSObject>         { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
 template <> struct MapTypeToTraceKind<JSScript>         { static const JSGCTraceKind kind = JSTRACE_SCRIPT; };
 template <> struct MapTypeToTraceKind<JSString>         { static const JSGCTraceKind kind = JSTRACE_STRING; };
 template <> struct MapTypeToTraceKind<LazyScript>       { static const JSGCTraceKind kind = JSTRACE_LAZY_SCRIPT; };
+template <> struct MapTypeToTraceKind<NestedScopeObject>{ static const JSGCTraceKind kind = JSTRACE_OBJECT; };
 template <> struct MapTypeToTraceKind<ObjectImpl>       { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
 template <> struct MapTypeToTraceKind<PropertyName>     { static const JSGCTraceKind kind = JSTRACE_STRING; };
 template <> struct MapTypeToTraceKind<SavedFrame>       { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
 template <> struct MapTypeToTraceKind<ScopeObject>      { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
 template <> struct MapTypeToTraceKind<Shape>            { static const JSGCTraceKind kind = JSTRACE_SHAPE; };
 template <> struct MapTypeToTraceKind<SharedArrayBufferObject>{ static const JSGCTraceKind kind = JSTRACE_OBJECT; };
 template <> struct MapTypeToTraceKind<UnownedBaseShape> { static const JSGCTraceKind kind = JSTRACE_BASE_SHAPE; };
 template <> struct MapTypeToTraceKind<jit::JitCode>     { static const JSGCTraceKind kind = JSTRACE_JITCODE; };
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -391,16 +391,21 @@ class GCRuntime
 
     bool isIncrementalGCEnabled() { return mode == JSGC_MODE_INCREMENTAL && incrementalAllowed; }
     bool isIncrementalGCInProgress() { return state() != gc::NO_INCREMENTAL && !verifyPreData; }
 
     bool isGenerationalGCEnabled() { return generationalDisabled == 0; }
     void disableGenerationalGC();
     void enableGenerationalGC();
 
+#ifdef JSGC_COMPACTING
+    void disableCompactingGC();
+    void enableCompactingGC();
+#endif
+
     void setGrayRootsTracer(JSTraceDataOp traceOp, void *data);
     bool addBlackRootsTracer(JSTraceDataOp traceOp, void *data);
     void removeBlackRootsTracer(JSTraceDataOp traceOp, void *data);
 
     void setMaxMallocBytes(size_t value);
     void resetMallocBytes();
     bool isTooMuchMalloc() const { return mallocBytes <= 0; }
     void updateMallocCounter(JS::Zone *zone, size_t nbytes);
@@ -708,16 +713,25 @@ class GCRuntime
      */
     bool                  incrementalAllowed;
 
     /*
      * GGC can be enabled from the command line while testing.
      */
     unsigned              generationalDisabled;
 
+#ifdef JSGC_COMPACTING
+    /*
+     * Some code cannot tolerate compacting GC so it can be disabled with this
+     * counter.  This can happen from code executing in a ThreadSafeContext so
+     * we make it atomic.
+     */
+    mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire> compactingDisabled;
+#endif
+
     /*
      * This is true if we are in the middle of a brain transplant (e.g.,
      * JS_TransplantObject) or some other operation that can manipulate
      * dead zones.
      */
     bool                  manipulatingDeadZones;
 
     /*
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -624,16 +624,17 @@ DeclMarkerImpl(JitCode, jit::JitCode)
 DeclMarkerImpl(Object, ArgumentsObject)
 DeclMarkerImpl(Object, ArrayBufferObject)
 DeclMarkerImpl(Object, ArrayBufferViewObject)
 DeclMarkerImpl(Object, SharedArrayBufferObject)
 DeclMarkerImpl(Object, DebugScopeObject)
 DeclMarkerImpl(Object, GlobalObject)
 DeclMarkerImpl(Object, JSObject)
 DeclMarkerImpl(Object, JSFunction)
+DeclMarkerImpl(Object, NestedScopeObject)
 DeclMarkerImpl(Object, ObjectImpl)
 DeclMarkerImpl(Object, SavedFrame)
 DeclMarkerImpl(Object, ScopeObject)
 DeclMarkerImpl(Script, JSScript)
 DeclMarkerImpl(LazyScript, LazyScript)
 DeclMarkerImpl(Shape, Shape)
 DeclMarkerImpl(String, JSAtom)
 DeclMarkerImpl(String, JSString)
--- a/js/src/gc/Marking.h
+++ b/js/src/gc/Marking.h
@@ -18,16 +18,17 @@ class ArgumentsObject;
 class ArrayBufferObject;
 class ArrayBufferViewObject;
 class SharedArrayBufferObject;
 class BaseShape;
 class DebugScopeObject;
 class GCMarker;
 class GlobalObject;
 class LazyScript;
+class NestedScopeObject;
 class SavedFrame;
 class ScopeObject;
 class Shape;
 class UnownedBaseShape;
 
 template<class> class HeapPtr;
 
 namespace jit {
@@ -107,16 +108,17 @@ DeclMarker(JitCode, jit::JitCode)
 DeclMarker(Object, ArgumentsObject)
 DeclMarker(Object, ArrayBufferObject)
 DeclMarker(Object, ArrayBufferViewObject)
 DeclMarker(Object, SharedArrayBufferObject)
 DeclMarker(Object, DebugScopeObject)
 DeclMarker(Object, GlobalObject)
 DeclMarker(Object, JSObject)
 DeclMarker(Object, JSFunction)
+DeclMarker(Object, NestedScopeObject)
 DeclMarker(Object, SavedFrame)
 DeclMarker(Object, ScopeObject)
 DeclMarker(Script, JSScript)
 DeclMarker(LazyScript, LazyScript)
 DeclMarker(Shape, Shape)
 DeclMarker(String, JSAtom)
 DeclMarker(String, JSString)
 DeclMarker(String, JSFlatString)
--- a/js/src/gc/Tracer.cpp
+++ b/js/src/gc/Tracer.cpp
@@ -632,38 +632,36 @@ GCMarker::markBufferedGrayRoots(JS::Zone
 {
     JS_ASSERT(grayBufferState == GRAY_BUFFER_OK);
     JS_ASSERT(zone->isGCMarkingGray() || zone->isGCCompacting());
 
     for (GrayRoot *elem = zone->gcGrayRoots.begin(); elem != zone->gcGrayRoots.end(); elem++) {
 #ifdef DEBUG
         setTracingDetails(elem->debugPrinter, elem->debugPrintArg, elem->debugPrintIndex);
 #endif
-        void *tmp = elem->thing;
-        setTracingLocation((void *)&elem->thing);
-        MarkKind(this, &tmp, elem->kind);
-        JS_ASSERT(tmp == elem->thing);
+        MarkKind(this, elem->thingp, elem->kind);
     }
 }
 
 void
-GCMarker::appendGrayRoot(void *thing, JSGCTraceKind kind)
+GCMarker::appendGrayRoot(void **thingp, JSGCTraceKind kind)
 {
     JS_ASSERT(started);
 
     if (grayBufferState == GRAY_BUFFER_FAILED)
         return;
 
-    GrayRoot root(thing, kind);
+    GrayRoot root(thingp, kind);
 #ifdef DEBUG
     root.debugPrinter = debugPrinter();
     root.debugPrintArg = debugPrintArg();
     root.debugPrintIndex = debugPrintIndex();
 #endif
 
+    void *thing = *thingp;
     Zone *zone = static_cast<Cell *>(thing)->tenuredZone();
     if (zone->isCollecting()) {
         // See the comment on SetMaybeAliveFlag to see why we only do this for
         // objects and scripts. We rely on gray root buffering for this to work,
         // but we only need to worry about uncollected dead compartments during
         // incremental GCs (when we do gray root buffering).
         switch (kind) {
           case JSTRACE_OBJECT:
@@ -683,17 +681,17 @@ GCMarker::appendGrayRoot(void *thing, JS
 }
 
 void
 GCMarker::GrayCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind)
 {
     JS_ASSERT(thingp);
     JS_ASSERT(*thingp);
     GCMarker *gcmarker = static_cast<GCMarker *>(trc);
-    gcmarker->appendGrayRoot(*thingp, kind);
+    gcmarker->appendGrayRoot(thingp, kind);
 }
 
 size_t
 GCMarker::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
 {
     size_t size = stack.sizeOfExcludingThis(mallocSizeOf);
     for (ZonesIter zone(runtime(), WithAtoms); !zone.done(); zone.next())
         size += zone->gcGrayRoots.sizeOfExcludingThis(mallocSizeOf);
--- a/js/src/gc/Tracer.h
+++ b/js/src/gc/Tracer.h
@@ -270,17 +270,17 @@ class GCMarker : public JSTracer
         return stack.isEmpty();
     }
 
     bool restoreValueArray(JSObject *obj, void **vpp, void **endp);
     void saveValueRanges();
     inline void processMarkStackTop(SliceBudget &budget);
     void processMarkStackOther(uintptr_t tag, uintptr_t addr);
 
-    void appendGrayRoot(void *thing, JSGCTraceKind kind);
+    void appendGrayRoot(void **thingp, JSGCTraceKind kind);
 
     /* The color is only applied to objects and functions. */
     uint32_t color;
 
     /* Pointer to the top of the stack of arenas we are delaying marking on. */
     js::gc::ArenaHeader *unmarkedArenaStackTop;
 
     /* Count of arenas that are currently in the stack. */
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -622,16 +622,27 @@ JSCompartment::sweep(FreeOp *fop, bool r
     NativeIterator *ni = enumerators->next();
     while (ni != enumerators) {
         JSObject *iterObj = ni->iterObj();
         NativeIterator *next = ni->next();
         if (gc::IsObjectAboutToBeFinalized(&iterObj))
             ni->unlink();
         ni = next;
     }
+
+    /* For each debuggee being GC'd, detach it from all its debuggers. */
+    for (GlobalObjectSet::Enum e(debuggees); !e.empty(); e.popFront()) {
+        GlobalObject *global = e.front();
+        if (IsObjectAboutToBeFinalized(&global)) {
+            // See infallibility note above.
+            Debugger::detachAllDebuggersFromGlobal(fop, global, &e);
+        } else if (global != e.front()) {
+            e.rekeyFront(global);
+        }
+    }
 }
 
 /*
  * Remove dead wrappers from the table. We must sweep all compartments, since
  * string entries in the crossCompartmentWrappers table are not marked during
  * markCrossCompartmentWrappers.
  */
 void
@@ -876,29 +887,27 @@ JSCompartment::updateJITForDebugMode(JSC
     // The AutoDebugModeInvalidation argument makes sure we can't forget to
     // invalidate, but it is also important not to run any scripts in this
     // compartment until the invalidate is destroyed.  That is the caller's
     // responsibility.
     return jit::UpdateForDebugMode(maybecx, this, invalidate);
 }
 
 bool
-JSCompartment::addDebuggee(JSContext *cx, js::GlobalObject *global)
+JSCompartment::addDebuggee(JSContext *cx, JS::Handle<js::GlobalObject *> global)
 {
     AutoDebugModeInvalidation invalidate(this);
     return addDebuggee(cx, global, invalidate);
 }
 
 bool
 JSCompartment::addDebuggee(JSContext *cx,
-                           GlobalObject *globalArg,
+                           JS::Handle<GlobalObject *> global,
                            AutoDebugModeInvalidation &invalidate)
 {
-    Rooted<GlobalObject*> global(cx, globalArg);
-
     bool wasEnabled = debugMode();
     if (!debuggees.put(global)) {
         js_ReportOutOfMemory(cx);
         return false;
     }
     debugModeBits |= DebugFromJS;
     if (!wasEnabled && !updateJITForDebugMode(cx, invalidate))
         return false;
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -418,18 +418,18 @@ struct JSCompartment
 
   private:
 
     /* This is called only when debugMode() has just toggled. */
     bool updateJITForDebugMode(JSContext *maybecx, js::AutoDebugModeInvalidation &invalidate);
 
   public:
     js::GlobalObjectSet &getDebuggees() { return debuggees; }
-    bool addDebuggee(JSContext *cx, js::GlobalObject *global);
-    bool addDebuggee(JSContext *cx, js::GlobalObject *global,
+    bool addDebuggee(JSContext *cx, JS::Handle<js::GlobalObject *> global);
+    bool addDebuggee(JSContext *cx, JS::Handle<js::GlobalObject *> global,
                      js::AutoDebugModeInvalidation &invalidate);
     bool removeDebuggee(JSContext *cx, js::GlobalObject *global,
                         js::GlobalObjectSet::Enum *debuggeesEnum = nullptr);
     bool removeDebuggee(JSContext *cx, js::GlobalObject *global,
                         js::AutoDebugModeInvalidation &invalidate,
                         js::GlobalObjectSet::Enum *debuggeesEnum = nullptr);
     void removeDebuggeeUnderGC(js::FreeOp *fop, js::GlobalObject *global,
                                js::GlobalObjectSet::Enum *debuggeesEnum = nullptr);
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1153,16 +1153,19 @@ GCRuntime::GCRuntime(JSRuntime *rt) :
     arenasAllocatedDuringSweep(nullptr),
 #ifdef JS_GC_MARKING_VALIDATION
     markingValidator(nullptr),
 #endif
     interFrameGC(0),
     sliceBudget(SliceBudget::Unlimited),
     incrementalAllowed(true),
     generationalDisabled(0),
+#ifdef JSGC_COMPACTING
+    compactingDisabled(0),
+#endif
     manipulatingDeadZones(false),
     objectsMarkedInDeadZones(0),
     poked(false),
     heapState(Idle),
 #ifdef JS_GC_ZEAL
     zealMode(0),
     zealFrequency(0),
     nextScheduled(0),
@@ -1999,24 +2002,48 @@ ArenaLists::wipeDuringParallelExecution(
 }
 
 /* Compacting GC */
 
 bool
 GCRuntime::shouldCompact()
 {
 #ifdef JSGC_COMPACTING
-    return invocationKind == GC_SHRINK;
+    return invocationKind == GC_SHRINK && !compactingDisabled;
 #else
     return false;
 #endif
 }
 
 #ifdef JSGC_COMPACTING
 
+void
+GCRuntime::disableCompactingGC()
+{
+    ++rt->gc.compactingDisabled;
+}
+
+void
+GCRuntime::enableCompactingGC()
+{
+    JS_ASSERT(compactingDisabled > 0);
+    --compactingDisabled;
+}
+
+AutoDisableCompactingGC::AutoDisableCompactingGC(JSRuntime *rt)
+  : gc(rt->gc)
+{
+    gc.disableCompactingGC();
+}
+
+AutoDisableCompactingGC::~AutoDisableCompactingGC()
+{
+    gc.enableCompactingGC();
+}
+
 static void
 ForwardCell(Cell *dest, Cell *src)
 {
     // Mark a cell has having been relocated and astore forwarding pointer to
     // the new cell.
     MOZ_ASSERT(src->tenuredZone() == dest->tenuredZone());
 
     // Putting the values this way round is a terrible hack to make
@@ -2043,19 +2070,21 @@ ArenaContainsGlobal(ArenaHeader *arena)
 }
 
 static bool
 CanRelocateArena(ArenaHeader *arena)
 {
     /*
      * We can't currently move global objects because their address is baked
      * into compiled code. We therefore skip moving the contents of any arena
-     * containing a global.
+     * containing a global if ion or baseline are enabled.
      */
-    return arena->getAllocKind() <= FINALIZE_OBJECT_LAST && !ArenaContainsGlobal(arena);
+    JSRuntime *rt = arena->zone->runtimeFromMainThread();
+    return arena->getAllocKind() <= FINALIZE_OBJECT_LAST &&
+        ((!rt->options().baseline() && !rt->options().ion()) || !ArenaContainsGlobal(arena));
 }
 
 static bool
 ShouldRelocateArena(ArenaHeader *arena)
 {
 #ifdef JS_GC_ZEAL
     if (arena->zone->runtimeFromMainThread()->gc.zeal() == ZealCompactValue)
         return true;
@@ -2098,45 +2127,69 @@ ArenaList::pickArenasToRelocate()
             arenap = &arena->next;
         }
     }
 
     check();
     return head;
 }
 
+#ifdef DEBUG
+inline bool
+PtrIsInRange(void *ptr, void *start, size_t length)
+{
+    return uintptr_t(ptr) - uintptr_t(start) < length;
+}
+#endif
+
 static bool
 RelocateCell(Zone *zone, Cell *src, AllocKind thingKind, size_t thingSize)
 {
     // Allocate a new cell.
     void *dst = zone->allocator.arenas.allocateFromFreeList(thingKind, thingSize);
     if (!dst)
         dst = js::gc::ArenaLists::refillFreeListInGC(zone, thingKind);
     if (!dst)
         return false;
 
     // Copy source cell contents to destination.
     memcpy(dst, src, thingSize);
 
-    // Mark source cell as forwarded and leave a pointer to the destination.
-    ForwardCell(static_cast<Cell *>(dst), src);
-
     // Fixup the pointer to inline object elements if necessary.
     if (thingKind <= FINALIZE_OBJECT_LAST) {
         JSObject *srcObj = static_cast<JSObject *>(src);
         JSObject *dstObj = static_cast<JSObject *>(dst);
         if (srcObj->hasFixedElements())
             dstObj->setFixedElements();
-        JS_ASSERT(
-            uintptr_t((HeapSlot*)dstObj->getElementsHeader()) - uintptr_t(srcObj) >= thingSize);
+
+        if (srcObj->is<ArrayBufferObject>()) {
+            // We must fix up any inline data pointers while we know the source
+            // object and before we mark any of the views.
+            ArrayBufferObject::fixupDataPointerAfterMovingGC(
+                srcObj->as<ArrayBufferObject>(), dstObj->as<ArrayBufferObject>());
+        } else if (srcObj->is<TypedArrayObject>()) {
+            TypedArrayObject &typedArray = srcObj->as<TypedArrayObject>();
+            if (!typedArray.hasBuffer()) {
+                JS_ASSERT(srcObj->getPrivate() ==
+                          srcObj->fixedData(TypedArrayObject::FIXED_DATA_START));
+                dstObj->setPrivate(dstObj->fixedData(TypedArrayObject::FIXED_DATA_START));
+            }
+        }
+
+
+        JS_ASSERT_IF(dstObj->isNative(),
+                     !PtrIsInRange((HeapSlot*)dstObj->getDenseElements(), src, thingSize));
     }
 
     // Copy the mark bits.
     static_cast<Cell *>(dst)->copyMarkBitsFrom(src);
 
+    // Mark source cell as forwarded and leave a pointer to the destination.
+    ForwardCell(static_cast<Cell *>(dst), src);
+
     return true;
 }
 
 static bool
 RelocateArena(ArenaHeader *aheader)
 {
     JS_ASSERT(aheader->allocated());
     JS_ASSERT(!aheader->hasDelayedMarking);
@@ -2246,20 +2299,22 @@ struct MovingTracer : JSTracer {
     static void Visit(JSTracer *jstrc, void **thingp, JSGCTraceKind kind);
     static void Sweep(JSTracer *jstrc);
 };
 
 void
 MovingTracer::Visit(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
 {
     Cell *thing = static_cast<Cell *>(*thingp);
-    if (!thing->tenuredZone()->isGCCompacting()) {
+    Zone *zone = thing->tenuredZoneFromAnyThread();
+    if (!zone->isGCCompacting()) {
         JS_ASSERT(!IsForwarded(thing));
         return;
     }
+    JS_ASSERT(CurrentThreadCanAccessZone(zone));
 
     if (IsForwarded(thing)) {
         Cell *dst = Forwarded(thing);
         *thingp = dst;
     }
 }
 
 void
@@ -2275,27 +2330,29 @@ MovingTracer::Sweep(JSTracer *jstrc)
     for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
         if (zone->isCollecting()) {
             bool oom = false;
             zone->sweep(fop, false, &oom);
             JS_ASSERT(!oom);
 
             for (CompartmentsInZoneIter c(zone); !c.done(); c.next()) {
                 c->sweep(fop, false);
-                ArrayBufferObject::sweep(c);
             }
         } else {
             /* Update cross compartment wrappers into moved zones. */
             for (CompartmentsInZoneIter c(zone); !c.done(); c.next())
                 c->sweepCrossCompartmentWrappers();
         }
     }
 
     /* Type inference may put more blocks here to free. */
     rt->freeLifoAlloc.freeAll();
+
+    /* Clear the new object cache as this can contain cell pointers. */
+    rt->newObjectCache.purge();
 }
 
 /*
  * Update the interal pointers in a single cell.
  */
 static void
 UpdateCellPointers(MovingTracer *trc, Cell *cell, JSGCTraceKind traceKind) {
     TraceChildren(trc, cell, traceKind);
@@ -2354,22 +2411,25 @@ GCRuntime::updatePointersToRelocatedCell
     }
 
     // Mark roots to update them.
     markRuntime(&trc, MarkRuntime);
     Debugger::markAll(&trc);
     Debugger::markCrossCompartmentDebuggerObjectReferents(&trc);
 
     for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
+        WeakMapBase::markAll(c, &trc);
         if (c->watchpointMap)
             c->watchpointMap->markAll(&trc);
     }
 
+    // Mark all gray roots, making sure we call the trace callback to get the
+    // current set.
+    marker.resetBufferedGrayRoots();
     markAllGrayReferences(gcstats::PHASE_COMPACT_UPDATE_GRAY);
-    markAllWeakReferences(gcstats::PHASE_COMPACT_UPDATE_GRAY);
 
     MovingTracer::Sweep(&trc);
 }
 
 void
 GCRuntime::releaseRelocatedArenas(ArenaHeader *relocatedList)
 {
     // Release the relocated arenas, now containing only forwarding pointers
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -1133,26 +1133,26 @@ struct GCChunkHasher {
         JS_ASSERT(!(uintptr_t(l) & gc::ChunkMask));
         return k == l;
     }
 };
 
 typedef HashSet<js::gc::Chunk *, GCChunkHasher, SystemAllocPolicy> GCChunkSet;
 
 struct GrayRoot {
-    void *thing;
+    void **thingp;
     JSGCTraceKind kind;
 #ifdef DEBUG
     JSTraceNamePrinter debugPrinter;
     const void *debugPrintArg;
     size_t debugPrintIndex;
 #endif
 
-    GrayRoot(void *thing, JSGCTraceKind kind)
-        : thing(thing), kind(kind) {}
+    GrayRoot(void **thingp, JSGCTraceKind kind)
+        : thingp(thingp), kind(kind) {}
 };
 
 void
 MarkStackRangeConservatively(JSTracer *trc, Value *begin, Value *end);
 
 typedef void (*IterateChunkCallback)(JSRuntime *rt, void *data, gc::Chunk *chunk);
 typedef void (*IterateZoneCallback)(JSRuntime *rt, void *data, JS::Zone *zone);
 typedef void (*IterateArenaCallback)(JSRuntime *rt, void *data, gc::Arena *arena,
@@ -1422,16 +1422,30 @@ class AutoDisableProxyCheck
 };
 #else
 struct AutoDisableProxyCheck
 {
     explicit AutoDisableProxyCheck(JSRuntime *rt) {}
 };
 #endif
 
+struct AutoDisableCompactingGC
+{
+#ifdef JSGC_COMPACTING
+    explicit AutoDisableCompactingGC(JSRuntime *rt);
+    ~AutoDisableCompactingGC();
+
+  private:
+    gc::GCRuntime &gc;
+#else
+    explicit AutoDisableCompactingGC(JSRuntime *rt) {}
+    ~AutoDisableCompactingGC() {}
+#endif
+};
+
 void
 PurgeJITCaches(JS::Zone *zone);
 
 // This is the same as IsInsideNursery, but not inlined.
 bool
 UninlinedIsInsideNursery(const gc::Cell *cell);
 
 } /* namespace js */
--- a/js/src/jshashutil.h
+++ b/js/src/jshashutil.h
@@ -8,17 +8,17 @@
 #define jshashutil_h
 
 #include "jscntxt.h"
 
 namespace js {
 
 /*
  * Used to add entries to a js::HashMap or HashSet where the key depends on a GC
- * thing that may be moved by generational collection between the call to
+ * thing that may be moved by generational or compacting GC between the call to
  * lookupForAdd() and relookupOrAdd().
  */
 template <class T>
 struct DependentAddPtr
 {
     typedef typename T::AddPtr AddPtr;
     typedef typename T::Entry Entry;
 
--- a/js/src/jsweakmap.cpp
+++ b/js/src/jsweakmap.cpp
@@ -63,16 +63,23 @@ WeakMapBase::trace(JSTracer *tracer)
 
 void
 WeakMapBase::unmarkCompartment(JSCompartment *c)
 {
     for (WeakMapBase *m = c->gcWeakMapList; m; m = m->next)
         m->marked = false;
 }
 
+void
+WeakMapBase::markAll(JSCompartment *c, JSTracer *tracer)
+{
+    for (WeakMapBase *m = c->gcWeakMapList; m; m = m->next)
+        m->markIteratively(tracer);
+}
+
 bool
 WeakMapBase::markCompartmentIteratively(JSCompartment *c, JSTracer *tracer)
 {
     bool markedAny = false;
     for (WeakMapBase *m = c->gcWeakMapList; m; m = m->next) {
         if (m->marked && m->markIteratively(tracer))
             markedAny = true;
     }
--- a/js/src/jsweakmap.h
+++ b/js/src/jsweakmap.h
@@ -44,16 +44,19 @@ class WeakMapBase {
 
     void trace(JSTracer *tracer);
 
     // Garbage collector entry points.
 
     // Unmark all weak maps in a compartment.
     static void unmarkCompartment(JSCompartment *c);
 
+    // Mark all the weakmaps in a compartment.
+    static void markAll(JSCompartment *c, JSTracer *tracer);
+
     // Check all weak maps in a compartment that have been marked as live in this garbage
     // collection, and mark the values of all entries that have become strong references
     // to them. Return true if we marked any new values, indicating that we need to make
     // another pass. In other words, mark my marked maps' marked members' mid-collection.
     static bool markCompartmentIteratively(JSCompartment *c, JSTracer *tracer);
 
     // Add zone edges for weakmaps with key delegates in a different zone.
     static bool findZoneEdgesForCompartment(JSCompartment *c);
--- a/js/src/shell/jsheaptools.cpp
+++ b/js/src/shell/jsheaptools.cpp
@@ -152,28 +152,30 @@ class HeapReverser : public JSTracer, pu
     typedef HashMap<void *, Node, DefaultHasher<void *>, SystemAllocPolicy> Map;
     Map map;
 
     /* Construct a HeapReverser for |context|'s heap. */
     explicit HeapReverser(JSContext *cx)
       : JSTracer(cx->runtime(), traverseEdgeWithThis),
         JS::CustomAutoRooter(cx),
         noggc(JS_GetRuntime(cx)),
+        nocgc(JS_GetRuntime(cx)),
         runtime(JS_GetRuntime(cx)),
         parent(nullptr)
     {
     }
 
     bool init() { return map.init(); }
 
     /* Build a reversed map of the heap in |map|. */
     bool reverseHeap();
 
   private:
     JS::AutoDisableGenerationalGC noggc;
+    js::AutoDisableCompactingGC nocgc;
 
     /* A runtime pointer for use by the destructor. */
     JSRuntime *runtime;
 
     /*
      * Return the name of the most recent edge this JSTracer has traversed. The
      * result is allocated with malloc; if we run out of memory, raise an error
      * in this HeapReverser's context and return nullptr.
--- a/js/src/vm/ArrayBufferObject.cpp
+++ b/js/src/vm/ArrayBufferObject.cpp
@@ -927,16 +927,25 @@ ArrayBufferObject::sweep(JSCompartment *
         }
 
         buffer->setViewList(prevLiveView);
     }
 
     gcLiveArrayBuffers.clear();
 }
 
+/* static */ void
+ArrayBufferObject::fixupDataPointerAfterMovingGC(const ArrayBufferObject &src, ArrayBufferObject &dst)
+{
+    // Fix up possible inline data pointer.
+    const size_t reservedSlots = JSCLASS_RESERVED_SLOTS(&ArrayBufferObject::class_);
+    if (src.dataPointer() == src.fixedData(reservedSlots))
+        dst.setSlot(DATA_SLOT, PrivateValue(dst.fixedData(reservedSlots)));
+}
+
 void
 ArrayBufferObject::resetArrayBufferList(JSCompartment *comp)
 {
     ArrayBufferVector &gcLiveArrayBuffers = comp->gcLiveArrayBuffers;
 
     for (size_t i = 0; i < gcLiveArrayBuffers.length(); i++) {
         ArrayBufferObject *buffer = gcLiveArrayBuffers[i];
 
@@ -987,17 +996,17 @@ ArrayBufferObject::restoreArrayBufferLis
 ArrayBufferViewObject::trace(JSTracer *trc, JSObject *obj)
 {
     HeapSlot &bufSlot = obj->getReservedSlotRef(BUFFER_SLOT);
     MarkSlot(trc, &bufSlot, "typedarray.buffer");
 
     // Update obj's data pointer if the array buffer moved. Note that during
     // initialization, bufSlot may still contain |undefined|.
     if (bufSlot.isObject()) {
-        ArrayBufferObject &buf = AsArrayBuffer(&bufSlot.toObject());
+        ArrayBufferObject &buf = AsArrayBuffer(MaybeForwarded(&bufSlot.toObject()));
         int32_t offset = obj->getReservedSlot(BYTEOFFSET_SLOT).toInt32();
         MOZ_ASSERT(buf.dataPointer() != nullptr);
         obj->initPrivate(buf.dataPointer() + offset);
     }
 
     /* Update NEXT_VIEW_SLOT, if the view moved. */
     IsSlotMarked(&obj->getReservedSlotRef(NEXT_VIEW_SLOT));
 }
--- a/js/src/vm/ArrayBufferObject.h
+++ b/js/src/vm/ArrayBufferObject.h
@@ -90,16 +90,18 @@ class ArrayBufferObject : public JSObjec
 
     template<typename T>
     static bool createTypedArrayFromBuffer(JSContext *cx, unsigned argc, Value *vp);
 
     static void obj_trace(JSTracer *trc, JSObject *obj);
 
     static void sweep(JSCompartment *rt);
 
+    static void fixupDataPointerAfterMovingGC(const ArrayBufferObject &src, ArrayBufferObject &dst);
+
     static void resetArrayBufferList(JSCompartment *rt);
     static bool saveArrayBufferList(JSCompartment *c, ArrayBufferVector &vector);
     static void restoreArrayBufferLists(ArrayBufferVector &vector);
 
     static void *stealContents(JSContext *cx, Handle<ArrayBufferObject*> buffer);
 
     bool hasStealableContents() const {
         // Inline elements strictly adhere to the corresponding buffer.
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -1745,30 +1745,16 @@ Debugger::sweepAll(FreeOp *fop)
             for (GlobalObjectSet::Enum e(dbg->debuggees); !e.empty(); e.popFront()) {
                 // We can't recompile on-stack scripts here, and we
                 // can only toggle debug mode to off, so we use an
                 // infallible variant of removeDebuggeeGlobal.
                 dbg->removeDebuggeeGlobalUnderGC(fop, e.front(), nullptr, &e);
             }
         }
     }
-
-    for (gc::GCCompartmentGroupIter comp(rt); !comp.done(); comp.next()) {
-        /* For each debuggee being GC'd, detach it from all its debuggers. */
-        GlobalObjectSet &debuggees = comp->getDebuggees();
-        for (GlobalObjectSet::Enum e(debuggees); !e.empty(); e.popFront()) {
-            GlobalObject *global = e.front();
-            if (IsObjectAboutToBeFinalized(&global)) {
-                // See infallibility note above.
-                detachAllDebuggersFromGlobal(fop, global, &e);
-            } else if (global != e.front()) {
-                e.rekeyFront(global);
-            }
-        }
-    }
 }
 
 void
 Debugger::detachAllDebuggersFromGlobal(FreeOp *fop, GlobalObject *global,
                                        GlobalObjectSet::Enum *compartmentEnum)
 {
     const GlobalObject::DebuggerVector *debuggers = global->getDebuggers();
     JS_ASSERT(!debuggers->empty());
--- a/js/src/vm/SavedStacks.cpp
+++ b/js/src/vm/SavedStacks.cpp
@@ -5,16 +5,17 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 
 #include "vm/SavedStacks.h"
 
 #include "jsapi.h"
 #include "jscompartment.h"
 #include "jsfriendapi.h"
+#include "jshashutil.h"
 #include "jsnum.h"
 
 #include "gc/Marking.h"
 #include "js/Vector.h"
 #include "vm/Debugger.h"
 #include "vm/GlobalObject.h"
 #include "vm/StringBuffer.h"
 
@@ -557,25 +558,25 @@ SavedStacks::insertFrames(JSContext *cx,
 
     frame.set(parentFrame);
     return true;
 }
 
 SavedFrame *
 SavedStacks::getOrCreateSavedFrame(JSContext *cx, SavedFrame::HandleLookup lookup)
 {
-    SavedFrame::Set::AddPtr p = frames.lookupForAdd(lookup);
+    DependentAddPtr<SavedFrame::Set> p(cx, frames, lookup);
     if (p)
         return *p;
 
     RootedSavedFrame frame(cx, createFrameFromLookup(cx, lookup));
     if (!frame)
         return nullptr;
 
-    if (!frames.relookupOrAdd(p, lookup, frame))
+    if (!p.add(cx, frames, lookup, frame))
         return nullptr;
 
     return frame;
 }
 
 JSObject *
 SavedStacks::getOrCreateSavedFramePrototype(JSContext *cx)
 {
--- a/js/src/vm/ScopeObject.cpp
+++ b/js/src/vm/ScopeObject.cpp
@@ -1065,16 +1065,25 @@ ScopeIterKey::match(ScopeIterKey si1, Sc
     /* hasScopeObject_ is determined by the other fields. */
     return si1.frame_ == si2.frame_ &&
            (!si1.frame_ ||
             (si1.cur_   == si2.cur_   &&
              si1.staticScope_ == si2.staticScope_ &&
              si1.type_  == si2.type_));
 }
 
+void
+ScopeIterVal::sweep()
+{
+    /* We need to update possibly moved pointers on sweep. */
+    MOZ_ALWAYS_FALSE(IsObjectAboutToBeFinalized(cur_.unsafeGet()));
+    if (staticScope_)
+        MOZ_ALWAYS_FALSE(IsObjectAboutToBeFinalized(staticScope_.unsafeGet()));
+}
+
 // Live ScopeIter values may be added to DebugScopes::liveScopes, as
 // ScopeIterVal instances.  They need to have write barriers when they are added
 // to the hash table, but no barriers when rehashing inside GC.  It's a nasty
 // hack, but the important thing is that ScopeIterKey and ScopeIterVal need to
 // alias each other.
 void ScopeIterVal::staticAsserts() {
     static_assert(sizeof(ScopeIterVal) == sizeof(ScopeIterKey),
                   "ScopeIterVal must be same size of ScopeIterKey");
@@ -1787,28 +1796,30 @@ DebugScopes::sweep(JSRuntime *rt)
             e.removeFront();
         } else {
             ScopeIterKey key = e.front().key();
             bool needsUpdate = false;
             if (IsForwarded(key.cur())) {
                 key.updateCur(js::gc::Forwarded(key.cur()));
                 needsUpdate = true;
             }
-            if (IsForwarded(key.staticScope())) {
+            if (key.staticScope() && IsForwarded(key.staticScope())) {
                 key.updateStaticScope(Forwarded(key.staticScope()));
                 needsUpdate = true;
             }
             if (needsUpdate)
                 e.rekeyFront(key);
         }
     }
 
     for (LiveScopeMap::Enum e(liveScopes); !e.empty(); e.popFront()) {
         ScopeObject *scope = e.front().key();
 
+        e.front().value().sweep();
+
         /*
          * Scopes can be finalized when a debugger-synthesized ScopeObject is
          * no longer reachable via its DebugScopeObject.
          */
         if (IsObjectAboutToBeFinalized(&scope))
             e.removeFront();
         else if (scope != e.front().key())
             e.rekeyFront(scope);
--- a/js/src/vm/ScopeObject.h
+++ b/js/src/vm/ScopeObject.h
@@ -728,16 +728,18 @@ class ScopeIterVal
     friend class DebugScopes;
 
     AbstractFramePtr frame_;
     RelocatablePtrObject cur_;
     RelocatablePtrNestedScopeObject staticScope_;
     ScopeIter::Type type_;
     bool hasScopeObject_;
 
+    void sweep();
+
     static void staticAsserts();
 
   public:
     explicit ScopeIterVal(const ScopeIter &si)
       : frame_(si.frame()), cur_(si.cur_), staticScope_(si.staticScope_), type_(si.type_),
         hasScopeObject_(si.hasScopeObject_) {}
 
     AbstractFramePtr frame() const { return frame_; }
--- a/js/src/vm/Shape.cpp
+++ b/js/src/vm/Shape.cpp
@@ -440,17 +440,17 @@ JSObject::lookupChildProperty(ThreadSafe
 
 bool
 js::ObjectImpl::toDictionaryMode(ThreadSafeContext *cx)
 {
     JS_ASSERT(!inDictionaryMode());
 
 #ifdef JSGC_COMPACTING
     // TODO: This crashes if we run a compacting GC here.
-    js::gc::AutoSuppressGC nogc(zone()->runtimeFromAnyThread());
+    js::AutoDisableCompactingGC nogc(zone()->runtimeFromAnyThread());
 #endif
 
     /* We allocate the shapes from cx->compartment(), so make sure it's right. */
     JS_ASSERT(cx->isInsideCurrentCompartment(this));
 
     /*
      * This function is thread safe as long as the object is thread local. It
      * does not modify the shared shapes, and only allocates newly allocated
--- a/js/src/vm/TypedArrayObject.h
+++ b/js/src/vm/TypedArrayObject.h
@@ -75,16 +75,19 @@ class TypedArrayObject : public ArrayBuf
     static Value lengthValue(TypedArrayObject *tarr) {
         return tarr->getFixedSlot(LENGTH_SLOT);
     }
 
     static bool
     ensureHasBuffer(JSContext *cx, Handle<TypedArrayObject *> tarray);
 
     ArrayBufferObject *sharedBuffer() const;
+    bool hasBuffer() const {
+        return bufferValue(const_cast<TypedArrayObject*>(this)).isObject();
+    }
     ArrayBufferObject *buffer() const {
         JSObject *obj = bufferValue(const_cast<TypedArrayObject*>(this)).toObjectOrNull();
         if (!obj)
             return nullptr;
         if (obj->is<ArrayBufferObject>())
             return &obj->as<ArrayBufferObject>();
         return sharedBuffer();
     }