Bug 1064578 - Sweep tables in parallel; r=jonco r=bhackett
author      Terrence Cole <terrence@mozilla.com>
Fri, 12 Sep 2014 17:32:51 -0700
changeset 209034 8be54e6c4dcd2f69d8c27ab6038e828dee126f63
parent 209033 b2d92cb448730745ab54c54167cd423ae8347469
child 209035 f0e7ea124644f0985954cf25f720a1185497ac94
push id     27600
push user   ryanvm@gmail.com
push date   Mon, 06 Oct 2014 21:11:44 +0000
reviewers   jonco, bhackett
bugs        1064578
milestone   35.0a1
Bug 1064578 - Sweep tables in parallel; r=jonco r=bhackett
js/src/gc/GCRuntime.h
js/src/gc/Marking.cpp
js/src/gc/Marking.h
js/src/gc/Statistics.cpp
js/src/gc/Statistics.h
js/src/jsatom.cpp
js/src/jscntxt.cpp
js/src/jscompartment.cpp
js/src/jscompartment.h
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsinfer.cpp
js/src/vm/ArrayBufferObject.cpp
js/src/vm/HelperThreads.cpp
js/src/vm/HelperThreads.h
js/src/vm/RegExpObject.cpp
js/src/vm/SavedStacks.cpp
js/src/vm/ScopeObject.cpp
js/src/vm/Shape.cpp
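
For orientation: the heart of this patch is a start/join pattern in GCRuntime::beginSweepingZoneGroup(). Each table-sweeping action is wrapped in a GCParallelTask subclass, dispatched to a helper thread when one is available (falling back to the main thread otherwise), and joined before its results are needed. Below is a minimal, self-contained sketch of that pattern using std::thread in place of SpiderMonkey's helper-thread machinery; the names ParallelTask and SweepTableTask are illustrative only, not from the patch.

    // Illustrative analogue of the GCParallelTask start/join pattern.
    // The real patch dispatches through GlobalHelperThreadState and
    // falls back to runFromMainThread() when no helper threads exist.
    #include <chrono>
    #include <cstdio>
    #include <thread>

    class ParallelTask {
        std::thread thread_;
        long long durationUs_ = 0;     // written by run(), read after join()

      protected:
        virtual void run() = 0;

      public:
        virtual ~ParallelTask() = default;

        void start() {                 // dispatch the work to another thread
            thread_ = std::thread([this] {
                auto t0 = std::chrono::steady_clock::now();
                run();
                durationUs_ = std::chrono::duration_cast<std::chrono::microseconds>(
                                  std::chrono::steady_clock::now() - t0).count();
            });
        }
        void join() {                  // block until the work has finished
            if (thread_.joinable())
                thread_.join();
        }
        long long durationUs() const { return durationUs_; }
    };

    class SweepTableTask : public ParallelTask {
        void run() override { /* sweep one weak table here */ }
    };

    int main() {
        SweepTableTask sweepTable;
        sweepTable.start();            // table sweeping begins off-thread
        /* ... the main thread sweeps other state concurrently ... */
        sweepTable.join();             // results are safe to use from here on
        std::printf("table sweep took %lld us\n", sweepTable.durationUs());
    }
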
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -693,16 +693,22 @@ class GCRuntime
     JS::Zone              *zoneGroups;
     JS::Zone              *currentZoneGroup;
     int                   finalizePhase;
     JS::Zone              *sweepZone;
     int                   sweepKindIndex;
     bool                  abortSweepAfterCurrentGroup;
 
     /*
+     * Concurrent sweep infrastructure.
+     */
+    void startTask(GCParallelTask &task, gcstats::Phase phase);
+    void joinTask(GCParallelTask &task, gcstats::Phase phase);
+
+    /*
      * List head of arenas allocated during the sweep phase.
      */
     js::gc::ArenaHeader   *arenasAllocatedDuringSweep;
 
 #ifdef JS_GC_MARKING_VALIDATION
     js::gc::MarkingValidator *markingValidator;
 #endif
 
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -467,16 +467,25 @@ IsMarked(T **thingp)
 #endif
     return (*thingp)->asTenured().isMarked();
 }
 
 template <typename T>
 static bool
 IsAboutToBeFinalized(T **thingp)
 {
+    MOZ_ASSERT_IF(!ThingIsPermanentAtom(*thingp),
+                  CurrentThreadCanAccessRuntime((*thingp)->runtimeFromMainThread()));
+    return IsAboutToBeFinalizedFromAnyThread(thingp);
+}
+
+template <typename T>
+static bool
+IsAboutToBeFinalizedFromAnyThread(T **thingp)
+{
     MOZ_ASSERT(thingp);
     MOZ_ASSERT(*thingp);
 
     T *thing = *thingp;
     JSRuntime *rt = thing->runtimeFromAnyThread();
 
     /* Permanent atoms are never finalized by non-owning runtimes. */
     if (ThingIsPermanentAtom(thing) && !TlsPerThreadData.get()->associatedWith(rt))
@@ -498,17 +507,17 @@ IsAboutToBeFinalized(T **thingp)
         if (rt->isHeapMinorCollecting()) {
             if (IsInsideNursery(thing))
                 return !nursery.getForwardedPointer(thingp);
             return false;
         }
     }
 #endif  // JSGC_GENERATIONAL
 
-    Zone *zone = thing->asTenured().zone();
+    Zone *zone = thing->asTenured().zoneFromAnyThread();
     if (zone->isGCSweeping()) {
         /*
          * We should return false for things that have been allocated during
          * incremental sweeping, but this possibility doesn't occur at the moment
          * because this function is only called at the very start of sweeping a
          * compartment group and during minor gc. Rather than do the extra check,
          * we just assert that it's not necessary.
          */
@@ -611,16 +620,22 @@ Is##base##Marked(BarrieredBase<type*> *t
                                                                                                   \
 bool                                                                                              \
 Is##base##AboutToBeFinalized(type **thingp)                                                       \
 {                                                                                                 \
     return IsAboutToBeFinalized<type>(thingp);                                                    \
 }                                                                                                 \
                                                                                                   \
 bool                                                                                              \
+Is##base##AboutToBeFinalizedFromAnyThread(type **thingp)                                          \
+{                                                                                                 \
+    return IsAboutToBeFinalizedFromAnyThread<type>(thingp);                                       \
+}                                                                                                 \
+                                                                                                  \
+bool                                                                                              \
 Is##base##AboutToBeFinalized(BarrieredBase<type*> *thingp)                                        \
 {                                                                                                 \
     return IsAboutToBeFinalized<type>(thingp->unsafeGet());                                       \
 }                                                                                                 \
                                                                                                   \
 type *                                                                                            \
 Update##base##IfRelocated(JSRuntime *rt, BarrieredBase<type*> *thingp)                            \
 {                                                                                                 \
@@ -905,16 +920,38 @@ gc::IsValueAboutToBeFinalized(Value *v)
         MOZ_ASSERT(v->isSymbol());
         JS::Symbol *sym = v->toSymbol();
         rv = IsAboutToBeFinalized<JS::Symbol>(&sym);
         v->setSymbol(sym);
     }
     return rv;
 }
 
+bool
+gc::IsValueAboutToBeFinalizedFromAnyThread(Value *v)
+{
+    MOZ_ASSERT(v->isMarkable());
+    bool rv;
+    if (v->isString()) {
+        JSString *str = (JSString *)v->toGCThing();
+        rv = IsAboutToBeFinalizedFromAnyThread<JSString>(&str);
+        v->setString(str);
+    } else if (v->isObject()) {
+        JSObject *obj = (JSObject *)v->toGCThing();
+        rv = IsAboutToBeFinalizedFromAnyThread<JSObject>(&obj);
+        v->setObject(*obj);
+    } else {
+        MOZ_ASSERT(v->isSymbol());
+        JS::Symbol *sym = v->toSymbol();
+        rv = IsAboutToBeFinalizedFromAnyThread<JS::Symbol>(&sym);
+        v->setSymbol(sym);
+    }
+    return rv;
+}
+
 /*** Slot Marking ***/
 
 bool
 gc::IsSlotMarked(HeapSlot *s)
 {
     return IsMarked(s);
 }
 
@@ -1026,16 +1063,22 @@ gc::IsCellMarked(Cell **thingp)
 }
 
 bool
 gc::IsCellAboutToBeFinalized(Cell **thingp)
 {
     return IsAboutToBeFinalized<Cell>(thingp);
 }
 
+bool
+gc::IsCellAboutToBeFinalizedFromAnyThread(Cell **thingp)
+{
+    return IsAboutToBeFinalizedFromAnyThread<Cell>(thingp);
+}
+
 /*** Push Mark Stack ***/
 
 #define JS_COMPARTMENT_ASSERT(rt, thing)                                \
     MOZ_ASSERT((thing)->zone()->isGCMarking())
 
 #define JS_COMPARTMENT_ASSERT_STR(rt, thing)                            \
     MOZ_ASSERT((thing)->zone()->isGCMarking() ||                        \
                (rt)->isAtomsZone((thing)->zone()));
--- a/js/src/gc/Marking.h
+++ b/js/src/gc/Marking.h
@@ -93,16 +93,17 @@ namespace gc {
 void Mark##base(JSTracer *trc, BarrieredBase<type*> *thing, const char *name);                    \
 void Mark##base##Root(JSTracer *trc, type **thingp, const char *name);                            \
 void Mark##base##Unbarriered(JSTracer *trc, type **thingp, const char *name);                     \
 void Mark##base##Range(JSTracer *trc, size_t len, HeapPtr<type*> *thing, const char *name);       \
 void Mark##base##RootRange(JSTracer *trc, size_t len, type **thing, const char *name);            \
 bool Is##base##Marked(type **thingp);                                                             \
 bool Is##base##Marked(BarrieredBase<type*> *thingp);                                              \
 bool Is##base##AboutToBeFinalized(type **thingp);                                                 \
+bool Is##base##AboutToBeFinalizedFromAnyThread(type **thingp);                                    \
 bool Is##base##AboutToBeFinalized(BarrieredBase<type*> *thingp);                                  \
 type *Update##base##IfRelocated(JSRuntime *rt, BarrieredBase<type*> *thingp);                     \
 type *Update##base##IfRelocated(JSRuntime *rt, type **thingp);
 
 DeclMarker(BaseShape, BaseShape)
 DeclMarker(BaseShape, UnownedBaseShape)
 DeclMarker(JitCode, jit::JitCode)
 DeclMarker(Object, NativeObject)
@@ -214,16 +215,19 @@ void
 MarkTypeRoot(JSTracer *trc, types::Type *v, const char *name);
 
 bool
 IsValueMarked(Value *v);
 
 bool
 IsValueAboutToBeFinalized(Value *v);
 
+bool
+IsValueAboutToBeFinalizedFromAnyThread(Value *v);
+
 /*** Slot Marking ***/
 
 bool
 IsSlotMarked(HeapSlot *s);
 
 void
 MarkSlot(JSTracer *trc, HeapSlot *s, const char *name);
 
@@ -322,16 +326,19 @@ Mark(JSTracer *trc, ScopeObject **obj, c
 }
 
 bool
 IsCellMarked(Cell **thingp);
 
 bool
 IsCellAboutToBeFinalized(Cell **thing);
 
+bool
+IsCellAboutToBeFinalizedFromAnyThread(Cell **thing);
+
 inline bool
 IsMarked(BarrieredBase<Value> *v)
 {
     if (!v->isMarkable())
         return true;
     return IsValueMarked(v->unsafeGet());
 }
 
--- a/js/src/gc/Statistics.cpp
+++ b/js/src/gc/Statistics.cpp
@@ -14,16 +14,17 @@
 #include <stdio.h>
 
 #include "jscrashreport.h"
 #include "jsprf.h"
 #include "jsutil.h"
 #include "prmjtime.h"
 
 #include "gc/Memory.h"
+#include "vm/HelperThreads.h"
 #include "vm/Runtime.h"
 
 using namespace js;
 using namespace js::gc;
 using namespace js::gcstats;
 
 using mozilla::PodArrayZero;
 
@@ -832,16 +833,26 @@ Statistics::endPhase(Phase phase)
     phaseNestingDepth--;
 
     int64_t t = PRMJ_Now() - phaseStartTimes[phase];
     slices.back().phaseTimes[phase] += t;
     phaseTimes[phase] += t;
     phaseStartTimes[phase] = 0;
 }
 
+void
+Statistics::endParallelPhase(Phase phase, const GCParallelTask *task)
+{
+    phaseNestingDepth--;
+
+    slices.back().phaseTimes[phase] += task->duration();
+    phaseTimes[phase] += task->duration();
+    phaseStartTimes[phase] = 0;
+}
+
 int64_t
 Statistics::beginSCC()
 {
     return PRMJ_Now();
 }
 
 void
 Statistics::endSCC(unsigned scc, int64_t start)
--- a/js/src/gc/Statistics.h
+++ b/js/src/gc/Statistics.h
@@ -15,16 +15,19 @@
 #include "jspubtd.h"
 
 #include "js/GCAPI.h"
 #include "js/Vector.h"
 
 struct JSCompartment;
 
 namespace js {
+
+class GCParallelTask;
+
 namespace gcstats {
 
 enum Phase {
     PHASE_GC_BEGIN,
     PHASE_WAIT_BACKGROUND_THREAD,
     PHASE_MARK_DISCARD_CODE,
     PHASE_PURGE,
     PHASE_MARK,
@@ -104,16 +107,17 @@ struct ZoneGCStats
 
 struct Statistics
 {
     explicit Statistics(JSRuntime *rt);
     ~Statistics();
 
     void beginPhase(Phase phase);
     void endPhase(Phase phase);
+    void endParallelPhase(Phase phase, const GCParallelTask *task);
 
     void beginSlice(const ZoneGCStats &zoneStats, JS::gcreason::Reason reason);
     void endSlice();
 
     void reset(const char *reason) { slices.back().resetReason = reason; }
     void nonincremental(const char *reason) { nonincrementalReason = reason; }
 
     void count(Stat s) {
@@ -229,35 +233,51 @@ struct AutoGCSlice
     Statistics &stats;
     MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
 };
 
 struct AutoPhase
 {
     AutoPhase(Statistics &stats, Phase phase
               MOZ_GUARD_OBJECT_NOTIFIER_PARAM)
-      : stats(stats), phase(phase), enabled(true)
+      : stats(stats), task(nullptr), phase(phase), enabled(true)
     {
         MOZ_GUARD_OBJECT_NOTIFIER_INIT;
         stats.beginPhase(phase);
     }
+
     AutoPhase(Statistics &stats, bool condition, Phase phase
               MOZ_GUARD_OBJECT_NOTIFIER_PARAM)
-      : stats(stats), phase(phase), enabled(condition)
+      : stats(stats), task(nullptr), phase(phase), enabled(condition)
     {
         MOZ_GUARD_OBJECT_NOTIFIER_INIT;
         if (enabled)
             stats.beginPhase(phase);
     }
-    ~AutoPhase() {
+
+    AutoPhase(Statistics &stats, const GCParallelTask &task, Phase phase
+              MOZ_GUARD_OBJECT_NOTIFIER_PARAM)
+      : stats(stats), task(&task), phase(phase), enabled(true)
+    {
+        MOZ_GUARD_OBJECT_NOTIFIER_INIT;
         if (enabled)
-            stats.endPhase(phase);
+            stats.beginPhase(phase);
+    }
+
+    ~AutoPhase() {
+        if (enabled) {
+            if (task)
+                stats.endParallelPhase(phase, task);
+            else
+                stats.endPhase(phase);
+        }
     }
 
     Statistics &stats;
+    const GCParallelTask *task;
     Phase phase;
     bool enabled;
     MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
 };
 
 struct AutoSCC
 {
     AutoSCC(Statistics &stats, unsigned scc
--- a/js/src/jsatom.cpp
+++ b/js/src/jsatom.cpp
@@ -249,17 +249,17 @@ void
 JSRuntime::sweepAtoms()
 {
     if (!atoms_)
         return;
 
     for (AtomSet::Enum e(*atoms_); !e.empty(); e.popFront()) {
         AtomStateEntry entry = e.front();
         JSAtom *atom = entry.asPtr();
-        bool isDying = IsStringAboutToBeFinalized(&atom);
+        bool isDying = IsStringAboutToBeFinalizedFromAnyThread(&atom);
 
         /* Pinned or interned key cannot be finalized. */
         MOZ_ASSERT_IF(hasContexts() && entry.isTagged(), !isDying);
 
         if (isDying)
             e.removeFront();
     }
 }
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -96,18 +96,19 @@ js::TraceCycleDetectionSet(JSTracer *trc
 }
 
 void
 JSCompartment::sweepCallsiteClones()
 {
     if (callsiteClones.initialized()) {
         for (CallsiteCloneTable::Enum e(callsiteClones); !e.empty(); e.popFront()) {
             CallsiteCloneKey key = e.front().key();
-            if (IsObjectAboutToBeFinalized(&key.original) || IsScriptAboutToBeFinalized(&key.script) ||
-                IsObjectAboutToBeFinalized(e.front().value().unsafeGet()))
+            if (IsObjectAboutToBeFinalizedFromAnyThread(&key.original) ||
+                IsScriptAboutToBeFinalizedFromAnyThread(&key.script) ||
+                IsObjectAboutToBeFinalizedFromAnyThread(e.front().value().unsafeGet()))
             {
                 e.removeFront();
             } else if (key != e.front().key()) {
                 e.rekeyFront(key);
             }
         }
     }
 }
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -530,54 +530,54 @@ JSCompartment::markRoots(JSTracer *trc)
 
     if (jitCompartment_)
         jitCompartment_->mark(trc, this);
 
     /*
      * If a compartment is on-stack, we mark its global so that
      * JSContext::global() remains valid.
      */
-    if (enterCompartmentDepth && global_)
+    if (enterCompartmentDepth && global_.unbarrieredGet())
         MarkObjectRoot(trc, global_.unsafeGet(), "on-stack compartment global");
 }
 
 void
 JSCompartment::sweepInnerViews()
 {
-    innerViews.sweep(runtimeFromMainThread());
+    innerViews.sweep(runtimeFromAnyThread());
 }
 
 void
 JSCompartment::sweepTypeObjectTables()
 {
     sweepNewTypeObjectTable(newTypeObjects);
     sweepNewTypeObjectTable(lazyTypeObjects);
 }
 
 void
 JSCompartment::sweepSavedStacks()
 {
-    savedStacks_.sweep(runtimeFromMainThread());
+    savedStacks_.sweep(runtimeFromAnyThread());
 }
 
 void
 JSCompartment::sweepGlobalObject(FreeOp *fop)
 {
-    if (global_ && IsObjectAboutToBeFinalized(global_.unsafeGet())) {
+    if (global_.unbarrieredGet() && IsObjectAboutToBeFinalizedFromAnyThread(global_.unsafeGet())) {
         if (debugMode())
             Debugger::detachAllDebuggersFromGlobal(fop, global_);
         global_.set(nullptr);
     }
 }
 
 void
 JSCompartment::sweepSelfHostingScriptSource()
 {
-    if (selfHostingScriptSource &&
-        IsObjectAboutToBeFinalized((JSObject **) selfHostingScriptSource.unsafeGet()))
+    if (selfHostingScriptSource.unbarrieredGet() &&
+        IsObjectAboutToBeFinalizedFromAnyThread((JSObject **) selfHostingScriptSource.unsafeGet()))
     {
         selfHostingScriptSource.set(nullptr);
     }
 }
 
 void
 JSCompartment::sweepJitCompartment(FreeOp *fop)
 {
@@ -588,23 +588,23 @@ JSCompartment::sweepJitCompartment(FreeO
 void
 JSCompartment::sweepRegExps()
 {
     /*
      * JIT code increments activeWarmUpCounter for any RegExpShared used by jit
      * code for the lifetime of the JIT script. Thus, we must perform
      * sweeping after clearing jit code.
      */
-    regExps.sweep(runtimeFromMainThread());
+    regExps.sweep(runtimeFromAnyThread());
 }
 
 void
 JSCompartment::sweepDebugScopes()
 {
-    JSRuntime *rt = runtimeFromMainThread();
+    JSRuntime *rt = runtimeFromAnyThread();
     if (debugScopes)
         debugScopes->sweep(rt);
 }
 
 void
 JSCompartment::sweepWeakMaps()
 {
     /* Finalize unreachable (key,value) pairs in all weak maps. */
@@ -614,36 +614,36 @@ JSCompartment::sweepWeakMaps()
 void
 JSCompartment::sweepNativeIterators()
 {
     /* Sweep list of native iterators. */
     NativeIterator *ni = enumerators->next();
     while (ni != enumerators) {
         JSObject *iterObj = ni->iterObj();
         NativeIterator *next = ni->next();
-        if (gc::IsObjectAboutToBeFinalized(&iterObj))
+        if (gc::IsObjectAboutToBeFinalizedFromAnyThread(&iterObj))
             ni->unlink();
         ni = next;
     }
 }
 
 /*
  * Remove dead wrappers from the table. We must sweep all compartments, since
  * string entries in the crossCompartmentWrappers table are not marked during
  * markCrossCompartmentWrappers.
  */
 void
 JSCompartment::sweepCrossCompartmentWrappers()
 {
     /* Remove dead wrappers from the table. */
     for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
         CrossCompartmentKey key = e.front().key();
-        bool keyDying = IsCellAboutToBeFinalized(&key.wrapped);
-        bool valDying = IsValueAboutToBeFinalized(e.front().value().unsafeGet());
-        bool dbgDying = key.debugger && IsObjectAboutToBeFinalized(&key.debugger);
+        bool keyDying = IsCellAboutToBeFinalizedFromAnyThread(&key.wrapped);
+        bool valDying = IsValueAboutToBeFinalizedFromAnyThread(e.front().value().unsafeGet());
+        bool dbgDying = key.debugger && IsObjectAboutToBeFinalizedFromAnyThread(&key.debugger);
         if (keyDying || valDying || dbgDying) {
             MOZ_ASSERT(key.kind != CrossCompartmentKey::StringWrapper);
             e.removeFront();
         } else if (key.wrapped != e.front().key().wrapped ||
                    key.debugger != e.front().key().debugger)
         {
             e.rekeyFront(key);
         }
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -249,16 +249,17 @@ struct JSCompartment
     /* Set of initial shapes in the compartment. */
     js::InitialShapeSet          initialShapes;
     void sweepInitialShapeTable();
 
     /* Set of default 'new' or lazy types in the compartment. */
     js::types::TypeObjectWithNewScriptSet newTypeObjects;
     js::types::TypeObjectWithNewScriptSet lazyTypeObjects;
     void sweepNewTypeObjectTable(js::types::TypeObjectWithNewScriptSet &table);
+
 #ifdef JSGC_HASH_TABLE_CHECKS
     void checkTypeObjectTablesAfterMovingGC();
     void checkTypeObjectTableAfterMovingGC(js::types::TypeObjectWithNewScriptSet &table);
     void checkInitialShapesTableAfterMovingGC();
     void checkWrapperMapAfterMovingGC();
 #endif
 
     /*
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -3250,17 +3250,19 @@ GCHelperState::startBackgroundThread(Sta
     HelperThreadState().notifyAll(GlobalHelperThreadState::PRODUCER);
 }
 
 void
 GCHelperState::waitForBackgroundThread()
 {
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
 
+#ifdef DEBUG
     rt->gc.lockOwner = nullptr;
+#endif
     PR_WaitCondVar(done, PR_INTERVAL_NO_TIMEOUT);
 #ifdef DEBUG
     rt->gc.lockOwner = PR_GetCurrentThread();
 #endif
 }
 
 void
 GCHelperState::work()
@@ -4605,16 +4607,107 @@ GCRuntime::endMarkingZoneGroup()
     for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) {
         MOZ_ASSERT(zone->isGCMarkingGray());
         zone->setGCState(Zone::Mark);
     }
     MOZ_ASSERT(marker.isDrained());
     marker.setMarkColorBlack();
 }
 
+#define MAKE_GC_PARALLEL_TASK(name) \
+    class name : public GCParallelTask {\
+        JSRuntime *runtime;\
+        virtual void run() MOZ_OVERRIDE;\
+      public:\
+        name (JSRuntime *rt) : runtime(rt) {}\
+    }
+MAKE_GC_PARALLEL_TASK(SweepAtomsTask);
+MAKE_GC_PARALLEL_TASK(SweepInnerViewsTask);
+MAKE_GC_PARALLEL_TASK(SweepCCWrappersTask);
+MAKE_GC_PARALLEL_TASK(SweepBaseShapesTask);
+MAKE_GC_PARALLEL_TASK(SweepInitialShapesTask);
+MAKE_GC_PARALLEL_TASK(SweepTypeObjectsTask);
+MAKE_GC_PARALLEL_TASK(SweepRegExpsTask);
+MAKE_GC_PARALLEL_TASK(SweepMiscTask);
+
+/* virtual */ void
+SweepAtomsTask::run()
+{
+    runtime->sweepAtoms();
+}
+
+/* virtual */ void
+SweepInnerViewsTask::run()
+{
+    for (GCCompartmentGroupIter c(runtime); !c.done(); c.next())
+        c->sweepInnerViews();
+}
+
+/* virtual */ void
+SweepCCWrappersTask::run()
+{
+    for (GCCompartmentGroupIter c(runtime); !c.done(); c.next())
+        c->sweepCrossCompartmentWrappers();
+}
+
+/* virtual */ void
+SweepBaseShapesTask::run()
+{
+    for (GCCompartmentGroupIter c(runtime); !c.done(); c.next())
+        c->sweepBaseShapeTable();
+}
+
+/* virtual */ void
+SweepInitialShapesTask::run()
+{
+    for (GCCompartmentGroupIter c(runtime); !c.done(); c.next())
+        c->sweepInitialShapeTable();
+}
+
+/* virtual */ void
+SweepTypeObjectsTask::run()
+{
+    for (GCCompartmentGroupIter c(runtime); !c.done(); c.next())
+        c->sweepTypeObjectTables();
+}
+
+/* virtual */ void
+SweepRegExpsTask::run()
+{
+    for (GCCompartmentGroupIter c(runtime); !c.done(); c.next())
+        c->sweepRegExps();
+}
+
+/* virtual */ void
+SweepMiscTask::run()
+{
+    for (GCCompartmentGroupIter c(runtime); !c.done(); c.next()) {
+        c->sweepCallsiteClones();
+        c->sweepSavedStacks();
+        c->sweepSelfHostingScriptSource();
+        c->sweepNativeIterators();
+    }
+}
+
+void
+GCRuntime::startTask(GCParallelTask &task, gcstats::Phase phase)
+{
+    if (!task.startWithLockHeld()) {
+        gcstats::AutoPhase ap(stats, phase);
+        task.runFromMainThread(rt);
+    }
+}
+
+void
+GCRuntime::joinTask(GCParallelTask &task, gcstats::Phase phase)
+{
+    gcstats::AutoPhase ap(stats, task, phase);
+    task.joinWithLockHeld();
+}
+
 void
 GCRuntime::beginSweepingZoneGroup()
 {
     /*
      * Begin sweeping the group of zones in gcCurrentZoneGroup,
      * performing actions that must be done before yielding to caller.
      */
 
@@ -4634,85 +4727,61 @@ GCRuntime::beginSweepingZoneGroup()
             rt->sweepZoneCallback(zone);
 
         zone->gcLastZoneGroupIndex = zoneGroupIndex;
     }
 
     validateIncrementalMarking();
 
     FreeOp fop(rt);
+    SweepAtomsTask sweepAtomsTask(rt);
+    SweepInnerViewsTask sweepInnerViewsTask(rt);
+    SweepCCWrappersTask sweepCCWrappersTask(rt);
+    SweepBaseShapesTask sweepBaseShapesTask(rt);
+    SweepInitialShapesTask sweepInitialShapesTask(rt);
+    SweepTypeObjectsTask sweepTypeObjectsTask(rt);
+    SweepRegExpsTask sweepRegExpsTask(rt);
+    SweepMiscTask sweepMiscTask(rt);
 
     {
         gcstats::AutoPhase ap(stats, gcstats::PHASE_FINALIZE_START);
         callFinalizeCallbacks(&fop, JSFINALIZE_GROUP_START);
         callWeakPointerCallbacks();
     }
 
     if (sweepingAtoms) {
-        gcstats::AutoPhase ap(stats, gcstats::PHASE_SWEEP_ATOMS);
-        rt->sweepAtoms();
+        AutoLockHelperThreadState helperLock;
+        startTask(sweepAtomsTask, gcstats::PHASE_SWEEP_ATOMS);
     }
 
     {
         gcstats::AutoPhase ap(stats, gcstats::PHASE_SWEEP_COMPARTMENTS);
         gcstats::AutoSCC scc(stats, zoneGroupIndex);
 
         {
-            gcstats::AutoPhase apiv(stats, gcstats::PHASE_SWEEP_INNER_VIEWS);
-            for (GCCompartmentGroupIter c(rt); !c.done(); c.next()) {
-                c->sweepInnerViews();
-            }
+            AutoLockHelperThreadState helperLock;
+            startTask(sweepInnerViewsTask, gcstats::PHASE_SWEEP_INNER_VIEWS);
+            startTask(sweepCCWrappersTask, gcstats::PHASE_SWEEP_CC_WRAPPER);
+            startTask(sweepBaseShapesTask, gcstats::PHASE_SWEEP_BASE_SHAPE);
+            startTask(sweepInitialShapesTask, gcstats::PHASE_SWEEP_INITIAL_SHAPE);
+            startTask(sweepTypeObjectsTask, gcstats::PHASE_SWEEP_TYPE_OBJECT);
+            startTask(sweepRegExpsTask, gcstats::PHASE_SWEEP_REGEXP);
+            startTask(sweepMiscTask, gcstats::PHASE_SWEEP_MISC);
         }
 
+        // The remainder of the sweeping work runs on the main thread, in
+        // parallel with the tasks dispatched above, until we join them below.
         {
-            gcstats::AutoPhase apccw(stats, gcstats::PHASE_SWEEP_CC_WRAPPER);
-            for (GCCompartmentGroupIter c(rt); !c.done(); c.next()) {
-                c->sweepCrossCompartmentWrappers();
-            }
-        }
-
-        {
-            gcstats::AutoPhase apbs(stats, gcstats::PHASE_SWEEP_BASE_SHAPE);
-            for (GCCompartmentGroupIter c(rt); !c.done(); c.next()) {
-                c->sweepBaseShapeTable();
-            }
-        }
-
-        {
-            gcstats::AutoPhase apis(stats, gcstats::PHASE_SWEEP_INITIAL_SHAPE);
+            gcstats::AutoPhase ap(stats, gcstats::PHASE_SWEEP_MISC);
+
             for (GCCompartmentGroupIter c(rt); !c.done(); c.next()) {
-                c->sweepInitialShapeTable();
-            }
-        }
-
-        {
-            gcstats::AutoPhase apto(stats, gcstats::PHASE_SWEEP_TYPE_OBJECT);
-            for (GCCompartmentGroupIter c(rt); !c.done(); c.next()) {
-                c->sweepTypeObjectTables();
-            }
-        }
-
-        {
-            gcstats::AutoPhase apre(stats, gcstats::PHASE_SWEEP_REGEXP);
-            for (GCCompartmentGroupIter c(rt); !c.done(); c.next()) {
-                c->sweepRegExps();
-            }
-        }
-
-        {
-            gcstats::AutoPhase apmisc(stats, gcstats::PHASE_SWEEP_MISC);
-            for (GCCompartmentGroupIter c(rt); !c.done(); c.next()) {
-                c->sweepCallsiteClones();
-                c->sweepSavedStacks();
                 c->sweepGlobalObject(&fop);
-                c->sweepSelfHostingScriptSource();
                 c->sweepDebugScopes();
                 c->sweepJitCompartment(&fop);
                 c->sweepWeakMaps();
-                c->sweepNativeIterators();
             }
 
             // Bug 1071218: the following two methods have not yet been
             // refactored to work on a single zone-group at once.
 
             // Collect watch points associated with unreachable objects.
             WatchpointMap::sweepAll(rt);
 
@@ -4742,16 +4811,36 @@ GCRuntime::beginSweepingZoneGroup()
         }
     }
 
     if (sweepingAtoms) {
         gcstats::AutoPhase ap(stats, gcstats::PHASE_SWEEP_SYMBOL_REGISTRY);
         rt->symbolRegistry().sweep();
     }
 
+    // Rejoin our off-main-thread tasks.
+    if (sweepingAtoms) {
+        AutoLockHelperThreadState helperLock;
+        joinTask(sweepAtomsTask, gcstats::PHASE_SWEEP_ATOMS);
+    }
+
+    {
+        gcstats::AutoPhase ap(stats, gcstats::PHASE_SWEEP_COMPARTMENTS);
+        gcstats::AutoSCC scc(stats, zoneGroupIndex);
+
+        AutoLockHelperThreadState helperLock;
+        joinTask(sweepInnerViewsTask, gcstats::PHASE_SWEEP_INNER_VIEWS);
+        joinTask(sweepCCWrappersTask, gcstats::PHASE_SWEEP_CC_WRAPPER);
+        joinTask(sweepBaseShapesTask, gcstats::PHASE_SWEEP_BASE_SHAPE);
+        joinTask(sweepInitialShapesTask, gcstats::PHASE_SWEEP_INITIAL_SHAPE);
+        joinTask(sweepTypeObjectsTask, gcstats::PHASE_SWEEP_TYPE_OBJECT);
+        joinTask(sweepRegExpsTask, gcstats::PHASE_SWEEP_REGEXP);
+        joinTask(sweepMiscTask, gcstats::PHASE_SWEEP_MISC);
+    }
+
     /*
      * Queue all GC things in all zones for sweeping, either in the
      * foreground or on the background thread.
      *
      * Note that order is important here for the background case.
      *
      * Objects are finalized immediately but this may change in the future.
      */
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -1065,16 +1065,57 @@ class GCHelperState
     }
 
     bool shouldShrink() const {
         MOZ_ASSERT(isBackgroundSweeping());
         return shrinkFlag;
     }
 };
 
+// A generic task used to dispatch work to the helper thread system.
+// Users should derive from GCParallelTask, add whatever data they need, and
+// override |run|.
+class GCParallelTask
+{
+    // The state of the parallel computation.
+    enum TaskState {
+        NotStarted,
+        Dispatched,
+        Finished,
+    } state;
+
+    // Amount of time this task took to execute.
+    int64_t duration_;
+
+  protected:
+    virtual void run() = 0;
+
+  public:
+    GCParallelTask() : state(NotStarted), duration_(0) {}
+
+    int64_t duration() const { return duration_; }
+
+    // The simple interface to a parallel task works exactly like pthreads.
+    bool start();
+    void join();
+
+    // If multiple tasks are to be started or joined at once, it is more
+    // efficient to take the helper thread lock once and use these methods.
+    bool startWithLockHeld();
+    void joinWithLockHeld();
+
+    // Instead of dispatching to a helper, run the task on the main thread.
+    void runFromMainThread(JSRuntime *rt);
+
+    // This should be friended to HelperThread, but cannot be because it
+    // would introduce several circular dependencies.
+  public:
+    void runFromHelperThread();
+};
+
 struct GCChunkHasher {
     typedef gc::Chunk *Lookup;
 
     /*
      * Strip zeros for better distribution after multiplying by the golden
      * ratio.
      */
     static HashNumber hash(gc::Chunk *chunk) {
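
As the class comment above says, users derive from GCParallelTask, add whatever data the task needs, and override |run|. In the patch every such subclass is generated by the MAKE_GC_PARALLEL_TASK macro in jsgc.cpp; spelled out by hand for readability (this is just an expansion of that macro plus the run() definition shown earlier, not additional code), the atoms-sweeping task looks like:

    class SweepAtomsTask : public GCParallelTask {
        JSRuntime *runtime;
        virtual void run() MOZ_OVERRIDE;
      public:
        SweepAtomsTask(JSRuntime *rt) : runtime(rt) {}
    };

    /* virtual */ void
    SweepAtomsTask::run()
    {
        runtime->sweepAtoms();
    }

beginSweepingZoneGroup() then constructs one instance of each task on the stack, starts it via GCRuntime::startTask() under the helper-thread lock, and later calls GCRuntime::joinTask() for the same task before touching anything the task may have swept.
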
--- a/js/src/jsinfer.cpp
+++ b/js/src/jsinfer.cpp
@@ -4779,27 +4779,27 @@ TypeCompartment::sweep(FreeOp *fop)
                 e.rekeyFront(key);
         }
     }
 }
 
 void
 JSCompartment::sweepNewTypeObjectTable(TypeObjectWithNewScriptSet &table)
 {
-    MOZ_ASSERT(zone()->isCollecting());
+    MOZ_ASSERT(zone()->runtimeFromAnyThread()->isHeapCollecting());
     if (table.initialized()) {
         for (TypeObjectWithNewScriptSet::Enum e(table); !e.empty(); e.popFront()) {
             TypeObjectWithNewScriptEntry entry = e.front();
-            if (IsTypeObjectAboutToBeFinalized(entry.object.unsafeGet()) ||
-                (entry.newFunction && IsObjectAboutToBeFinalized(&entry.newFunction)))
+            if (IsTypeObjectAboutToBeFinalizedFromAnyThread(entry.object.unsafeGet()) ||
+                (entry.newFunction && IsObjectAboutToBeFinalizedFromAnyThread(&entry.newFunction)))
             {
                 e.removeFront();
             } else {
                 /* Any rekeying necessary is handled by fixupNewTypeObjectTable() below. */
-                MOZ_ASSERT(entry.object == e.front().object);
+                MOZ_ASSERT(entry.object.unbarrieredGet() == e.front().object.unbarrieredGet());
                 MOZ_ASSERT(entry.newFunction == e.front().newFunction);
             }
         }
     }
 }
 
 #ifdef JSGC_COMPACTING
 
--- a/js/src/vm/ArrayBufferObject.cpp
+++ b/js/src/vm/ArrayBufferObject.cpp
@@ -883,22 +883,22 @@ InnerViewTable::removeViews(ArrayBufferO
     MOZ_ASSERT(p);
 
     map.remove(p);
 }
 
 bool
 InnerViewTable::sweepEntry(JSObject **pkey, ViewVector &views)
 {
-    if (IsObjectAboutToBeFinalized(pkey))
+    if (IsObjectAboutToBeFinalizedFromAnyThread(pkey))
         return true;
 
     MOZ_ASSERT(!views.empty());
     for (size_t i = 0; i < views.length(); i++) {
-        if (IsObjectAboutToBeFinalized(&views[i])) {
+        if (IsObjectAboutToBeFinalizedFromAnyThread(&views[i])) {
             views[i--] = views.back();
             views.popBack();
         }
     }
 
     return views.empty();
 }
 
--- a/js/src/vm/HelperThreads.cpp
+++ b/js/src/vm/HelperThreads.cpp
@@ -709,16 +709,111 @@ GlobalHelperThreadState::canStartCompres
 }
 
 bool
 GlobalHelperThreadState::canStartGCHelperTask()
 {
     return !gcHelperWorklist().empty();
 }
 
+bool
+GlobalHelperThreadState::canStartGCParallelTask()
+{
+    return !gcParallelWorklist().empty();
+}
+
+bool
+js::GCParallelTask::startWithLockHeld()
+{
+    MOZ_ASSERT(HelperThreadState().isLocked());
+
+    // Tasks cannot be started twice.
+    MOZ_ASSERT(state == NotStarted);
+
+    // If we do the shutdown GC before running anything, we may never
+    // have initialized the helper threads. Just use the serial path
+    // since we cannot safely initialize them at this point.
+    if (!HelperThreadState().threads)
+        return false;
+
+    if (!HelperThreadState().gcParallelWorklist().append(this))
+        return false;
+    state = Dispatched;
+
+    HelperThreadState().notifyOne(GlobalHelperThreadState::PRODUCER);
+
+    return true;
+}
+
+bool
+js::GCParallelTask::start()
+{
+    AutoLockHelperThreadState helperLock;
+    return startWithLockHeld();
+}
+
+void
+js::GCParallelTask::joinWithLockHeld()
+{
+    MOZ_ASSERT(HelperThreadState().isLocked());
+
+    if (state == NotStarted)
+        return;
+
+    while (state != Finished)
+        HelperThreadState().wait(GlobalHelperThreadState::CONSUMER);
+    state = NotStarted;
+}
+
+void
+js::GCParallelTask::join()
+{
+    AutoLockHelperThreadState helperLock;
+    joinWithLockHeld();
+}
+
+void
+js::GCParallelTask::runFromMainThread(JSRuntime *rt)
+{
+    MOZ_ASSERT(state == NotStarted);
+    MOZ_ASSERT(js::CurrentThreadCanAccessRuntime(rt));
+    int64_t timeStart = PRMJ_Now();
+    run();
+    duration_ = PRMJ_Now() - timeStart;
+}
+
+void
+js::GCParallelTask::runFromHelperThread()
+{
+    MOZ_ASSERT(HelperThreadState().isLocked());
+
+    {
+        AutoUnlockHelperThreadState parallelSection;
+        int64_t timeStart = PRMJ_Now();
+        run();
+        duration_ = PRMJ_Now() - timeStart;
+    }
+
+    state = Finished;
+    HelperThreadState().notifyAll(GlobalHelperThreadState::CONSUMER);
+}
+
+void
+HelperThread::handleGCParallelWorkload()
+{
+    MOZ_ASSERT(HelperThreadState().isLocked());
+    MOZ_ASSERT(HelperThreadState().canStartGCParallelTask());
+    MOZ_ASSERT(idle());
+
+    MOZ_ASSERT(!gcParallelTask);
+    gcParallelTask = HelperThreadState().gcParallelWorklist().popCopy();
+    gcParallelTask->runFromHelperThread();
+    gcParallelTask = nullptr;
+}
+
 static void
 LeaveParseTaskZone(JSRuntime *rt, ParseTask *task)
 {
     // Mark the zone as no longer in use by an ExclusiveContext, and available
     // to be collected by the GC.
     task->cx->leaveCompartment(task->cx->compartment());
     rt->clearUsedByExclusiveThread(task->cx->zone());
 }
@@ -1232,17 +1327,18 @@ HelperThread::threadLoop()
         bool ionCompile = false;
         while (true) {
             if (terminate)
                 return;
             if (HelperThreadState().canStartAsmJSCompile() ||
                 (ionCompile = HelperThreadState().pendingIonCompileHasSufficientPriority()) ||
                 HelperThreadState().canStartParseTask() ||
                 HelperThreadState().canStartCompressionTask() ||
-                HelperThreadState().canStartGCHelperTask())
+                HelperThreadState().canStartGCHelperTask() ||
+                HelperThreadState().canStartGCParallelTask())
             {
                 break;
             }
             HelperThreadState().wait(GlobalHelperThreadState::PRODUCER);
         }
 
         // Dispatch tasks, prioritizing AsmJS work.
         if (HelperThreadState().canStartAsmJSCompile())
@@ -1250,12 +1346,14 @@ HelperThread::threadLoop()
         else if (ionCompile)
             handleIonWorkload();
         else if (HelperThreadState().canStartParseTask())
             handleParseWorkload();
         else if (HelperThreadState().canStartCompressionTask())
             handleCompressionWorkload();
         else if (HelperThreadState().canStartGCHelperTask())
             handleGCHelperWorkload();
+        else if (HelperThreadState().canStartGCParallelTask())
+            handleGCParallelWorkload();
         else
             MOZ_CRASH("No task to perform");
     }
 }
--- a/js/src/vm/HelperThreads.h
+++ b/js/src/vm/HelperThreads.h
@@ -42,16 +42,17 @@ class GlobalHelperThreadState
     // Number of threads to create. May be accessed without locking.
     size_t threadCount;
 
     typedef Vector<jit::IonBuilder*, 0, SystemAllocPolicy> IonBuilderVector;
     typedef Vector<AsmJSParallelTask*, 0, SystemAllocPolicy> AsmJSParallelTaskVector;
     typedef Vector<ParseTask*, 0, SystemAllocPolicy> ParseTaskVector;
     typedef Vector<SourceCompressionTask*, 0, SystemAllocPolicy> SourceCompressionTaskVector;
     typedef Vector<GCHelperState *, 0, SystemAllocPolicy> GCHelperStateVector;
+    typedef Vector<GCParallelTask *, 0, SystemAllocPolicy> GCParallelTaskVector;
     typedef mozilla::LinkedList<jit::IonBuilder> IonBuilderList;
 
     // List of available threads, or null if the thread state has not been initialized.
     HelperThread *threads;
 
   private:
     // The lists below are all protected by |lock|.
 
@@ -82,16 +83,19 @@ class GlobalHelperThreadState
     ParseTaskVector parseWaitingOnGC_;
 
     // Source compression worklist.
     SourceCompressionTaskVector compressionWorklist_;
 
     // Runtimes which have sweeping / allocating work to do.
     GCHelperStateVector gcHelperWorklist_;
 
+    // GC tasks needing to be done in parallel.
+    GCParallelTaskVector gcParallelWorklist_;
+
   public:
     size_t maxIonCompilationThreads() const {
         return 1;
     }
     size_t maxAsmJSCompilationThreads() const {
         if (cpuCount < 2)
             return 2;
         return cpuCount;
@@ -173,21 +177,27 @@ class GlobalHelperThreadState
         return compressionWorklist_;
     }
 
     GCHelperStateVector &gcHelperWorklist() {
         MOZ_ASSERT(isLocked());
         return gcHelperWorklist_;
     }
 
+    GCParallelTaskVector &gcParallelWorklist() {
+        MOZ_ASSERT(isLocked());
+        return gcParallelWorklist_;
+    }
+
     bool canStartAsmJSCompile();
     bool canStartIonCompile();
     bool canStartParseTask();
     bool canStartCompressionTask();
     bool canStartGCHelperTask();
+    bool canStartGCParallelTask();
 
     // Unlike the methods above, the value returned by this method can change
     // over time, even if the helper thread state lock is held throughout.
     bool pendingIonCompileHasSufficientPriority();
 
     jit::IonBuilder *highestPriorityPendingIonCompile(bool remove = false);
     HelperThread *lowestPriorityUnpausedIonCompileAtThreshold();
     HelperThread *highestPriorityPausedIonCompile();
@@ -294,27 +304,36 @@ struct HelperThread
     ParseTask *parseTask;
 
     /* Any source being compressed on this thread. */
     SourceCompressionTask *compressionTask;
 
     /* Any GC state for background sweeping or allocating being performed. */
     GCHelperState *gcHelperState;
 
+    /* State required to perform a GC parallel task. */
+    GCParallelTask *gcParallelTask;
+
     bool idle() const {
-        return !ionBuilder && !asmData && !parseTask && !compressionTask && !gcHelperState;
+        return !ionBuilder &&
+               !asmData &&
+               !parseTask &&
+               !compressionTask &&
+               !gcHelperState &&
+               !gcParallelTask;
     }
 
     void destroy();
 
     void handleAsmJSWorkload();
     void handleIonWorkload();
     void handleParseWorkload();
     void handleCompressionWorkload();
     void handleGCHelperWorkload();
+    void handleGCParallelWorkload();
 
     static void ThreadMain(void *arg);
     void threadLoop();
 };
 
 /* Methods for interacting with helper threads. */
 
 // Initialize helper threads unless already initialized.
--- a/js/src/vm/RegExpObject.cpp
+++ b/js/src/vm/RegExpObject.cpp
@@ -766,35 +766,36 @@ RegExpCompartment::sweep(JSRuntime *rt)
         // compartment being subsequently cleared. This can happen if a GC is
         // restarted while in progress (i.e. performing a full GC in the
         // middle of an incremental GC) or if a RegExpShared referenced via the
         // stack is traced but is not in a zone being collected.
         //
         // Because of this we only treat the marked_ bit as a hint, and destroy
         // the RegExpShared if it was accidentally marked earlier but wasn't
         // marked by the current trace.
-        bool keep = shared->marked() && !IsStringAboutToBeFinalized(shared->source.unsafeGet());
+        bool keep = shared->marked() &&
+                    !IsStringAboutToBeFinalizedFromAnyThread(shared->source.unsafeGet());
         for (size_t i = 0; i < ArrayLength(shared->compilationArray); i++) {
             RegExpShared::RegExpCompilation &compilation = shared->compilationArray[i];
             if (compilation.jitCode &&
-                IsJitCodeAboutToBeFinalized(compilation.jitCode.unsafeGet()))
+                IsJitCodeAboutToBeFinalizedFromAnyThread(compilation.jitCode.unsafeGet()))
             {
                 keep = false;
             }
         }
         if (keep || rt->isHeapCompacting()) {
             shared->clearMarked();
         } else {
             js_delete(shared);
             e.removeFront();
         }
     }
 
     if (matchResultTemplateObject_ &&
-        IsObjectAboutToBeFinalized(matchResultTemplateObject_.unsafeGet()))
+        IsObjectAboutToBeFinalizedFromAnyThread(matchResultTemplateObject_.unsafeGet()))
     {
         matchResultTemplateObject_.set(nullptr);
     }
 }
 
 bool
 RegExpCompartment::get(JSContext *cx, JSAtom *source, RegExpFlag flags, RegExpGuard *g)
 {
--- a/js/src/vm/SavedStacks.cpp
+++ b/js/src/vm/SavedStacks.cpp
@@ -426,20 +426,20 @@ SavedStacks::saveCurrentStack(JSContext 
     return insertFrames(cx, iter, frame, maxFrameCount);
 }
 
 void
 SavedStacks::sweep(JSRuntime *rt)
 {
     if (frames.initialized()) {
         for (SavedFrame::Set::Enum e(frames); !e.empty(); e.popFront()) {
-            JSObject *obj = static_cast<JSObject *>(e.front());
+            JSObject *obj = e.front().unbarrieredGet();
             JSObject *temp = obj;
 
-            if (IsObjectAboutToBeFinalized(&obj)) {
+            if (IsObjectAboutToBeFinalizedFromAnyThread(&obj)) {
                 e.removeFront();
             } else {
                 SavedFrame *frame = &obj->as<SavedFrame>();
                 bool parentMoved = frame->parentMoved();
 
                 if (parentMoved) {
                     frame->updatePrivateParent();
                 }
@@ -454,17 +454,19 @@ SavedStacks::sweep(JSRuntime *rt)
                                  ReadBarriered<SavedFrame *>(frame));
                 }
             }
         }
     }
 
     sweepPCLocationMap();
 
-    if (savedFrameProto && IsObjectAboutToBeFinalized(savedFrameProto.unsafeGet())) {
+    if (savedFrameProto.unbarrieredGet() &&
+        IsObjectAboutToBeFinalizedFromAnyThread(savedFrameProto.unsafeGet()))
+    {
         savedFrameProto.set(nullptr);
     }
 }
 
 void
 SavedStacks::trace(JSTracer *trc)
 {
     if (!pcLocationMap.initialized())
@@ -645,17 +647,17 @@ SavedStacks::createFrameFromLookup(JSCon
  * Remove entries from the table whose JSScript is being collected.
  */
 void
 SavedStacks::sweepPCLocationMap()
 {
     for (PCLocationMap::Enum e(pcLocationMap); !e.empty(); e.popFront()) {
         PCKey key = e.front().key();
         JSScript *script = key.script.get();
-        if (IsScriptAboutToBeFinalized(&script)) {
+        if (IsScriptAboutToBeFinalizedFromAnyThread(&script)) {
             e.removeFront();
         } else if (script != key.script.get()) {
             key.script = script;
             e.rekeyFront(key);
         }
     }
 }
 
--- a/js/src/vm/ScopeObject.cpp
+++ b/js/src/vm/ScopeObject.cpp
@@ -1246,19 +1246,19 @@ ScopeIterKey::match(ScopeIterKey si1, Sc
              si1.staticScope_ == si2.staticScope_ &&
              si1.type_  == si2.type_));
 }
 
 void
 ScopeIterVal::sweep()
 {
     /* We need to update possibly moved pointers on sweep. */
-    MOZ_ALWAYS_FALSE(IsObjectAboutToBeFinalized(cur_.unsafeGet()));
+    MOZ_ALWAYS_FALSE(IsObjectAboutToBeFinalizedFromAnyThread(cur_.unsafeGet()));
     if (staticScope_)
-        MOZ_ALWAYS_FALSE(IsObjectAboutToBeFinalized(staticScope_.unsafeGet()));
+        MOZ_ALWAYS_FALSE(IsObjectAboutToBeFinalizedFromAnyThread(staticScope_.unsafeGet()));
 }
 
 // Live ScopeIter values may be added to DebugScopes::liveScopes, as
 // ScopeIterVal instances.  They need to have write barriers when they are added
 // to the hash table, but no barriers when rehashing inside GC.  It's a nasty
 // hack, but the important thing is that ScopeIterKey and ScopeIterVal need to
 // alias each other.
 void ScopeIterVal::staticAsserts() {
@@ -1947,17 +1947,17 @@ void
 DebugScopes::sweep(JSRuntime *rt)
 {
     /*
      * missingScopes points to debug scopes weakly so that debug scopes can be
      * released more eagerly.
      */
     for (MissingScopeMap::Enum e(missingScopes); !e.empty(); e.popFront()) {
         DebugScopeObject **debugScope = e.front().value().unsafeGet();
-        if (IsObjectAboutToBeFinalized(debugScope)) {
+        if (IsObjectAboutToBeFinalizedFromAnyThread(debugScope)) {
             /*
              * Note that onPopCall and onPopBlock rely on missingScopes to find
              * scope objects that we synthesized for the debugger's sake, and
              * clean up the synthetic scope objects' entries in liveScopes. So
             * if we remove an entry from missingScopes here, we must also
              * remove the corresponding liveScopes entry.
              *
              * Since the DebugScopeObject is the only thing using its scope
@@ -1992,17 +1992,17 @@ DebugScopes::sweep(JSRuntime *rt)
         ScopeObject *scope = e.front().key();
 
         e.front().value().sweep();
 
         /*
          * Scopes can be finalized when a debugger-synthesized ScopeObject is
          * no longer reachable via its DebugScopeObject.
          */
-        if (IsObjectAboutToBeFinalized(&scope))
+        if (IsObjectAboutToBeFinalizedFromAnyThread(&scope))
             e.removeFront();
         else if (scope != e.front().key())
             e.rekeyFront(scope);
     }
 }
 
 #ifdef JSGC_HASH_TABLE_CHECKS
 void
--- a/js/src/vm/Shape.cpp
+++ b/js/src/vm/Shape.cpp
@@ -1525,19 +1525,19 @@ BaseShape::assertConsistency()
 }
 
 void
 JSCompartment::sweepBaseShapeTable()
 {
     if (baseShapes.initialized()) {
         for (BaseShapeSet::Enum e(baseShapes); !e.empty(); e.popFront()) {
             UnownedBaseShape *base = e.front().unbarrieredGet();
-            if (IsBaseShapeAboutToBeFinalized(&base)) {
+            if (IsBaseShapeAboutToBeFinalizedFromAnyThread(&base)) {
                 e.removeFront();
-            } else if (base != e.front()) {
+            } else if (base != e.front().unbarrieredGet()) {
                 StackBaseShape sbase(base);
                 ReadBarriered<UnownedBaseShape *> b(base);
                 e.rekeyFront(&sbase, b);
             }
         }
     }
 }
 
@@ -1821,24 +1821,24 @@ EmptyShape::insertInitialShape(Exclusive
 void
 JSCompartment::sweepInitialShapeTable()
 {
     if (initialShapes.initialized()) {
         for (InitialShapeSet::Enum e(initialShapes); !e.empty(); e.popFront()) {
             const InitialShapeEntry &entry = e.front();
             Shape *shape = entry.shape.unbarrieredGet();
             JSObject *proto = entry.proto.raw();
-            if (IsShapeAboutToBeFinalized(&shape) ||
-                (entry.proto.isObject() && IsObjectAboutToBeFinalized(&proto)))
+            if (IsShapeAboutToBeFinalizedFromAnyThread(&shape) ||
+                (entry.proto.isObject() && IsObjectAboutToBeFinalizedFromAnyThread(&proto)))
             {
                 e.removeFront();
             } else {
 #ifdef DEBUG
                 DebugOnly<JSObject *> parent = shape->getObjectParent();
-                MOZ_ASSERT(!parent || !IsObjectAboutToBeFinalized(&parent));
+                MOZ_ASSERT(!parent || !IsObjectAboutToBeFinalizedFromAnyThread(&parent));
                 MOZ_ASSERT(parent == shape->getObjectParent());
 #endif
                 if (shape != entry.shape.unbarrieredGet() || proto != entry.proto.raw()) {
                     ReadBarrieredShape readBarrieredShape(shape);
                     InitialShapeEntry newKey(readBarrieredShape, TaggedProto(proto));
                     e.rekeyFront(newKey.getLookup(), newKey);
                 }
             }