Bug 1313098 - Assert we only access heap state on the main thread r=sfink r=mccr8
author Jon Coppeard <jcoppeard@mozilla.com>
Fri, 04 Nov 2016 17:32:36 +0000
changeset 347737 0f246bb6108c125377deb37e91d473c998ced132
parent 347736 2497845a72a1e5672620b358b93cee51af1cf0a3
child 347738 a5d8d7e811c1cfdb813560df4265d224284c4fb0
push id 10298
push user raliiev@mozilla.com
push date Mon, 14 Nov 2016 12:33:03 +0000
treeherder mozilla-aurora@7e29173b1641
reviewers sfink, mccr8
bugs 1313098
milestone 52.0a1
Bug 1313098 - Assert we only access heap state on the main thread r=sfink r=mccr8
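
The patch replaces reads of runtime-wide heap state from code that may run off the main thread with per-thread assertions: shadow::Runtime::heapState_ becomes private behind accessors that assert the calling thread may touch it, PerThreadData gains a debug-only performingGC flag managed by the RAII class AutoSetThreadIsPerformingGC and queried through CurrentThreadIsPerformingGC(), arena allocation takes an explicit ShouldCheckThresholds argument instead of inferring it from heap state, and the arena cell iterators are split into variants that each assert the context they may legally run in.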
dom/plugins/base/nsJSNPRuntime.cpp
js/public/GCAPI.h
js/public/HeapAPI.h
js/src/gc/Allocator.cpp
js/src/gc/GCRuntime.h
js/src/gc/Heap.h
js/src/gc/Iteration.cpp
js/src/gc/Nursery.cpp
js/src/jit/JitcodeMap.cpp
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsgcinlines.h
js/src/jspubtd.h
js/src/vm/HelperThreads.cpp
js/src/vm/Runtime.cpp
js/src/vm/Runtime.h
js/src/vm/TypeInference.cpp
js/xpconnect/src/XPCWrappedNativeProto.cpp
--- a/dom/plugins/base/nsJSNPRuntime.cpp
+++ b/dom/plugins/base/nsJSNPRuntime.cpp
@@ -1739,17 +1739,17 @@ NPObjWrapper_ObjectMoved(JSObject *obj, 
   }
 
   // Calling PLDHashTable::Search() will not result in GC.
   JS::AutoSuppressGCAnalysis nogc;
 
   auto entry =
     static_cast<NPObjWrapperHashEntry*>(sNPObjWrappers->Search(npobj));
   MOZ_ASSERT(entry && entry->mJSObj);
-  MOZ_ASSERT(entry->mJSObj == old);
+  MOZ_ASSERT(entry->mJSObj.unbarrieredGetPtr() == old);
   entry->mJSObj = obj;
 }
 
 static bool
 NPObjWrapper_Call(JSContext *cx, unsigned argc, JS::Value *vp)
 {
   JS::CallArgs args = JS::CallArgsFromVp(argc, vp);
   JS::Rooted<JSObject*> obj(cx, &args.callee());
--- a/js/public/GCAPI.h
+++ b/js/public/GCAPI.h
@@ -627,41 +627,52 @@ UnmarkGrayGCThingRecursively(GCCellPtr t
 namespace js {
 namespace gc {
 
 static MOZ_ALWAYS_INLINE void
 ExposeGCThingToActiveJS(JS::GCCellPtr thing)
 {
     MOZ_ASSERT(thing.kind() != JS::TraceKind::Shape);
 
-    /*
-     * GC things residing in the nursery cannot be gray: they have no mark bits.
-     * All live objects in the nursery are moved to tenured at the beginning of
-     * each GC slice, so the gray marker never sees nursery things.
-     */
+    // GC things residing in the nursery cannot be gray: they have no mark bits.
+    // All live objects in the nursery are moved to tenured at the beginning of
+    // each GC slice, so the gray marker never sees nursery things.
     if (IsInsideNursery(thing.asCell()))
         return;
+
+    // There's nothing to do for permanent GC things that might be owned by
+    // another runtime.
+    if (thing.mayBeOwnedByOtherRuntime())
+        return;
+
     JS::shadow::Runtime* rt = detail::GetGCThingRuntime(thing.unsafeAsUIntPtr());
     MOZ_DIAGNOSTIC_ASSERT(rt->allowGCBarriers());
+
     if (IsIncrementalBarrierNeededOnTenuredGCThing(rt, thing))
         JS::IncrementalReferenceBarrier(thing);
     else if (JS::GCThingIsMarkedGray(thing))
         JS::UnmarkGrayGCThingRecursively(thing);
 }
 
 static MOZ_ALWAYS_INLINE void
 MarkGCThingAsLive(JSRuntime* aRt, JS::GCCellPtr thing)
 {
+    // Any object in the nursery will not be freed during any GC running at that
+    // time.
+    if (IsInsideNursery(thing.asCell()))
+        return;
+
+    // There's nothing to do for permanent GC things that might be owned by
+    // another runtime.
+    if (thing.mayBeOwnedByOtherRuntime())
+        return;
+
     JS::shadow::Runtime* rt = JS::shadow::Runtime::asShadowRuntime(aRt);
     MOZ_DIAGNOSTIC_ASSERT(rt->allowGCBarriers());
-    /*
-     * Any object in the nursery will not be freed during any GC running at that time.
-     */
-    if (IsInsideNursery(thing.asCell()))
-        return;
+
     if (IsIncrementalBarrierNeededOnTenuredGCThing(rt, thing))
         JS::IncrementalReferenceBarrier(thing);
 }
 
 } /* namespace gc */
 } /* namespace js */
 
 namespace JS {
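
Note on the GCAPI.h hunk above: both functions now take their early returns, for nursery things and for permanent things possibly owned by another runtime, before dereferencing the runtime, so GetGCThingRuntime() and the allowGCBarriers() assertion are only reached for tenured things owned by the current runtime.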
--- a/js/public/HeapAPI.h
+++ b/js/public/HeapAPI.h
@@ -12,21 +12,16 @@
 #include "jspubtd.h"
 
 #include "js/TraceKind.h"
 #include "js/Utility.h"
 
 /* These values are private to the JS engine. */
 namespace js {
 
-// Whether the current thread is permitted access to any part of the specified
-// runtime or zone.
-JS_FRIEND_API(bool)
-CurrentThreadCanAccessRuntime(JSRuntime* rt);
-
 JS_FRIEND_API(bool)
 CurrentThreadCanAccessZone(JS::Zone* zone);
 
 namespace gc {
 
 struct Cell;
 
 const size_t ArenaShift = 12;
@@ -379,18 +374,21 @@ GCThingTraceKind(void* thing);
 namespace js {
 namespace gc {
 
 static MOZ_ALWAYS_INLINE bool
 IsIncrementalBarrierNeededOnTenuredGCThing(JS::shadow::Runtime* rt, const JS::GCCellPtr thing)
 {
     MOZ_ASSERT(thing);
     MOZ_ASSERT(!js::gc::IsInsideNursery(thing.asCell()));
-    if (rt->isHeapCollecting())
-        return false;
+
+    // TODO: I'd like to assert !isHeapBusy() here but this gets called while we
+    // are tracing the heap, e.g. during memory reporting (see bug 1313318).
+    MOZ_ASSERT(!rt->isHeapCollecting());
+
     JS::Zone* zone = JS::GetTenuredGCThingZone(thing);
     return JS::shadow::Zone::asShadowZone(zone)->needsIncrementalBarrier();
 }
 
 /**
  * Create an object providing access to the garbage collector's internal notion
  * of the current state of memory (both GC heap memory and GCthing-controlled
  * malloc memory.
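
In the hunk above, IsIncrementalBarrierNeededOnTenuredGCThing previously tolerated being called during collection by answering "no barrier needed"; it now asserts that case never happens. The assertion is deliberately the weaker !isHeapCollecting() rather than !isHeapBusy() because memory reporting still traces the heap through this path, as the TODO and bug 1313318 record.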
--- a/js/src/gc/Allocator.cpp
+++ b/js/src/gc/Allocator.cpp
@@ -289,52 +289,55 @@ GCRuntime::refillFreeListFromAnyThread(E
 GCRuntime::refillFreeListFromMainThread(JSContext* cx, AllocKind thingKind, size_t thingSize)
 {
     // It should not be possible to allocate on the main thread while we are
     // inside a GC.
     Zone *zone = cx->zone();
     MOZ_ASSERT(!cx->runtime()->isHeapBusy(), "allocating while under GC");
 
     AutoMaybeStartBackgroundAllocation maybeStartBGAlloc;
-    return cx->arenas()->allocateFromArena(zone, thingKind, maybeStartBGAlloc);
+    return cx->arenas()->allocateFromArena(zone, thingKind, CheckThresholds, maybeStartBGAlloc);
 }
 
 /* static */ TenuredCell*
 GCRuntime::refillFreeListOffMainThread(ExclusiveContext* cx, AllocKind thingKind)
 {
     // A GC may be happening on the main thread, but zones used by exclusive
     // contexts are never collected.
     Zone* zone = cx->zone();
     MOZ_ASSERT(!zone->wasGCStarted());
 
     AutoMaybeStartBackgroundAllocation maybeStartBGAlloc;
-    return cx->arenas()->allocateFromArena(zone, thingKind, maybeStartBGAlloc);
+    return cx->arenas()->allocateFromArena(zone, thingKind, CheckThresholds, maybeStartBGAlloc);
 }
 
 /* static */ TenuredCell*
 GCRuntime::refillFreeListInGC(Zone* zone, AllocKind thingKind)
 {
     /*
      * Called by compacting GC to refill a free list while we are in a GC.
      */
 
     zone->arenas.checkEmptyFreeList(thingKind);
     mozilla::DebugOnly<JSRuntime*> rt = zone->runtimeFromMainThread();
     MOZ_ASSERT(rt->isHeapCollecting());
     MOZ_ASSERT_IF(!rt->isHeapMinorCollecting(), !rt->gc.isBackgroundSweeping());
 
     AutoMaybeStartBackgroundAllocation maybeStartBackgroundAllocation;
-    return zone->arenas.allocateFromArena(zone, thingKind, maybeStartBackgroundAllocation);
+    return zone->arenas.allocateFromArena(zone, thingKind, DontCheckThresholds,
+                                          maybeStartBackgroundAllocation);
 }
 
 TenuredCell*
 ArenaLists::allocateFromArena(JS::Zone* zone, AllocKind thingKind,
+                              ShouldCheckThresholds checkThresholds,
                               AutoMaybeStartBackgroundAllocation& maybeStartBGAlloc)
 {
     JSRuntime* rt = zone->runtimeFromAnyThread();
+
     mozilla::Maybe<AutoLockGC> maybeLock;
 
     // See if we can proceed without taking the GC lock.
     if (backgroundFinalizeState[thingKind] != BFS_DONE)
         maybeLock.emplace(rt);
 
     ArenaList& al = arenaLists[thingKind];
     Arena* arena = al.takeNextArena();
@@ -351,17 +354,17 @@ ArenaLists::allocateFromArena(JS::Zone* 
         maybeLock.emplace(rt);
 
     Chunk* chunk = rt->gc.pickChunk(maybeLock.ref(), maybeStartBGAlloc);
     if (!chunk)
         return nullptr;
 
     // Although our chunk should definitely have enough space for another arena,
     // there are other valid reasons why Chunk::allocateArena() may fail.
-    arena = rt->gc.allocateArena(chunk, zone, thingKind, maybeLock.ref());
+    arena = rt->gc.allocateArena(chunk, zone, thingKind, checkThresholds, maybeLock.ref());
     if (!arena)
         return nullptr;
 
     MOZ_ASSERT(al.isCursorAtEnd());
     al.insertBeforeCursor(arena);
 
     return allocateFromArenaInner(zone, arena, thingKind);
 }
@@ -402,33 +405,30 @@ GCRuntime::wantBackgroundAllocation(cons
     // allocation if we already have some empty chunks or when the runtime has
     // a small heap size (and therefore likely has a small growth rate).
     return allocTask.enabled() &&
            emptyChunks(lock).count() < tunables.minEmptyChunkCount(lock) &&
            (fullChunks(lock).count() + availableChunks(lock).count()) >= 4;
 }
 
 Arena*
-GCRuntime::allocateArena(Chunk* chunk, Zone* zone, AllocKind thingKind, const AutoLockGC& lock)
+GCRuntime::allocateArena(Chunk* chunk, Zone* zone, AllocKind thingKind,
+                         ShouldCheckThresholds checkThresholds, const AutoLockGC& lock)
 {
     MOZ_ASSERT(chunk->hasAvailableArenas());
 
     // Fail the allocation if we are over our heap size limits.
-    if (!rt->isHeapMinorCollecting() &&
-        !isHeapCompacting() &&
-        usage.gcBytes() >= tunables.gcMaxBytes())
-    {
+    if (checkThresholds && usage.gcBytes() >= tunables.gcMaxBytes())
         return nullptr;
-    }
 
     Arena* arena = chunk->allocateArena(rt, zone, thingKind, lock);
     zone->usage.addGCArena();
 
     // Trigger an incremental slice if needed.
-    if (!rt->isHeapMinorCollecting() && !isHeapCompacting())
+    if (checkThresholds)
         maybeAllocTriggerZoneGC(zone, lock);
 
     return arena;
 }
 
 Arena*
 Chunk::allocateArena(JSRuntime* rt, Zone* zone, AllocKind thingKind, const AutoLockGC& lock)
 {
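
For orientation on the threshold change: allocateArena previously decided whether to enforce heap limits by reading heap state (!rt->isHeapMinorCollecting() && !isHeapCompacting()), exactly the kind of cross-thread read this patch is removing, so each refill path now states its intent explicitly. Below is a minimal standalone sketch of the pattern; the helper functions are illustrative stand-ins, not SpiderMonkey APIs.

    #include <cstdio>

    enum ShouldCheckThresholds { DontCheckThresholds = 0, CheckThresholds = 1 };

    static bool overHeapLimit() { return false; }              // stand-in
    static void maybeTriggerZoneGC() { std::puts("trigger"); } // stand-in

    static const char* allocateArena(ShouldCheckThresholds checkThresholds) {
        // Mutator allocation fails when over the heap limit; the compacting
        // GC's refill path passes DontCheckThresholds and always proceeds.
        if (checkThresholds && overHeapLimit())
            return nullptr;
        // GC-trigger bookkeeping is likewise skipped while collecting.
        if (checkThresholds)
            maybeTriggerZoneGC();
        return "arena";
    }

    int main() {
        allocateArena(CheckThresholds);     // refillFreeListFromMainThread path
        allocateArena(DontCheckThresholds); // refillFreeListInGC path
    }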
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -912,17 +912,18 @@ class GCRuntime
         NotFinished = 0,
         Finished
     };
 
     // For ArenaLists::allocateFromArena()
     friend class ArenaLists;
     Chunk* pickChunk(const AutoLockGC& lock,
                      AutoMaybeStartBackgroundAllocation& maybeStartBGAlloc);
-    Arena* allocateArena(Chunk* chunk, Zone* zone, AllocKind kind, const AutoLockGC& lock);
+    Arena* allocateArena(Chunk* chunk, Zone* zone, AllocKind kind,
+                         ShouldCheckThresholds checkThresholds, const AutoLockGC& lock);
     void arenaAllocatedDuringGC(JS::Zone* zone, Arena* arena);
 
     // Allocator internals
     MOZ_MUST_USE bool gcIfNeededPerAllocation(JSContext* cx);
     template <typename T>
     static void checkIncrementalZoneState(ExclusiveContext* cx, T* t);
     static TenuredCell* refillFreeListFromAnyThread(ExclusiveContext* cx, AllocKind thingKind,
                                                     size_t thingSize);
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -1310,17 +1310,17 @@ TenuredCell::readBarrier(TenuredCell* th
 void
 AssertSafeToSkipBarrier(TenuredCell* thing);
 
 /* static */ MOZ_ALWAYS_INLINE void
 TenuredCell::writeBarrierPre(TenuredCell* thing)
 {
     MOZ_ASSERT(!CurrentThreadIsIonCompiling());
     MOZ_ASSERT_IF(thing, !isNullLike(thing));
-    if (!thing || thing->shadowRuntimeFromAnyThread()->isHeapCollecting())
+    if (!thing)
         return;
 
 #ifdef JS_GC_ZEAL
     // When verifying pre-barriers we need to switch on all barriers, even
     // those on the Atoms Zone. Normally, we never enter a parse task when
     // collecting in the atoms zone, so we will filter out atoms below.
     // Unfortunately, if we try that when verifying pre-barriers, we'd never be
     // able to handle OMT parse tasks at all as we switch on the verifier any
--- a/js/src/gc/Iteration.cpp
+++ b/js/src/gc/Iteration.cpp
@@ -30,17 +30,17 @@ IterateCompartmentsArenasCells(JSContext
 
     for (auto thingKind : AllAllocKinds()) {
         JS::TraceKind traceKind = MapAllocToTraceKind(thingKind);
         size_t thingSize = Arena::thingSize(thingKind);
 
         for (ArenaIter aiter(zone, thingKind); !aiter.done(); aiter.next()) {
             Arena* arena = aiter.get();
             (*arenaCallback)(cx, data, arena, traceKind, thingSize);
-            for (ArenaCellIterUnderGC iter(arena); !iter.done(); iter.next())
+            for (ArenaCellIter iter(arena); !iter.done(); iter.next())
                 (*cellCallback)(cx, data, iter.getCell(), traceKind, thingSize);
         }
     }
 }
 
 void
 js::IterateZonesCompartmentsArenasCells(JSContext* cx, void* data,
                                         IterateZoneCallback zoneCallback,
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -653,16 +653,17 @@ js::Nursery::collect(JSRuntime* rt, JS::
     }
 }
 
 double
 js::Nursery::doCollection(JSRuntime* rt, JS::gcreason::Reason reason,
                           TenureCountCache& tenureCounts)
 {
     AutoTraceSession session(rt, JS::HeapState::MinorCollecting);
+    AutoSetThreadIsPerformingGC performingGC;
     AutoStopVerifyingBarriers av(rt, false);
     AutoDisableProxyCheck disableStrictProxyChecking(rt);
     mozilla::DebugOnly<AutoEnterOOMUnsafeRegion> oomUnsafeRegion;
 
     size_t initialNurserySize = spaceToEnd();
 
     // Move objects pointed to by roots from the nursery to the major heap.
     TenuringTracer mover(rt, this);
--- a/js/src/jit/JitcodeMap.cpp
+++ b/js/src/jit/JitcodeMap.cpp
@@ -447,19 +447,19 @@ JitcodeGlobalTable::lookupForSamplerInfa
     if (entry->isIonCache()) {
         JitcodeGlobalEntry& rejoinEntry = RejoinEntry(rt, entry->ionCacheEntry(), ptr);
         rejoinEntry.setGeneration(sampleBufferGen);
     }
 
     // JitcodeGlobalEntries are marked at the end of the mark phase. A read
     // barrier is not needed. Any JS frames sampled during the sweep phase of
     // the GC must be on stack, and on-stack frames must already be marked at
-    // the beginning of the sweep phase. This assumption is verified below.
-    MOZ_ASSERT_IF(rt->isHeapBusy() && entry->jitcode()->zoneFromAnyThread()->isGCSweeping(),
-                  entry->isMarkedFromAnyThread(rt));
+    // the beginning of the sweep phase. It's not possible to assert this here
+    // as we may not be running on the main thread when called from the Gecko
+    // profiler.
 
     return *entry;
 }
 
 JitcodeGlobalEntry*
 JitcodeGlobalTable::lookupInternal(void* ptr)
 {
     JitcodeGlobalEntry query = JitcodeGlobalEntry::MakeQuery(ptr);
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1956,23 +1956,23 @@ RelocateArena(Arena* arena, SliceBudget&
     MOZ_ASSERT(!arena->allocatedDuringIncremental);
     MOZ_ASSERT(arena->bufferedCells->isEmpty());
 
     Zone* zone = arena->zone;
 
     AllocKind thingKind = arena->getAllocKind();
     size_t thingSize = arena->getThingSize();
 
-    for (ArenaCellIterUnderFinalize i(arena); !i.done(); i.next()) {
+    for (ArenaCellIterUnderGC i(arena); !i.done(); i.next()) {
         RelocateCell(zone, i.getCell(), thingKind, thingSize);
         sliceBudget.step();
     }
 
 #ifdef DEBUG
-    for (ArenaCellIterUnderFinalize i(arena); !i.done(); i.next()) {
+    for (ArenaCellIterUnderGC i(arena); !i.done(); i.next()) {
         TenuredCell* src = i.getCell();
         MOZ_ASSERT(RelocationOverlay::isCellForwarded(src));
         TenuredCell* dest = Forwarded(src);
         MOZ_ASSERT(src->isMarked(BLACK) == dest->isMarked(BLACK));
         MOZ_ASSERT(src->isMarked(GRAY) == dest->isMarked(GRAY));
     }
 #endif
 }
@@ -3579,17 +3579,17 @@ ArenaLists::checkEmptyArenaList(AllocKin
     size_t num_live = 0;
 #ifdef DEBUG
     if (!arenaLists[kind].isEmpty()) {
         size_t max_cells = 20;
         char *env = getenv("JS_GC_MAX_LIVE_CELLS");
         if (env && *env)
             max_cells = atol(env);
         for (Arena* current = arenaLists[kind].head(); current; current = current->next) {
-            for (ArenaCellIterUnderFinalize i(current); !i.done(); i.next()) {
+            for (ArenaCellIterUnderGC i(current); !i.done(); i.next()) {
                 Cell* t = i.get<Cell>();
                 MOZ_ASSERT(t->asTenured().isMarked(), "unmarked cells should have been finalized");
                 if (++num_live <= max_cells) {
                     fprintf(stderr, "ERROR: GC found live Cell %p of kind %s at shutdown\n",
                             t, AllocKindToAscii(kind));
                 }
             }
         }
@@ -4805,20 +4805,16 @@ GCRuntime::endMarkingZoneGroup()
         zone->setGCState(Zone::Mark);
     }
     MOZ_ASSERT(marker.isDrained());
     marker.setMarkColorBlack();
 }
 
 class GCSweepTask : public GCParallelTask
 {
-    virtual void runFromHelperThread(AutoLockHelperThreadState& locked) override {
-        AutoSetThreadIsSweeping threadIsSweeping;
-        GCParallelTask::runFromHelperThread(locked);
-    }
     GCSweepTask(const GCSweepTask&) = delete;
 
   protected:
     JSRuntime* runtime;
 
   public:
     explicit GCSweepTask(JSRuntime* rt) : runtime(rt) {}
     GCSweepTask(GCSweepTask&& other)
@@ -5575,29 +5571,29 @@ HeapStateToLabel(JS::HeapState heapState
     MOZ_ASSERT_UNREACHABLE("Should have exhausted every JS::HeapState variant!");
     return nullptr;
 }
 
 /* Start a new heap session. */
 AutoTraceSession::AutoTraceSession(JSRuntime* rt, JS::HeapState heapState)
   : lock(rt),
     runtime(rt),
-    prevState(rt->heapState_),
+    prevState(rt->heapState()),
     pseudoFrame(rt, HeapStateToLabel(heapState), ProfileEntry::Category::GC)
 {
-    MOZ_ASSERT(rt->heapState_ == JS::HeapState::Idle);
+    MOZ_ASSERT(prevState == JS::HeapState::Idle);
     MOZ_ASSERT(heapState != JS::HeapState::Idle);
     MOZ_ASSERT_IF(heapState == JS::HeapState::MajorCollecting, rt->gc.nursery.isEmpty());
-    rt->heapState_ = heapState;
+    rt->setHeapState(heapState);
 }
 
 AutoTraceSession::~AutoTraceSession()
 {
     MOZ_ASSERT(runtime->isHeapBusy());
-    runtime->heapState_ = prevState;
+    runtime->setHeapState(prevState);
 }
 
 void
 GCRuntime::resetIncrementalGC(gc::AbortReason reason, AutoLockForExclusiveAccess& lock)
 {
     MOZ_ASSERT(reason != gc::AbortReason::None);
 
     switch (incrementalState) {
@@ -5713,16 +5709,17 @@ namespace {
 
 class AutoGCSlice {
   public:
     explicit AutoGCSlice(JSRuntime* rt);
     ~AutoGCSlice();
 
   private:
     JSRuntime* runtime;
+    AutoSetThreadIsPerformingGC performingGC;
 };
 
 } /* anonymous namespace */
 
 AutoGCSlice::AutoGCSlice(JSRuntime* rt)
   : runtime(rt)
 {
     /*
@@ -7074,26 +7071,26 @@ AutoAssertNoNurseryAlloc::AutoAssertNoNu
 }
 
 AutoAssertNoNurseryAlloc::~AutoAssertNoNurseryAlloc()
 {
     gc.allowNurseryAlloc();
 }
 
 JS::AutoEnterCycleCollection::AutoEnterCycleCollection(JSContext* cx)
-  : runtime(shadow::Runtime::asShadowRuntime(cx->runtime()))
-{
-    MOZ_ASSERT(runtime->heapState_ == HeapState::Idle);
-    runtime->heapState_ = HeapState::CycleCollecting;
+  : runtime(cx->runtime())
+{
+    MOZ_ASSERT(!runtime->isHeapBusy());
+    runtime->setHeapState(HeapState::CycleCollecting);
 }
 
 JS::AutoEnterCycleCollection::~AutoEnterCycleCollection()
 {
-    MOZ_ASSERT(runtime->heapState_ == HeapState::CycleCollecting);
-    runtime->heapState_ = HeapState::Idle;
+    MOZ_ASSERT(runtime->isCycleCollecting());
+    runtime->setHeapState(HeapState::Idle);
 }
 #endif
 
 JS::AutoAssertGCCallback::AutoAssertGCCallback(JSObject* obj)
   : AutoSuppressGCAnalysis()
 {
     MOZ_ASSERT(obj->runtimeFromMainThread()->isHeapCollecting());
 }
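
Taken together, the jsgc.cpp changes above mark the main thread as performing GC for the duration of every major slice (AutoGCSlice now owns an AutoSetThreadIsPerformingGC member), drop GCSweepTask's runFromHelperThread override now that the base class sets the per-thread GC flag itself (see the HelperThreads.cpp hunk below), and route AutoTraceSession and AutoEnterCycleCollection through the new asserting heapState()/setHeapState() accessors instead of poking heapState_ directly.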
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -593,16 +593,22 @@ class SortedArenaList
         // the list is empty, this will just set segments[0].head to null.
         segments[tailIndex].linkTo(nullptr);
         // Create an ArenaList with head and cursor set to the head and tail of
         // the first segment (if that segment is empty, only the head is used).
         return ArenaList(segments[0]);
     }
 };
 
+enum ShouldCheckThresholds
+{
+    DontCheckThresholds = 0,
+    CheckThresholds = 1
+};
+
 class ArenaLists
 {
     JSRuntime* runtime_;
 
     /*
      * For each arena kind its free list is represented as the first span with
      * free things. Initially all the spans are initialized as empty. After we
      * find a new arena with available things we move its first free span into
@@ -800,16 +806,17 @@ class ArenaLists
                             KeepArenasEnum keepArenas, Arena** empty = nullptr);
     inline void forceFinalizeNow(FreeOp* fop, AllocKind thingKind,
                                  KeepArenasEnum keepArenas, Arena** empty = nullptr);
     inline void queueForForegroundSweep(FreeOp* fop, AllocKind thingKind);
     inline void queueForBackgroundSweep(FreeOp* fop, AllocKind thingKind);
     inline void mergeSweptArenas(AllocKind thingKind);
 
     TenuredCell* allocateFromArena(JS::Zone* zone, AllocKind thingKind,
+                                   ShouldCheckThresholds checkThresholds,
                                    AutoMaybeStartBackgroundAllocation& maybeStartBGAlloc);
     inline TenuredCell* allocateFromArenaInner(JS::Zone* zone, Arena* arena, AllocKind kind);
 
     inline void normalizeBackgroundFinalizeState(AllocKind thingKind);
 
     friend class GCRuntime;
     friend class js::Nursery;
     friend class js::TenuringTracer;
@@ -984,17 +991,17 @@ class GCParallelTask
 
     // Check if a task is actively running.
     bool isRunningWithLockHeld(const AutoLockHelperThreadState& locked) const;
     bool isRunning() const;
 
     // This should be friended to HelperThread, but cannot be because it
     // would introduce several circular dependencies.
   public:
-    virtual void runFromHelperThread(AutoLockHelperThreadState& locked);
+    void runFromHelperThread(AutoLockHelperThreadState& locked);
 };
 
 typedef void (*IterateChunkCallback)(JSRuntime* rt, void* data, gc::Chunk* chunk);
 typedef void (*IterateZoneCallback)(JSRuntime* rt, void* data, JS::Zone* zone);
 typedef void (*IterateArenaCallback)(JSRuntime* rt, void* data, gc::Arena* arena,
                                      JS::TraceKind traceKind, size_t thingSize);
 typedef void (*IterateCellCallback)(JSRuntime* rt, void* data, void* thing,
                                     JS::TraceKind traceKind, size_t thingSize);
--- a/js/src/jsgcinlines.h
+++ b/js/src/jsgcinlines.h
@@ -1,10 +1,10 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
- * vim: set ts=8 sts=4 et sw=4 tw=99:
+* vim: set ts=8 sts=4 et sw=4 tw=99:
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef jsgcinlines_h
 #define jsgcinlines_h
 
 #include "jsgc.h"
@@ -176,28 +176,44 @@ class ArenaCellIterImpl
             moveForwardIfFree();
     }
 };
 
 template<>
 JSObject*
 ArenaCellIterImpl::get<JSObject>() const;
 
+class ArenaCellIter : public ArenaCellIterImpl
+{
+  public:
+    explicit ArenaCellIter(Arena* arena)
+      : ArenaCellIterImpl(arena)
+    {
+        MOZ_ASSERT(arena->zone->runtimeFromMainThread()->isHeapTracing());
+    }
+};
+
 class ArenaCellIterUnderGC : public ArenaCellIterImpl
 {
   public:
-    explicit ArenaCellIterUnderGC(Arena* arena) : ArenaCellIterImpl(arena) {
-        MOZ_ASSERT(arena->zone->runtimeFromAnyThread()->isHeapBusy());
+    explicit ArenaCellIterUnderGC(Arena* arena)
+      : ArenaCellIterImpl(arena)
+    {
+        MOZ_ASSERT(CurrentThreadIsPerformingGC());
     }
 };
 
 class ArenaCellIterUnderFinalize : public ArenaCellIterImpl
 {
   public:
-    explicit ArenaCellIterUnderFinalize(Arena* arena) : ArenaCellIterImpl(arena) {}
+    explicit ArenaCellIterUnderFinalize(Arena* arena)
+      : ArenaCellIterImpl(arena)
+    {
+        MOZ_ASSERT(CurrentThreadIsGCSweeping());
+    }
 };
 
 template <typename T>
 class ZoneCellIter;
 
 template <>
 class ZoneCellIter<TenuredCell> {
     ArenaIter arenaIter;
@@ -363,16 +379,17 @@ class GrayObjectIter : public ZoneCellIt
 
 class GCZonesIter
 {
   private:
     ZonesIter zone;
 
   public:
     explicit GCZonesIter(JSRuntime* rt, ZoneSelector selector = WithAtoms) : zone(rt, selector) {
+        MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt) && rt->isHeapBusy());
         if (!zone->isCollecting())
             next();
     }
 
     bool done() const { return zone.done(); }
 
     void next() {
         MOZ_ASSERT(!done());
@@ -394,17 +411,17 @@ typedef CompartmentsIterT<GCZonesIter> G
 
 /* Iterates over all zones in the current zone group. */
 class GCZoneGroupIter {
   private:
     JS::Zone* current;
 
   public:
     explicit GCZoneGroupIter(JSRuntime* rt) {
-        MOZ_ASSERT(rt->isHeapBusy());
+        MOZ_ASSERT(CurrentThreadIsPerformingGC());
         current = rt->gc.getCurrentZoneGroup();
     }
 
     bool done() const { return !current; }
 
     void next() {
         MOZ_ASSERT(!done());
         current = current->nextNodeInGroup();
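
The iterator split above is the core of the patch: ArenaCellIterUnderGC used to assert arena->zone->runtimeFromAnyThread()->isHeapBusy(), a read of shared runtime state from whichever thread was iterating, and now asserts the thread-local CurrentThreadIsPerformingGC() instead. The new ArenaCellIter covers main-thread heap tracing (isHeapTracing(), used by the iteration callbacks in gc/Iteration.cpp), while ArenaCellIterUnderFinalize, which runs during background sweeping, gains a CurrentThreadIsGCSweeping() assertion where it previously had none.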
--- a/js/src/jspubtd.h
+++ b/js/src/jspubtd.h
@@ -39,17 +39,21 @@ class JS_FRIEND_API(ReadOnlyCompileOptio
 class JS_FRIEND_API(OwningCompileOptions);
 class JS_FRIEND_API(TransitiveCompileOptions);
 class JS_PUBLIC_API(CompartmentOptions);
 
 struct RootingContext;
 class Value;
 struct Zone;
 
-} /* namespace JS */
+namespace shadow {
+struct Runtime;
+} // namespace shadow
+
+} // namespace JS
 
 namespace js {
 class RootLists;
 } // namespace js
 
 /*
  * Run-time version enumeration.  For compile-time version checking, please use
  * the JS_HAS_* macros in jsversion.h, or use MOZJS_MAJOR_VERSION,
@@ -121,16 +125,27 @@ typedef JSConstScalarSpec<int32_t> JSCon
 typedef void
 (* JSTraceDataOp)(JSTracer* trc, void* data);
 
 namespace js {
 namespace gc {
 class AutoTraceSession;
 class StoreBuffer;
 } // namespace gc
+
+// Whether the current thread is permitted access to any part of the specified
+// runtime or zone.
+JS_FRIEND_API(bool)
+CurrentThreadCanAccessRuntime(const JSRuntime* rt);
+
+#ifdef DEBUG
+JS_FRIEND_API(bool)
+CurrentThreadIsPerformingGC();
+#endif
+
 } // namespace js
 
 namespace JS {
 
 class JS_PUBLIC_API(AutoEnterCycleCollection);
 class JS_PUBLIC_API(AutoAssertOnBarrier);
 struct PropertyDescriptor;
 
@@ -143,48 +158,63 @@ enum class HeapState {
     MinorCollecting,  // doing a GC of the minor heap (nursery)
     CycleCollecting   // in the "Unlink" phase of cycle collection
 };
 
 namespace shadow {
 
 struct Runtime
 {
+  private:
+    JS::HeapState heapState_;
+
   protected:
-    // Allow inlining of heapState checks.
-    friend class js::gc::AutoTraceSession;
-    friend class JS::AutoEnterCycleCollection;
-    JS::HeapState heapState_;
+    void setHeapState(JS::HeapState newState) {
+        MOZ_ASSERT(js::CurrentThreadCanAccessRuntime(asRuntime()));
+        MOZ_ASSERT(heapState_ != newState);
+        heapState_ = newState;
+    }
+
+    JS::HeapState heapState() const {
+        MOZ_ASSERT(js::CurrentThreadCanAccessRuntime(asRuntime()) ||
+                   js::CurrentThreadIsPerformingGC());
+        return heapState_;
+    }
 
     // In some cases, invoking GC barriers (incremental or otherwise) will break
     // things. These barriers assert if this flag is set.
     bool allowGCBarriers_;
     friend class JS::AutoAssertOnBarrier;
 
     js::gc::StoreBuffer* gcStoreBufferPtr_;
 
   public:
     Runtime()
       : heapState_(JS::HeapState::Idle)
       , allowGCBarriers_(true)
       , gcStoreBufferPtr_(nullptr)
     {}
 
-    bool isHeapBusy() const { return heapState_ != JS::HeapState::Idle; }
-    bool isHeapMajorCollecting() const { return heapState_ == JS::HeapState::MajorCollecting; }
-    bool isHeapMinorCollecting() const { return heapState_ == JS::HeapState::MinorCollecting; }
+    bool isHeapBusy() const { return heapState() != JS::HeapState::Idle; }
+    bool isHeapTracing() const { return heapState() == JS::HeapState::Tracing; }
+    bool isHeapMajorCollecting() const { return heapState() == JS::HeapState::MajorCollecting; }
+    bool isHeapMinorCollecting() const { return heapState() == JS::HeapState::MinorCollecting; }
     bool isHeapCollecting() const { return isHeapMinorCollecting() || isHeapMajorCollecting(); }
     bool isCycleCollecting() const {
-        return heapState_ == JS::HeapState::CycleCollecting;
+        return heapState() == JS::HeapState::CycleCollecting;
     }
 
     bool allowGCBarriers() const { return allowGCBarriers_; }
 
     js::gc::StoreBuffer* gcStoreBufferPtr() { return gcStoreBufferPtr_; }
 
+    const JSRuntime* asRuntime() const {
+        return reinterpret_cast<const JSRuntime*>(this);
+    }
+
     static JS::shadow::Runtime* asShadowRuntime(JSRuntime* rt) {
         return reinterpret_cast<JS::shadow::Runtime*>(rt);
     }
 
   protected:
     void setGCStoreBufferPtr(js::gc::StoreBuffer* storeBuffer) {
         gcStoreBufferPtr_ = storeBuffer;
     }
@@ -192,17 +222,17 @@ struct Runtime
 
 } /* namespace shadow */
 
 // Decorates the Unlinking phase of CycleCollection so that accidental use
 // of barriered accessors results in assertions instead of leaks.
 class MOZ_STACK_CLASS JS_PUBLIC_API(AutoEnterCycleCollection)
 {
 #ifdef DEBUG
-    shadow::Runtime* runtime;
+    JSRuntime* runtime;
 
   public:
     explicit AutoEnterCycleCollection(JSContext* cx);
     ~AutoEnterCycleCollection();
 #else
   public:
     explicit AutoEnterCycleCollection(JSContext* cx) {}
     ~AutoEnterCycleCollection() {}
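
The shadow::Runtime hunk above is a standard encapsulation move: the raw field goes private and every access funnels through methods that assert which threads may read or write it. A hedged, self-contained sketch of the pattern, with stand-ins for js::CurrentThreadCanAccessRuntime and js::CurrentThreadIsPerformingGC:

    #include <cassert>

    enum class HeapState { Idle, Tracing, MajorCollecting };

    static bool currentThreadOwnsRuntime() { return true; }  // stand-in
    static bool currentThreadIsGC() { return false; }        // stand-in

    class ShadowRuntime {
        HeapState heapState_ = HeapState::Idle;  // private: no direct pokes

      protected:
        void setHeapState(HeapState newState) {
            assert(currentThreadOwnsRuntime());  // writes: owner thread only
            assert(heapState_ != newState);      // transitions must be real
            heapState_ = newState;
        }

      public:
        // Reads are allowed from the owner thread or a thread performing GC.
        HeapState heapState() const {
            assert(currentThreadOwnsRuntime() || currentThreadIsGC());
            return heapState_;
        }
        bool isHeapBusy() const { return heapState() != HeapState::Idle; }
    };

    int main() {
        ShadowRuntime rt;
        assert(!rt.isHeapBusy());
    }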
--- a/js/src/vm/HelperThreads.cpp
+++ b/js/src/vm/HelperThreads.cpp
@@ -1148,16 +1148,17 @@ js::GCParallelTask::runFromMainThread(JS
     duration_ = PRMJ_Now() - timeStart;
 }
 
 void
 js::GCParallelTask::runFromHelperThread(AutoLockHelperThreadState& locked)
 {
     {
         AutoUnlockHelperThreadState parallelSection(locked);
+        gc::AutoSetThreadIsPerformingGC performingGC;
         uint64_t timeStart = PRMJ_Now();
         run();
         duration_ = PRMJ_Now() - timeStart;
     }
 
     state = Finished;
     HelperThreadState().notifyAll(GlobalHelperThreadState::CONSUMER, locked);
 }
--- a/js/src/vm/Runtime.cpp
+++ b/js/src/vm/Runtime.cpp
@@ -89,16 +89,17 @@ PerThreadData::PerThreadData(JSRuntime* 
   , traceLogger(nullptr)
 #endif
   , autoFlushICache_(nullptr)
   , dtoaState(nullptr)
   , suppressGC(0)
 #ifdef DEBUG
   , ionCompiling(false)
   , ionCompilingSafeForMinorGC(false)
+  , performingGC(false)
   , gcSweeping(false)
 #endif
 {}
 
 PerThreadData::~PerThreadData()
 {
     if (dtoaState)
         DestroyDtoaState(dtoaState);
@@ -844,33 +845,41 @@ JSRuntime::clearUsedByExclusiveThread(Zo
     MOZ_ASSERT(zone->usedByExclusiveThread);
     zone->usedByExclusiveThread = false;
     numExclusiveThreads--;
     if (gc.fullGCForAtomsRequested() && !keepAtoms())
         gc.triggerFullGCForAtoms();
 }
 
 bool
-js::CurrentThreadCanAccessRuntime(JSRuntime* rt)
+js::CurrentThreadCanAccessRuntime(const JSRuntime* rt)
 {
     return rt->ownerThread_ == js::ThisThread::GetId();
 }
 
 bool
 js::CurrentThreadCanAccessZone(Zone* zone)
 {
     if (CurrentThreadCanAccessRuntime(zone->runtime_))
         return true;
 
     // Only zones in use by an exclusive thread can be used off the main thread.
     // We don't keep track of which thread owns such zones though, so this check
     // is imperfect.
     return zone->usedByExclusiveThread;
 }
 
+#ifdef DEBUG
+bool
+js::CurrentThreadIsPerformingGC()
+{
+    return TlsPerThreadData.get()->performingGC;
+}
+#endif
+
 JS_FRIEND_API(void)
 JS::UpdateJSContextProfilerSampleBufferGen(JSContext* cx, uint32_t generation,
                                            uint32_t lapCount)
 {
     cx->setProfilerSampleBufferGen(generation);
     cx->updateProfilerSampleBufferLapCount(lapCount);
 }
 
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -293,17 +293,25 @@ class PerThreadData
     // Whether this thread is actively Ion compiling.
     bool ionCompiling;
 
     // Whether this thread is actively Ion compiling in a context where a minor
     // GC could happen simultaneously. If this is true, this thread cannot use
     // any pointers into the nursery.
     bool ionCompilingSafeForMinorGC;
 
-    // Whether this thread is currently sweeping GC things.
+    // Whether this thread is currently performing GC.  This thread could be the
+    // main thread or a helper thread while the main thread is running the
+    // collector.
+    bool performingGC;
+
+    // Whether this thread is currently sweeping GC things.  This thread could
+    // be the main thread or a helper thread while the main thread is running
+    // the mutator.  This is used to assert that destruction of GCPtr only
+    // happens when we are sweeping.
     bool gcSweeping;
 #endif
 
     // Pools used for recycling name maps and vectors when parsing and
     // emitting bytecode. Purged on GC when there are no active script
     // compilations.
     frontend::NameCollectionPool frontendCollectionPool;
 
@@ -696,17 +704,17 @@ struct JSRuntime : public JS::shadow::Ru
 
     /* Futex state, used by Atomics.wait() and Atomics.wake() on the Atomics object */
     js::FutexRuntime fx;
 
   private:
     /* See comment for JS_AbortIfWrongThread in jsapi.h. */
     js::Thread::Id ownerThread_;
     size_t ownerThreadNative_;
-    friend bool js::CurrentThreadCanAccessRuntime(JSRuntime* rt);
+    friend bool js::CurrentThreadCanAccessRuntime(const JSRuntime* rt);
   public:
 
     size_t ownerThreadNative() const {
         return ownerThreadNative_;
     }
 
     /* Temporary arena pool used while compiling and decompiling. */
     static const size_t TEMP_LIFO_ALLOC_PRIMARY_CHUNK_SIZE = 4 * 1024;
@@ -1281,16 +1289,20 @@ struct JSRuntime : public JS::shadow::Ru
         MOZ_ASSERT(rt->stackFormat_ != js::StackFormat::Default);
         return rt->stackFormat_;
     }
     void setStackFormat(js::StackFormat format) {
         MOZ_ASSERT(!parentRuntime);
         MOZ_ASSERT(format != js::StackFormat::Default);
         stackFormat_ = format;
     }
+
+    // For inherited heap state accessors.
+    friend class js::gc::AutoTraceSession;
+    friend class JS::AutoEnterCycleCollection;
 };
 
 namespace js {
 
 static inline JSContext*
 GetJSContextFromMainThread()
 {
     return js::TlsPerThreadData.get()->contextFromMainThread();
@@ -1631,16 +1643,39 @@ class MOZ_RAII AutoEnterIonCompilation
 #endif
     }
 
     MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
 };
 
 namespace gc {
 
+// In debug builds, set/unset the performing GC flag for the current thread.
+struct MOZ_RAII AutoSetThreadIsPerformingGC
+{
+#ifdef DEBUG
+    AutoSetThreadIsPerformingGC()
+      : threadData_(js::TlsPerThreadData.get())
+    {
+        MOZ_ASSERT(!threadData_->performingGC);
+        threadData_->performingGC = true;
+    }
+
+    ~AutoSetThreadIsPerformingGC() {
+        MOZ_ASSERT(threadData_->performingGC);
+        threadData_->performingGC = false;
+    }
+
+  private:
+    PerThreadData* threadData_;
+#else
+    AutoSetThreadIsPerformingGC() {}
+#endif
+};
+
 // In debug builds, set/unset the GC sweeping flag for the current thread.
 struct MOZ_RAII AutoSetThreadIsSweeping
 {
 #ifdef DEBUG
     AutoSetThreadIsSweeping()
       : threadData_(js::TlsPerThreadData.get())
     {
         MOZ_ASSERT(!threadData_->gcSweeping);
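
Finally, a minimal runnable sketch of what AutoSetThreadIsPerformingGC does, using a plain thread_local where the real code uses js::TlsPerThreadData (an illustrative substitution):

    #include <cassert>

    thread_local bool performingGC = false;  // stand-in for the PerThreadData flag

    struct AutoSetThreadIsPerformingGC {
        AutoSetThreadIsPerformingGC() {
            assert(!performingGC);  // GC scopes must not nest
            performingGC = true;
        }
        ~AutoSetThreadIsPerformingGC() {
            assert(performingGC);
            performingGC = false;
        }
    };

    static bool CurrentThreadIsPerformingGC() { return performingGC; }

    int main() {
        assert(!CurrentThreadIsPerformingGC());
        {
            // e.g. on entry to a GC slice or a parallel GC task
            AutoSetThreadIsPerformingGC performing;
            assert(CurrentThreadIsPerformingGC());
        }
        assert(!CurrentThreadIsPerformingGC());
    }

Because the flag is thread-local, GCParallelTask::runFromHelperThread can set it independently on each helper thread, which is why runFromHelperThread no longer needs to be virtual.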
--- a/js/src/vm/TypeInference.cpp
+++ b/js/src/vm/TypeInference.cpp
@@ -3500,17 +3500,17 @@ PreliminaryObjectArrayWithTemplate::trac
     TraceNullableEdge(trc, &shape_, "PreliminaryObjectArrayWithTemplate_shape");
 }
 
 /* static */ void
 PreliminaryObjectArrayWithTemplate::writeBarrierPre(PreliminaryObjectArrayWithTemplate* objects)
 {
     Shape* shape = objects->shape();
 
-    if (!shape || shape->runtimeFromAnyThread()->isHeapCollecting())
+    if (!shape)
         return;
 
     JS::Zone* zone = shape->zoneFromAnyThread();
     if (zone->needsIncrementalBarrier())
         objects->trace(zone->barrierTracer());
 }
 
 // Return whether shape consists entirely of plain data properties.
--- a/js/xpconnect/src/XPCWrappedNativeProto.cpp
+++ b/js/xpconnect/src/XPCWrappedNativeProto.cpp
@@ -110,17 +110,17 @@ XPCWrappedNativeProto::CallPostCreatePro
     }
 
     return true;
 }
 
 void
 XPCWrappedNativeProto::JSProtoObjectFinalized(js::FreeOp* fop, JSObject* obj)
 {
-    MOZ_ASSERT(obj == mJSProtoObject, "huh?");
+    MOZ_ASSERT(obj == mJSProtoObject.unbarrieredGet(), "huh?");
 
     // Only remove this proto from the map if it is the one in the map.
     ClassInfo2WrappedNativeProtoMap* map = GetScope()->GetWrappedNativeProtoMap();
     if (map->Find(mClassInfo) == this)
         map->Remove(mClassInfo);
 
     GetContext()->GetDyingWrappedNativeProtoMap()->Add(this);