Bug 1045940 - Rename needsBarrier to needsIncrementalBarrier; r=jonco
author: Terrence Cole <terrence@mozilla.com>
Mon, 28 Jul 2014 10:16:56 -0700
changeset 197206 b58d0320743263c18cd242c8b59c4994ac71eb61
parent 197205 8da59dd9fc7fe1422d1bf26fa169f8089034a57e
child 197207 81ce8a74aba12f11286db003ba17575d80675ca5
push id: 27236
push user: emorley@mozilla.com
push date: Fri, 01 Aug 2014 15:52:48 +0000
treeherder: mozilla-central@44e5072476b7
reviewers: jonco
bugs: 1045940
milestone: 34.0a1
Bug 1045940 - Rename needsBarrier to needsIncrementalBarrier; r=jonco
js/public/HeapAPI.h
js/src/gc/Barrier.h
js/src/gc/ForkJoinNursery.cpp
js/src/gc/Nursery.cpp
js/src/gc/Verifier.cpp
js/src/gc/Zone.cpp
js/src/gc/Zone.h
js/src/jit/BaselineCompiler.cpp
js/src/jit/BaselineIC.cpp
js/src/jit/BaselineJIT.cpp
js/src/jit/CodeGenerator.cpp
js/src/jit/CompileWrappers.cpp
js/src/jit/CompileWrappers.h
js/src/jit/Ion.cpp
js/src/jit/IonCaches.cpp
js/src/jit/IonMacroAssembler.h
js/src/jsfun.cpp
js/src/jsfun.h
js/src/jsgc.cpp
js/src/jsgcinlines.h
js/src/jsinferinlines.h
js/src/jsiter.cpp
js/src/jsobj.cpp
js/src/jsobj.h
js/src/jspropertytree.cpp
js/src/jspubtd.h
js/src/vm/ForkJoin.cpp
js/src/vm/ObjectImpl.h
js/src/vm/RegExpObject.cpp
js/src/vm/Runtime.cpp
js/src/vm/Runtime.h
js/src/vm/String.cpp
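
The predicate being renamed guards SpiderMonkey's incremental pre-write ("snapshot-at-the-beginning") barriers: while an incremental GC is in progress, a write to a traced field must first mark the value being overwritten, so that everything reachable at the start of the GC stays reachable. The minimal, self-contained sketch below shows the pattern the diff repeats across many call sites; all types and names here are hypothetical stand-ins, not the actual SpiderMonkey API.

#include <cassert>

// Hypothetical, stripped-down model of the flag this patch renames.
struct Zone {
    bool needsIncrementalBarrier_ = false;
    bool needsIncrementalBarrier() const { return needsIncrementalBarrier_; }
};

struct GCThing {
    Zone *zone;
    bool marked = false;
};

// Stand-in for js::gc::MarkUnbarriered: mark the overwritten referent.
static void MarkUnbarriered(GCThing *thing) { thing->marked = true; }

// The pre-write barrier pattern used throughout this patch: cheap flag test,
// then mark the old referent only while an incremental GC is in progress.
static void writeBarrierPre(GCThing *oldReferent) {
    if (!oldReferent || !oldReferent->zone->needsIncrementalBarrier())
        return;                   // fast path: no incremental GC running
    MarkUnbarriered(oldReferent); // keep the start-of-GC snapshot reachable
}

int main() {
    Zone zone;
    GCThing a{&zone}, b{&zone};
    GCThing *field = &a;

    writeBarrierPre(field);       // flag clear: no-op
    field = &b;
    assert(!a.marked);

    zone.needsIncrementalBarrier_ = true;
    writeBarrierPre(field);       // flag set: old value gets marked
    field = &a;
    assert(b.marked);
}

The new name also distinguishes this flag from the generational post-barrier machinery (the store buffer), which is controlled separately.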
--- a/js/public/HeapAPI.h
+++ b/js/public/HeapAPI.h
@@ -164,30 +164,30 @@ struct ArenaHeader
 
 struct Zone
 {
   protected:
     JSRuntime *const runtime_;
     JSTracer *const barrierTracer_;     // A pointer to the JSRuntime's |gcMarker|.
 
   public:
-    bool needsBarrier_;
+    bool needsIncrementalBarrier_;
 
     Zone(JSRuntime *runtime, JSTracer *barrierTracerArg)
       : runtime_(runtime),
         barrierTracer_(barrierTracerArg),
-        needsBarrier_(false)
+        needsIncrementalBarrier_(false)
     {}
 
-    bool needsBarrier() const {
-        return needsBarrier_;
+    bool needsIncrementalBarrier() const {
+        return needsIncrementalBarrier_;
     }
 
     JSTracer *barrierTracer() {
-        MOZ_ASSERT(needsBarrier_);
+        MOZ_ASSERT(needsIncrementalBarrier_);
         MOZ_ASSERT(js::CurrentThreadCanAccessRuntime(runtime_));
         return barrierTracer_;
     }
 
     JSRuntime *runtimeFromMainThread() const {
         MOZ_ASSERT(js::CurrentThreadCanAccessRuntime(runtime_));
         return runtime_;
     }
@@ -306,17 +306,17 @@ GCThingIsMarkedGray(void *thing)
 
 static MOZ_ALWAYS_INLINE bool
 IsIncrementalBarrierNeededOnTenuredGCThing(shadow::Runtime *rt, void *thing, JSGCTraceKind kind)
 {
     MOZ_ASSERT(thing);
 #ifdef JSGC_GENERATIONAL
     MOZ_ASSERT(!js::gc::IsInsideNursery((js::gc::Cell *)thing));
 #endif
-    if (!rt->needsBarrier_)
+    if (!rt->needsIncrementalBarrier())
         return false;
     JS::Zone *zone = GetTenuredGCThingZone(thing);
-    return reinterpret_cast<shadow::Zone *>(zone)->needsBarrier_;
+    return reinterpret_cast<shadow::Zone *>(zone)->needsIncrementalBarrier();
 }
 
 } /* namespace JS */
 
 #endif /* js_HeapAPI_h */
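
The hunk above keeps the two-level structure of the check: the runtime-wide flag is maintained as the OR of all per-zone flags, so a clear runtime flag lets IsIncrementalBarrierNeededOnTenuredGCThing bail out before looking up the thing's zone at all. A compilable sketch of that filtering, with hypothetical stand-in types:

#include <cassert>

struct ShadowZone {
    bool needsIncrementalBarrier_ = false;
    bool needsIncrementalBarrier() const { return needsIncrementalBarrier_; }
};

struct ShadowRuntime {
    bool needsIncrementalBarrier_ = false;
    bool needsIncrementalBarrier() const { return needsIncrementalBarrier_; }
};

static bool IsIncrementalBarrierNeeded(const ShadowRuntime *rt,
                                       const ShadowZone *zone) {
    if (!rt->needsIncrementalBarrier())
        return false;                       // fast filter: no zone needs it
    return zone->needsIncrementalBarrier(); // slow path: per-zone answer
}

int main() {
    ShadowRuntime rt;
    ShadowZone z1, z2;
    z1.needsIncrementalBarrier_ = true;     // one zone is mid-incremental-GC
    rt.needsIncrementalBarrier_ = true;     // runtime flag = OR of zone flags
    assert(IsIncrementalBarrierNeeded(&rt, &z1));
    assert(!IsIncrementalBarrierNeeded(&rt, &z2));
}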
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -202,43 +202,43 @@ class BarrieredCell : public gc::Cell
     MOZ_ALWAYS_INLINE JS::shadow::Zone *shadowZoneFromAnyThread() const {
         return JS::shadow::Zone::asShadowZone(zoneFromAnyThread());
     }
 
     static MOZ_ALWAYS_INLINE void readBarrier(T *thing) {
 #ifdef JSGC_INCREMENTAL
         JS_ASSERT(!CurrentThreadIsIonCompiling());
         JS::shadow::Zone *shadowZone = thing->shadowZoneFromAnyThread();
-        if (shadowZone->needsBarrier()) {
+        if (shadowZone->needsIncrementalBarrier()) {
             MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone));
             T *tmp = thing;
             js::gc::MarkUnbarriered<T>(shadowZone->barrierTracer(), &tmp, "read barrier");
             JS_ASSERT(tmp == thing);
         }
 #endif
     }
 
     static MOZ_ALWAYS_INLINE bool needWriteBarrierPre(JS::Zone *zone) {
 #ifdef JSGC_INCREMENTAL
-        return JS::shadow::Zone::asShadowZone(zone)->needsBarrier();
+        return JS::shadow::Zone::asShadowZone(zone)->needsIncrementalBarrier();
 #else
         return false;
 #endif
     }
 
     static MOZ_ALWAYS_INLINE bool isNullLike(T *thing) { return !thing; }
 
     static MOZ_ALWAYS_INLINE void writeBarrierPre(T *thing) {
 #ifdef JSGC_INCREMENTAL
         JS_ASSERT(!CurrentThreadIsIonCompiling());
-        if (isNullLike(thing) || !thing->shadowRuntimeFromAnyThread()->needsBarrier())
+        if (isNullLike(thing) || !thing->shadowRuntimeFromAnyThread()->needsIncrementalBarrier())
             return;
 
         JS::shadow::Zone *shadowZone = thing->shadowZoneFromAnyThread();
-        if (shadowZone->needsBarrier()) {
+        if (shadowZone->needsIncrementalBarrier()) {
             MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone));
             T *tmp = thing;
             js::gc::MarkUnbarriered<T>(shadowZone->barrierTracer(), &tmp, "write barrier");
             JS_ASSERT(tmp == thing);
         }
 #endif
     }
 
@@ -329,29 +329,29 @@ struct InternalGCMethods<Value>
         return reinterpret_cast<JS::shadow::Runtime*>(runtimeFromMainThread(v));
     }
 
     static bool isMarkable(Value v) { return v.isMarkable(); }
 
     static void preBarrier(Value v) {
 #ifdef JSGC_INCREMENTAL
         JS_ASSERT(!CurrentThreadIsIonCompiling());
-        if (v.isMarkable() && shadowRuntimeFromAnyThread(v)->needsBarrier())
+        if (v.isMarkable() && shadowRuntimeFromAnyThread(v)->needsIncrementalBarrier())
             preBarrier(ZoneOfValueFromAnyThread(v), v);
 #endif
     }
 
     static void preBarrier(Zone *zone, Value v) {
 #ifdef JSGC_INCREMENTAL
         JS_ASSERT(!CurrentThreadIsIonCompiling());
         if (v.isString() && StringIsPermanentAtom(v.toString()))
             return;
         JS::shadow::Zone *shadowZone = JS::shadow::Zone::asShadowZone(zone);
-        if (shadowZone->needsBarrier()) {
-            JS_ASSERT_IF(v.isMarkable(), shadowRuntimeFromMainThread(v)->needsBarrier());
+        if (shadowZone->needsIncrementalBarrier()) {
+            JS_ASSERT_IF(v.isMarkable(), shadowRuntimeFromMainThread(v)->needsIncrementalBarrier());
             Value tmp(v);
             js::gc::MarkValueUnbarriered(shadowZone->barrierTracer(), &tmp, "write barrier");
             JS_ASSERT(tmp == v);
         }
 #endif
     }
 
     static void postBarrier(Value *vp) {
@@ -395,24 +395,24 @@ struct InternalGCMethods<jsid>
 {
     static bool isMarkable(jsid id) { return JSID_IS_STRING(id) || JSID_IS_SYMBOL(id); }
 
     static void preBarrier(jsid id) {
 #ifdef JSGC_INCREMENTAL
         if (JSID_IS_STRING(id)) {
             JSString *str = JSID_TO_STRING(id);
             JS::shadow::Zone *shadowZone = ShadowZoneOfStringFromAnyThread(str);
-            if (shadowZone->needsBarrier()) {
+            if (shadowZone->needsIncrementalBarrier()) {
                 js::gc::MarkStringUnbarriered(shadowZone->barrierTracer(), &str, "write barrier");
                 JS_ASSERT(str == JSID_TO_STRING(id));
             }
         } else if (JSID_IS_SYMBOL(id)) {
             JS::Symbol *sym = JSID_TO_SYMBOL(id);
             JS::shadow::Zone *shadowZone = ShadowZoneOfSymbolFromAnyThread(sym);
-            if (shadowZone->needsBarrier()) {
+            if (shadowZone->needsIncrementalBarrier()) {
                 js::gc::MarkSymbolUnbarriered(shadowZone->barrierTracer(), &sym, "write barrier");
                 JS_ASSERT(sym == JSID_TO_SYMBOL(id));
             }
         }
 #endif
     }
     static void preBarrier(Zone *zone, jsid id) { preBarrier(id); }
 
--- a/js/src/gc/ForkJoinNursery.cpp
+++ b/js/src/gc/ForkJoinNursery.cpp
@@ -230,17 +230,17 @@ ForkJoinNursery::pjsCollection(int op)
     JS_ASSERT(!evacuationZone_);
     JS_ASSERT(!head_);
     JS_ASSERT(tail_ == &head_);
 
     JSRuntime *const rt = shared_->runtime();
     const unsigned currentNumActiveChunks_ = numActiveChunks_;
     const char *msg = "";
 
-    JS_ASSERT(!rt->needsBarrier());
+    JS_ASSERT(!rt->needsIncrementalBarrier());
 
     TIME_START(pjsCollection);
 
     rt->gc.incFJMinorCollecting();
     if (evacuate) {
         isEvacuating_ = true;
         evacuationZone_ = shared_->zone();
     }
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -336,47 +336,48 @@ class MinorCollectionTracer : public JST
 
     MinorCollectionTracer(JSRuntime *rt, Nursery *nursery)
       : JSTracer(rt, Nursery::MinorGCCallback, TraceWeakMapKeysValues),
         nursery(nursery),
         session(rt, MinorCollecting),
         tenuredSize(0),
         head(nullptr),
         tail(&head),
-        savedRuntimeNeedBarrier(rt->needsBarrier()),
+        savedRuntimeNeedBarrier(rt->needsIncrementalBarrier()),
         disableStrictProxyChecking(rt)
     {
         rt->gc.incGcNumber();
 
         /*
-         * We disable the runtime needsBarrier() check so that pre-barriers do
-         * not fire on objects that have been relocated. The pre-barrier's
-         * call to obj->zone() will try to look through shape_, which is now
-         * the relocation magic and will crash. However, zone->needsBarrier()
-         * must still be set correctly so that allocations we make in minor
-         * GCs between incremental slices will allocate their objects marked.
+         * We disable the runtime needsIncrementalBarrier() check so that
+         * pre-barriers do not fire on objects that have been relocated. The
+         * pre-barrier's call to obj->zone() will try to look through shape_,
+         * which is now the relocation magic and will crash. However,
+         * zone->needsIncrementalBarrier() must still be set correctly so that
+         * allocations we make in minor GCs between incremental slices will
+         * allocate their objects marked.
          */
-        rt->setNeedsBarrier(false);
+        rt->setNeedsIncrementalBarrier(false);
 
         /*
          * We use the live array buffer lists to track traced buffers so we can
          * sweep their dead views. Incremental collection also use these lists,
          * so we may need to save and restore their contents here.
          */
         if (rt->gc.state() != NO_INCREMENTAL) {
             for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
                 if (!ArrayBufferObject::saveArrayBufferList(c, liveArrayBuffers))
                     CrashAtUnhandlableOOM("OOM while saving live array buffers");
                 ArrayBufferObject::resetArrayBufferList(c);
             }
         }
     }
 
     ~MinorCollectionTracer() {
-        runtime()->setNeedsBarrier(savedRuntimeNeedBarrier);
+        runtime()->setNeedsIncrementalBarrier(savedRuntimeNeedBarrier);
         if (runtime()->gc.state() != NO_INCREMENTAL)
             ArrayBufferObject::restoreArrayBufferLists(liveArrayBuffers);
     }
 };
 
 } /* namespace gc */
 } /* namespace js */
 
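The comment in MinorCollectionTracer above motivates a save/clear/restore idiom: the runtime-wide flag is suppressed for the duration of the minor GC (so pre-barriers cannot fire on relocated objects) and the saved value is restored in the destructor, while the per-zone flags stay set. A hypothetical RAII sketch of the same idiom, not the actual SpiderMonkey class:

#include <cassert>

struct Runtime {
    bool needsIncrementalBarrier_ = false;
    bool needsIncrementalBarrier() const { return needsIncrementalBarrier_; }
    void setNeedsIncrementalBarrier(bool needs) { needsIncrementalBarrier_ = needs; }
};

class AutoSuppressIncrementalBarrier {
    Runtime *rt_;
    bool saved_;
  public:
    explicit AutoSuppressIncrementalBarrier(Runtime *rt)
      : rt_(rt), saved_(rt->needsIncrementalBarrier())
    {
        rt_->setNeedsIncrementalBarrier(false);  // barriers off during minor GC
    }
    ~AutoSuppressIncrementalBarrier() {
        rt_->setNeedsIncrementalBarrier(saved_); // restore prior state
    }
};

int main() {
    Runtime rt;
    rt.setNeedsIncrementalBarrier(true);         // mid incremental slice
    {
        AutoSuppressIncrementalBarrier guard(&rt);
        assert(!rt.needsIncrementalBarrier());   // suppressed inside the GC
    }
    assert(rt.needsIncrementalBarrier());        // restored on exit
}
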
--- a/js/src/gc/Verifier.cpp
+++ b/js/src/gc/Verifier.cpp
@@ -243,20 +243,20 @@ gc::GCRuntime::startVerifyPreBarriers()
 
         node = NextNode(node);
     }
 
     verifyPreData = trc;
     incrementalState = MARK;
     marker.start();
 
-    rt->setNeedsBarrier(true);
+    rt->setNeedsIncrementalBarrier(true);
     for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         PurgeJITCaches(zone);
-        zone->setNeedsBarrier(true, Zone::UpdateJit);
+        zone->setNeedsIncrementalBarrier(true, Zone::UpdateJit);
         zone->allocator.arenas.purge();
     }
 
     return;
 
 oom:
     incrementalState = NO_INCREMENTAL;
     js_delete(trc);
@@ -328,23 +328,23 @@ gc::GCRuntime::endVerifyPreBarriers()
     JS_ASSERT(!JS::IsGenerationalGCEnabled(rt));
 
     AutoPrepareForTracing prep(rt, SkipAtoms);
 
     bool compartmentCreated = false;
 
     /* We need to disable barriers before tracing, which may invoke barriers. */
     for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
-        if (!zone->needsBarrier())
+        if (!zone->needsIncrementalBarrier())
             compartmentCreated = true;
 
-        zone->setNeedsBarrier(false, Zone::UpdateJit);
+        zone->setNeedsIncrementalBarrier(false, Zone::UpdateJit);
         PurgeJITCaches(zone);
     }
-    rt->setNeedsBarrier(false);
+    rt->setNeedsIncrementalBarrier(false);
 
     /*
      * We need to bump gcNumber so that the methodjit knows that jitcode has
      * been discarded.
      */
     JS_ASSERT(trc->number == number);
     number++;
 
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -59,28 +59,28 @@ Zone::~Zone()
 
 bool Zone::init(bool isSystemArg)
 {
     isSystem = isSystemArg;
     return gcZoneGroupEdges.init();
 }
 
 void
-Zone::setNeedsBarrier(bool needs, ShouldUpdateJit updateJit)
+Zone::setNeedsIncrementalBarrier(bool needs, ShouldUpdateJit updateJit)
 {
     if (updateJit == UpdateJit && needs != jitUsingBarriers_) {
         jit::ToggleBarriers(this, needs);
         jitUsingBarriers_ = needs;
     }
 
     if (needs && runtimeFromMainThread()->isAtomsZone(this))
         JS_ASSERT(!runtimeFromMainThread()->exclusiveThreadsPresent());
 
     JS_ASSERT_IF(needs, canCollect());
-    needsBarrier_ = needs;
+    needsIncrementalBarrier_ = needs;
 }
 
 void
 Zone::resetGCMallocBytes()
 {
     gcMallocBytes = ptrdiff_t(gcMaxMallocBytes);
     gcMallocGCTriggered = false;
 }
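
Zone::setNeedsIncrementalBarrier above only touches jitcode when the requested state differs from what the jitcode was last patched to, since toggling generated barriers is expensive. A stand-alone sketch of that transition-only toggle, with hypothetical names:

#include <cassert>

static int toggleCount = 0;
static void ToggleJitBarriers(bool) { ++toggleCount; } // stand-in for jit::ToggleBarriers

struct Zone {
    enum ShouldUpdateJit { DontUpdateJit, UpdateJit };

    bool needsIncrementalBarrier_ = false;
    bool jitUsingBarriers_ = false;

    void setNeedsIncrementalBarrier(bool needs, ShouldUpdateJit updateJit) {
        if (updateJit == UpdateJit && needs != jitUsingBarriers_) {
            ToggleJitBarriers(needs);   // patch generated code only on transitions
            jitUsingBarriers_ = needs;
        }
        needsIncrementalBarrier_ = needs;
    }
};

int main() {
    Zone zone;
    zone.setNeedsIncrementalBarrier(true, Zone::UpdateJit);      // patches jitcode
    zone.setNeedsIncrementalBarrier(true, Zone::UpdateJit);      // no-op for jit
    zone.setNeedsIncrementalBarrier(false, Zone::DontUpdateJit); // C++ flag only
    assert(toggleCount == 1);
    assert(zone.jitUsingBarriers_ && !zone.needsIncrementalBarrier_);
}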
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -182,50 +182,51 @@ struct Zone : public JS::shadow::Zone,
         JS_ASSERT_IF(state != NoGC, canCollect());
         gcState_ = state;
     }
 
     bool isCollecting() const {
         if (runtimeFromMainThread()->isHeapCollecting())
             return gcState_ != NoGC;
         else
-            return needsBarrier();
+            return needsIncrementalBarrier();
     }
 
     // If this returns true, all object tracing must be done with a GC marking
     // tracer.
     bool requireGCTracer() const {
         return runtimeFromMainThread()->isHeapMajorCollecting() && gcState_ != NoGC;
     }
 
     bool isGCMarking() {
         if (runtimeFromMainThread()->isHeapCollecting())
             return gcState_ == Mark || gcState_ == MarkGray;
         else
-            return needsBarrier();
+            return needsIncrementalBarrier();
     }
 
     bool wasGCStarted() const { return gcState_ != NoGC; }
     bool isGCMarkingBlack() { return gcState_ == Mark; }
     bool isGCMarkingGray() { return gcState_ == MarkGray; }
     bool isGCSweeping() { return gcState_ == Sweep; }
     bool isGCFinished() { return gcState_ == Finished; }
 
     // Get a number that is incremented whenever this zone is collected, and
     // possibly at other times too.
     uint64_t gcNumber();
 
-    bool compileBarriers() const { return compileBarriers(needsBarrier()); }
-    bool compileBarriers(bool needsBarrier) const {
-        return needsBarrier || runtimeFromMainThread()->gcZeal() == js::gc::ZealVerifierPreValue;
+    bool compileBarriers() const { return compileBarriers(needsIncrementalBarrier()); }
+    bool compileBarriers(bool needsIncrementalBarrier) const {
+        return needsIncrementalBarrier ||
+               runtimeFromMainThread()->gcZeal() == js::gc::ZealVerifierPreValue;
     }
 
     enum ShouldUpdateJit { DontUpdateJit, UpdateJit };
-    void setNeedsBarrier(bool needs, ShouldUpdateJit updateJit);
-    const bool *addressOfNeedsBarrier() const { return &needsBarrier_; }
+    void setNeedsIncrementalBarrier(bool needs, ShouldUpdateJit updateJit);
+    const bool *addressOfNeedsIncrementalBarrier() const { return &needsIncrementalBarrier_; }
 
     js::jit::JitZone *getJitZone(JSContext *cx) { return jitZone_ ? jitZone_ : createJitZone(cx); }
     js::jit::JitZone *jitZone() { return jitZone_; }
 
 #ifdef DEBUG
     // For testing purposes, return the index of the zone group which this zone
     // was swept in in the last GC.
     unsigned lastZoneGroupIndex() { return gcLastZoneGroupIndex; }
--- a/js/src/jit/BaselineCompiler.cpp
+++ b/js/src/jit/BaselineCompiler.cpp
@@ -222,17 +222,17 @@ BaselineCompiler::compile()
                                            ImmPtr(entryAddr),
                                            ImmPtr((void*)-1));
     }
 
     if (modifiesArguments_)
         baselineScript->setModifiesArguments();
 
     // All barriers are emitted off-by-default, toggle them on if needed.
-    if (cx->zone()->needsBarrier())
+    if (cx->zone()->needsIncrementalBarrier())
         baselineScript->toggleBarriers(true);
 
     // All SPS instrumentation is emitted toggled off.  Toggle them on if needed.
     if (cx->runtime()->spsProfiler.enabled())
         baselineScript->toggleSPS(true);
 
     uint32_t *bytecodeMap = baselineScript->bytecodeTypeMap();
     types::FillBytecodeTypeMap(script, bytecodeMap);
--- a/js/src/jit/BaselineIC.cpp
+++ b/js/src/jit/BaselineIC.cpp
@@ -476,17 +476,17 @@ ICFallbackStub::unlinkStub(Zone *zone, I
             JS_ASSERT(icEntry()->firstStub() == stub);
             icEntry()->setFirstStub(stub->next());
         }
     }
 
     JS_ASSERT(numOptimizedStubs_ > 0);
     numOptimizedStubs_--;
 
-    if (zone->needsBarrier()) {
+    if (zone->needsIncrementalBarrier()) {
         // We are removing edges from ICStub to gcthings. Perform one final trace
         // of the stub for incremental GC, as it must know about those edges.
         stub->trace(zone->barrierTracer());
     }
 
     if (ICStub::CanMakeCalls(stub->kind()) && stub->isMonitored()) {
         // This stub can make calls so we can return to it if it's on the stack.
         // We just have to reset its firstMonitorStub_ field to avoid a stale
@@ -513,17 +513,17 @@ ICFallbackStub::unlinkStubsWithKind(JSCo
         if (iter->kind() == kind)
             iter.unlink(cx);
     }
 }
 
 void
 ICTypeMonitor_Fallback::resetMonitorStubChain(Zone *zone)
 {
-    if (zone->needsBarrier()) {
+    if (zone->needsIncrementalBarrier()) {
         // We are removing edges from monitored stubs to gcthings (JitCode).
         // Perform one final trace of all monitor stubs for incremental GC,
         // as it must know about those edges.
         for (ICStub *s = firstMonitorStub_; !s->isTypeMonitor_Fallback(); s = s->next())
             s->trace(zone->barrierTracer());
     }
 
     firstMonitorStub_ = this;
@@ -616,17 +616,17 @@ ICStubCompiler::getStubCode()
     if (!newStubCode)
         return nullptr;
 
     // After generating code, run postGenerateStubCode()
     if (!postGenerateStubCode(masm, newStubCode))
         return nullptr;
 
     // All barriers are emitted off-by-default, enable them if needed.
-    if (cx->zone()->needsBarrier())
+    if (cx->zone()->needsIncrementalBarrier())
         newStubCode->togglePreBarriers(true);
 
     // Cache newly compiled stubcode.
     if (!comp->putStubCode(stubKey, newStubCode))
         return nullptr;
 
     JS_ASSERT(entersStubFrame_ == ICStub::CanMakeCalls(kind));
 
--- a/js/src/jit/BaselineJIT.cpp
+++ b/js/src/jit/BaselineJIT.cpp
@@ -417,17 +417,17 @@ BaselineScript::trace(JSTracer *trc)
     }
 }
 
 /* static */
 void
 BaselineScript::writeBarrierPre(Zone *zone, BaselineScript *script)
 {
 #ifdef JSGC_INCREMENTAL
-    if (zone->needsBarrier())
+    if (zone->needsIncrementalBarrier())
         script->trace(zone->barrierTracer());
 #endif
 }
 
 void
 BaselineScript::Trace(JSTracer *trc, BaselineScript *script)
 {
     script->trace(trc);
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -5918,17 +5918,17 @@ CodeGenerator::emitArrayPopShift(LInstru
     } else {
         JS_ASSERT(mir->mode() == MArrayPopShift::Shift);
         ool = oolCallVM(ArrayShiftDenseInfo, lir, (ArgList(), obj), StoreValueTo(out));
         if (!ool)
             return false;
     }
 
     // VM call if a write barrier is necessary.
-    masm.branchTestNeedsBarrier(Assembler::NonZero, ool->entry());
+    masm.branchTestNeedsIncrementalBarrier(Assembler::NonZero, ool->entry());
 
     // Load elements and length.
     masm.loadPtr(Address(obj, JSObject::offsetOfElements()), elementsTemp);
     masm.load32(Address(elementsTemp, ObjectElements::offsetOfLength()), lengthTemp);
 
     // VM call if length != initializedLength.
     Int32Key key = Int32Key(lengthTemp);
     Address initLength(elementsTemp, ObjectElements::offsetOfInitializedLength());
@@ -6181,17 +6181,17 @@ CodeGenerator::visitIteratorStart(LItera
 #ifdef JSGC_GENERATIONAL
         // Bug 867815: When using a nursery, we unconditionally take this out-
         // of-line so that we do not have to post-barrier the store to
         // NativeIter::obj. This just needs JIT support for the Cell* buffer.
         Address objAddr(niTemp, offsetof(NativeIterator, obj));
         masm.branchPtr(Assembler::NotEqual, objAddr, obj, ool->entry());
 #else
         Label noBarrier;
-        masm.branchTestNeedsBarrier(Assembler::Zero, &noBarrier);
+        masm.branchTestNeedsIncrementalBarrier(Assembler::Zero, &noBarrier);
 
         Address objAddr(niTemp, offsetof(NativeIterator, obj));
         masm.branchPtr(Assembler::NotEqual, objAddr, obj, ool->entry());
 
         masm.bind(&noBarrier);
 #endif // !JSGC_GENERATIONAL
     }
 
@@ -6821,17 +6821,17 @@ CodeGenerator::link(JSContext *cx, types
     }
 #endif
 
     switch (executionMode) {
       case SequentialExecution:
         // The correct state for prebarriers is unknown until the end of compilation,
         // since a GC can occur during code generation. All barriers are emitted
         // off-by-default, and are toggled on here if necessary.
-        if (cx->zone()->needsBarrier())
+        if (cx->zone()->needsIncrementalBarrier())
             ionScript->toggleBarriers(true);
         break;
       case ParallelExecution:
         // We don't run incremental GC during parallel execution; no need to
         // turn on barriers.
         break;
       default:
         MOZ_ASSUME_UNREACHABLE("No such execution mode");
--- a/js/src/jit/CompileWrappers.cpp
+++ b/js/src/jit/CompileWrappers.cpp
@@ -186,19 +186,19 @@ CompileZone::zone()
 
 /* static */ CompileZone *
 CompileZone::get(Zone *zone)
 {
     return reinterpret_cast<CompileZone *>(zone);
 }
 
 const void *
-CompileZone::addressOfNeedsBarrier()
+CompileZone::addressOfNeedsIncrementalBarrier()
 {
-    return zone()->addressOfNeedsBarrier();
+    return zone()->addressOfNeedsIncrementalBarrier();
 }
 
 const void *
 CompileZone::addressOfFreeListFirst(gc::AllocKind allocKind)
 {
     return zone()->allocator.arenas.getFreeList(allocKind)->addressOfFirst();
 }
 
--- a/js/src/jit/CompileWrappers.h
+++ b/js/src/jit/CompileWrappers.h
@@ -86,17 +86,17 @@ class CompileRuntime
 
 class CompileZone
 {
     Zone *zone();
 
   public:
     static CompileZone *get(Zone *zone);
 
-    const void *addressOfNeedsBarrier();
+    const void *addressOfNeedsIncrementalBarrier();
 
     // allocator.arenas.getFreeList(allocKind)
     const void *addressOfFreeListFirst(gc::AllocKind allocKind);
     const void *addressOfFreeListLast(gc::AllocKind allocKind);
 };
 
 class CompileCompartment
 {
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -963,17 +963,17 @@ IonScript::trace(JSTracer *trc)
     for (size_t i = 0; i < callTargetEntries(); i++)
         gc::MarkScriptUnbarriered(trc, &callTargetList()[i], "callTarget");
 }
 
 /* static */ void
 IonScript::writeBarrierPre(Zone *zone, IonScript *ionScript)
 {
 #ifdef JSGC_INCREMENTAL
-    if (zone->needsBarrier())
+    if (zone->needsIncrementalBarrier())
         ionScript->trace(zone->barrierTracer());
 #endif
 }
 
 void
 IonScript::copySnapshots(const SnapshotWriter *writer)
 {
     MOZ_ASSERT(writer->listSize() == snapshotsListSize_);
@@ -2611,17 +2611,17 @@ InvalidateActivation(FreeOp *fop, uint8_
         // snapshot after the call occurs.
 
         ionScript->incref();
 
         const SafepointIndex *si = ionScript->getSafepointIndex(it.returnAddressToFp());
         JitCode *ionCode = ionScript->method();
 
         JS::Zone *zone = script->zone();
-        if (zone->needsBarrier()) {
+        if (zone->needsIncrementalBarrier()) {
             // We're about to remove edges from the JSScript to gcthings
             // embedded in the JitCode. Perform one final trace of the
             // JitCode for the incremental GC, as it must know about
             // those edges.
             ionCode->trace(zone->barrierTracer());
         }
         ionCode->setInvalidated();
 
--- a/js/src/jit/IonCaches.cpp
+++ b/js/src/jit/IonCaches.cpp
@@ -1985,27 +1985,27 @@ GenerateSetSlot(JSContext *cx, MacroAsse
             masm.guardTypeSet(valReg, propTypes, BarrierKind::TypeSet, scratchReg, &barrierFailure);
             masm.pop(object);
         }
     }
 
     if (obj->isFixedSlot(shape->slot())) {
         Address addr(object, JSObject::getFixedSlotOffset(shape->slot()));
 
-        if (cx->zone()->needsBarrier())
+        if (cx->zone()->needsIncrementalBarrier())
             masm.callPreBarrier(addr, MIRType_Value);
 
         masm.storeConstantOrRegister(value, addr);
     } else {
         Register slotsReg = object;
         masm.loadPtr(Address(object, JSObject::offsetOfSlots()), slotsReg);
 
         Address addr(slotsReg, obj->dynamicSlotIndex(shape->slot()) * sizeof(Value));
 
-        if (cx->zone()->needsBarrier())
+        if (cx->zone()->needsIncrementalBarrier())
             masm.callPreBarrier(addr, MIRType_Value);
 
         masm.storeConstantOrRegister(value, addr);
     }
 
     attacher.jumpRejoin(masm);
 
     if (barrierFailure.used()) {
@@ -2556,17 +2556,17 @@ GenerateAddSlot(JSContext *cx, MacroAsse
         proto = proto->getProto();
     }
 
     masm.pop(object);     // restore object reg
 
     // Changing object shape.  Write the object's new shape.
     Shape *newShape = obj->lastProperty();
     Address shapeAddr(object, JSObject::offsetOfShape());
-    if (cx->zone()->needsBarrier())
+    if (cx->zone()->needsIncrementalBarrier())
         masm.callPreBarrier(shapeAddr, MIRType_Shape);
     masm.storePtr(ImmGCPtr(newShape), shapeAddr);
 
     // Set the value on the object. Since this is an add, obj->lastProperty()
     // must be the shape of the property we are adding.
     if (obj->isFixedSlot(newShape->slot())) {
         Address addr(object, JSObject::getFixedSlotOffset(newShape->slot()));
         masm.storeConstantOrRegister(value, addr);
@@ -3665,17 +3665,17 @@ GenerateSetDenseElement(JSContext *cx, M
                 // Restore the index.
                 masm.bumpKey(&newLength, -1);
                 masm.jump(&storeElement);
             }
             // else
             masm.bind(&markElem);
         }
 
-        if (cx->zone()->needsBarrier())
+        if (cx->zone()->needsIncrementalBarrier())
             masm.callPreBarrier(target, MIRType_Value);
 
         // Store the value.
         if (guardHoles)
             masm.branchTestMagic(Assembler::Equal, target, &failures);
         else
             masm.bind(&storeElement);
         StoreDenseElement(masm, value, elements, target);
--- a/js/src/jit/IonMacroAssembler.h
+++ b/js/src/jit/IonMacroAssembler.h
@@ -629,20 +629,20 @@ class MacroAssembler : public MacroAssem
     template<typename T>
     void branchKey(Condition cond, const T &length, const Int32Key &key, Label *label) {
         if (key.isRegister())
             branch32(cond, length, key.reg(), label);
         else
             branch32(cond, length, Imm32(key.constant()), label);
     }
 
-    void branchTestNeedsBarrier(Condition cond, Label *label) {
+    void branchTestNeedsIncrementalBarrier(Condition cond, Label *label) {
         JS_ASSERT(cond == Zero || cond == NonZero);
         CompileZone *zone = GetIonContext()->compartment->zone();
-        AbsoluteAddress needsBarrierAddr(zone->addressOfNeedsBarrier());
+        AbsoluteAddress needsBarrierAddr(zone->addressOfNeedsIncrementalBarrier());
         branchTest32(cond, needsBarrierAddr, Imm32(0x1), label);
     }
 
     template <typename T>
     void callPreBarrier(const T &address, MIRType type) {
         JS_ASSERT(type == MIRType_Value ||
                   type == MIRType_String ||
                   type == MIRType_Symbol ||
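
branchTestNeedsIncrementalBarrier above bakes the address of the zone's flag into emitted code and tests that byte at run time, which is why CompileZone exposes addressOfNeedsIncrementalBarrier(): flipping the bool later changes the behavior of already-compiled code without repatching it. A C++ analogy of that indirection, with hypothetical types standing in for jitcode:

#include <cassert>

struct Zone {
    bool needsIncrementalBarrier_ = false;
    const bool *addressOfNeedsIncrementalBarrier() const {
        return &needsIncrementalBarrier_;
    }
};

// Stand-in for an emitted stub: holds the baked-in address from "compile
// time" and tests it much later, at "run time".
struct CompiledStub {
    const bool *flagAddr;
    bool wouldTakeBarrierPath() const { return *flagAddr; }
};

int main() {
    Zone zone;
    CompiledStub stub{zone.addressOfNeedsIncrementalBarrier()}; // compile time
    assert(!stub.wouldTakeBarrierPath());
    zone.needsIncrementalBarrier_ = true;    // GC starts an incremental slice
    assert(stub.wouldTakeBarrierPath());     // same code, new answer
}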
--- a/js/src/jsfun.cpp
+++ b/js/src/jsfun.cpp
@@ -1186,17 +1186,17 @@ JSFunction::getBoundFunctionArgumentCoun
 /* static */ bool
 JSFunction::createScriptForLazilyInterpretedFunction(JSContext *cx, HandleFunction fun)
 {
     JS_ASSERT(fun->isInterpretedLazy());
 
     Rooted<LazyScript*> lazy(cx, fun->lazyScriptOrNull());
     if (lazy) {
         // Trigger a pre barrier on the lazy script being overwritten.
-        if (cx->zone()->needsBarrier())
+        if (cx->zone()->needsIncrementalBarrier())
             LazyScript::writeBarrierPre(lazy);
 
         // Suppress GC for now although we should be able to remove this by
         // making 'lazy' a Rooted<LazyScript*> (which requires adding a
         // THING_ROOT_LAZY_SCRIPT).
         AutoSuppressGC suppressGC(cx);
 
         RootedScript script(cx, lazy->maybeScript());
--- a/js/src/jsfun.h
+++ b/js/src/jsfun.h
@@ -296,17 +296,17 @@ class JSFunction : public JSObject
             // Baseline code it has not been relazified. Note that we can't
             // use lazyScript->script_ here as it may be null in some cases,
             // see bug 976536.
             js::LazyScript *lazy = lazyScript();
             JSFunction *fun = lazy->functionNonDelazifying();
             MOZ_ASSERT(fun);
             JSScript *script = fun->nonLazyScript();
 
-            if (shadowZone()->needsBarrier())
+            if (shadowZone()->needsIncrementalBarrier())
                 js::LazyScript::writeBarrierPre(lazy);
 
             flags_ &= ~INTERPRETED_LAZY;
             flags_ |= INTERPRETED;
             initScript(script);
         }
         return nonLazyScript();
     }
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1828,17 +1828,17 @@ ArenaLists::prepareForIncrementalGC(JSRu
             rt->gc.marker.delayMarkingArena(aheader);
         }
     }
 }
 
 inline void
 GCRuntime::arenaAllocatedDuringGC(JS::Zone *zone, ArenaHeader *arena)
 {
-    if (zone->needsBarrier()) {
+    if (zone->needsIncrementalBarrier()) {
         arena->allocatedDuringIncremental = true;
         marker.delayMarkingArena(arena);
     } else if (zone->isGCSweeping()) {
         arena->setNextAllocDuringSweep(arenasAllocatedDuringSweep);
         arenasAllocatedDuringSweep = arena;
     }
 }
 
@@ -3656,18 +3656,18 @@ GCRuntime::finishMarkingValidation()
 }
 
 static void
 AssertNeedsBarrierFlagsConsistent(JSRuntime *rt)
 {
 #ifdef JS_GC_MARKING_VALIDATION
     bool anyNeedsBarrier = false;
     for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next())
-        anyNeedsBarrier |= zone->needsBarrier();
-    JS_ASSERT(rt->needsBarrier() == anyNeedsBarrier);
+        anyNeedsBarrier |= zone->needsIncrementalBarrier();
+    JS_ASSERT(rt->needsIncrementalBarrier() == anyNeedsBarrier);
 #endif
 }
 
 static void
 DropStringWrappers(JSRuntime *rt)
 {
     /*
      * String "wrappers" are dropped on GC because their presence would require
@@ -3818,21 +3818,21 @@ GCRuntime::getNextZoneGroup()
     if (!isIncremental)
         ComponentFinder<Zone>::mergeGroups(currentZoneGroup);
 
     if (abortSweepAfterCurrentGroup) {
         JS_ASSERT(!isIncremental);
         for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) {
             JS_ASSERT(!zone->gcNextGraphComponent);
             JS_ASSERT(zone->isGCMarking());
-            zone->setNeedsBarrier(false, Zone::UpdateJit);
+            zone->setNeedsIncrementalBarrier(false, Zone::UpdateJit);
             zone->setGCState(Zone::NoGC);
             zone->gcGrayRoots.clearAndFree();
         }
-        rt->setNeedsBarrier(false);
+        rt->setNeedsIncrementalBarrier(false);
         AssertNeedsBarrierFlagsConsistent(rt);
 
         for (GCCompartmentGroupIter comp(rt); !comp.done(); comp.next()) {
             ArrayBufferObject::resetArrayBufferList(comp);
             ResetGrayList(comp);
         }
 
         abortSweepAfterCurrentGroup = false;
@@ -4637,20 +4637,20 @@ GCRuntime::resetIncrementalGC(const char
 
         for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
             ArrayBufferObject::resetArrayBufferList(c);
             ResetGrayList(c);
         }
 
         for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
             JS_ASSERT(zone->isGCMarking());
-            zone->setNeedsBarrier(false, Zone::UpdateJit);
+            zone->setNeedsIncrementalBarrier(false, Zone::UpdateJit);
             zone->setGCState(Zone::NoGC);
         }
-        rt->setNeedsBarrier(false);
+        rt->setNeedsIncrementalBarrier(false);
         AssertNeedsBarrierFlagsConsistent(rt);
 
         incrementalState = NO_INCREMENTAL;
 
         JS_ASSERT(!marker.shouldCheckCompartments());
 
         break;
       }
@@ -4677,17 +4677,17 @@ GCRuntime::resetIncrementalGC(const char
 
     stats.reset(reason);
 
 #ifdef DEBUG
     for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next())
         JS_ASSERT(c->gcLiveArrayBuffers.empty());
 
     for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
-        JS_ASSERT(!zone->needsBarrier());
+        JS_ASSERT(!zone->needsIncrementalBarrier());
         for (unsigned i = 0; i < FINALIZE_LIMIT; ++i)
             JS_ASSERT(!zone->allocator.arenas.arenaListsToSweep[i]);
     }
 #endif
 }
 
 namespace {
 
@@ -4711,46 +4711,46 @@ AutoGCSlice::AutoGCSlice(JSRuntime *rt)
      * is set at the beginning of the mark phase. During incremental GC, we also
      * set it at the start of every phase.
      */
     for (ActivationIterator iter(rt); !iter.done(); ++iter)
         iter->compartment()->zone()->active = true;
 
     for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
         /*
-         * Clear needsBarrier early so we don't do any write barriers during
-         * GC. We don't need to update the Ion barriers (which is expensive)
-         * because Ion code doesn't run during GC. If need be, we'll update the
-         * Ion barriers in ~AutoGCSlice.
+         * Clear needsIncrementalBarrier early so we don't do any write
+         * barriers during GC. We don't need to update the Ion barriers (which
+         * is expensive) because Ion code doesn't run during GC. If need be,
+         * we'll update the Ion barriers in ~AutoGCSlice.
          */
         if (zone->isGCMarking()) {
-            JS_ASSERT(zone->needsBarrier());
-            zone->setNeedsBarrier(false, Zone::DontUpdateJit);
+            JS_ASSERT(zone->needsIncrementalBarrier());
+            zone->setNeedsIncrementalBarrier(false, Zone::DontUpdateJit);
         } else {
-            JS_ASSERT(!zone->needsBarrier());
+            JS_ASSERT(!zone->needsIncrementalBarrier());
         }
     }
-    rt->setNeedsBarrier(false);
+    rt->setNeedsIncrementalBarrier(false);
     AssertNeedsBarrierFlagsConsistent(rt);
 }
 
 AutoGCSlice::~AutoGCSlice()
 {
     /* We can't use GCZonesIter if this is the end of the last slice. */
     bool haveBarriers = false;
     for (ZonesIter zone(runtime, WithAtoms); !zone.done(); zone.next()) {
         if (zone->isGCMarking()) {
-            zone->setNeedsBarrier(true, Zone::UpdateJit);
+            zone->setNeedsIncrementalBarrier(true, Zone::UpdateJit);
             zone->allocator.arenas.prepareForIncrementalGC(runtime);
             haveBarriers = true;
         } else {
-            zone->setNeedsBarrier(false, Zone::UpdateJit);
+            zone->setNeedsIncrementalBarrier(false, Zone::UpdateJit);
         }
     }
-    runtime->setNeedsBarrier(haveBarriers);
+    runtime->setNeedsIncrementalBarrier(haveBarriers);
     AssertNeedsBarrierFlagsConsistent(runtime);
 }
 
 void
 GCRuntime::pushZealSelectedObjects()
 {
 #ifdef JS_GC_ZEAL
     /* Push selected objects onto the mark stack and clear the list. */
@@ -5097,17 +5097,17 @@ gcstats::ZoneGCStats
 GCRuntime::scanZonesBeforeGC()
 {
     gcstats::ZoneGCStats zoneStats;
     for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         if (mode == JSGC_MODE_GLOBAL)
             zone->scheduleGC();
 
         /* This is a heuristic to avoid resets. */
-        if (incrementalState != NO_INCREMENTAL && zone->needsBarrier())
+        if (incrementalState != NO_INCREMENTAL && zone->needsIncrementalBarrier())
             zone->scheduleGC();
 
         zoneStats.zoneCount++;
         if (zone->isGCScheduled())
             zoneStats.collectedCount++;
     }
 
     for (CompartmentsIter c(rt, WithAtoms); !c.done(); c.next())
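
AssertNeedsBarrierFlagsConsistent and ~AutoGCSlice above together maintain the invariant that the runtime-wide flag equals the OR of the per-zone flags, so the cheap runtime-level test never yields a false negative. A small sketch of that invariant, with hypothetical stand-ins:

#include <cassert>
#include <vector>

struct Zone { bool needsIncrementalBarrier = false; };

struct Runtime {
    std::vector<Zone> zones;
    bool needsIncrementalBarrier = false;

    // What ~AutoGCSlice does: runtime flag = OR of zone flags.
    void recomputeRuntimeFlag() {
        bool any = false;
        for (const Zone &z : zones)
            any |= z.needsIncrementalBarrier;
        needsIncrementalBarrier = any;
    }
    // What AssertNeedsBarrierFlagsConsistent checks.
    void assertFlagsConsistent() const {
        bool any = false;
        for (const Zone &z : zones)
            any |= z.needsIncrementalBarrier;
        assert(needsIncrementalBarrier == any);
    }
};

int main() {
    Runtime rt;
    rt.zones.resize(3);
    rt.zones[1].needsIncrementalBarrier = true; // one zone still marking
    rt.recomputeRuntimeFlag();
    rt.assertFlagsConsistent();
    assert(rt.needsIncrementalBarrier);
}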
--- a/js/src/jsgcinlines.h
+++ b/js/src/jsgcinlines.h
@@ -67,18 +67,18 @@ ThreadSafeContext::isThreadLocal(T thing
     // Global invariant
     JS_ASSERT(!IsInsideNursery(thing));
 
     // The thing is not in the nursery, but is it in the private tenured area?
     if (allocator_->arenas.containsArena(runtime_, thing->arenaHeader()))
     {
         // GC should be suppressed in preparation for mutating thread local
         // objects, as we don't want to trip any barriers.
-        JS_ASSERT(!thing->zoneFromAnyThread()->needsBarrier());
-        JS_ASSERT(!thing->runtimeFromAnyThread()->needsBarrier());
+        JS_ASSERT(!thing->zoneFromAnyThread()->needsIncrementalBarrier());
+        JS_ASSERT(!thing->runtimeFromAnyThread()->needsIncrementalBarrier());
 
         return true;
     }
 
     return false;
 }
 
 namespace gc {
--- a/js/src/jsinferinlines.h
+++ b/js/src/jsinferinlines.h
@@ -1289,21 +1289,21 @@ TypeObjectAddendum::writeBarrierPre(Type
     }
 #endif
 }
 
 inline void
 TypeNewScript::writeBarrierPre(TypeNewScript *newScript)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!newScript || !newScript->fun->runtimeFromAnyThread()->needsBarrier())
+    if (!newScript || !newScript->fun->runtimeFromAnyThread()->needsIncrementalBarrier())
         return;
 
     JS::Zone *zone = newScript->fun->zoneFromAnyThread();
-    if (zone->needsBarrier()) {
+    if (zone->needsIncrementalBarrier()) {
         MarkObject(zone->barrierTracer(), &newScript->fun, "write barrier");
         MarkObject(zone->barrierTracer(), &newScript->templateObject, "write barrier");
     }
 #endif
 }
 
 } } /* namespace js::types */
 
--- a/js/src/jsiter.cpp
+++ b/js/src/jsiter.cpp
@@ -1581,17 +1581,17 @@ MarkGeneratorFrame(JSTracer *trc, JSGene
                    HeapValueify(gen->regs.sp),
                    "Generator Floating Stack");
 }
 
 static void
 GeneratorWriteBarrierPre(JSContext *cx, JSGenerator *gen)
 {
     JS::Zone *zone = cx->zone();
-    if (zone->needsBarrier())
+    if (zone->needsIncrementalBarrier())
         MarkGeneratorFrame(zone->barrierTracer(), gen);
 }
 
 static void
 GeneratorWriteBarrierPost(JSContext *cx, JSGenerator *gen)
 {
 #ifdef JSGC_GENERATIONAL
     cx->runtime()->gc.storeBuffer.putWholeCellFromMainThread(gen->obj);
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -2514,17 +2514,17 @@ JSObject::TradeGuts(JSContext *cx, JSObj
      * solves this.
      *
      * Normally write barriers happen before the write. However, that's not
      * necessary here because nothing is being destroyed. We're just swapping.
      * We don't do the barrier before TradeGuts because ReserveForTradeGuts
      * makes changes to the objects that might confuse the tracing code.
      */
     JS::Zone *zone = a->zone();
-    if (zone->needsBarrier()) {
+    if (zone->needsIncrementalBarrier()) {
         MarkChildren(zone->barrierTracer(), a);
         MarkChildren(zone->barrierTracer(), b);
     }
 #endif
 }
 
 /* Use this method with extreme caution. It trades the guts of two objects. */
 bool
--- a/js/src/jsobj.h
+++ b/js/src/jsobj.h
@@ -695,17 +695,17 @@ class JSObject : public js::ObjectImpl
          * 3. Incremental GC finishes by marking slots 1 and 2 (i.e., C).
          *
          * Since normal marking never happens on B, it is very important that the
          * write barrier is invoked here on B, despite the fact that it exists in
          * the array before and after the move.
         */
         JS::Zone *zone = this->zone();
         JS::shadow::Zone *shadowZone = JS::shadow::Zone::asShadowZone(zone);
-        if (shadowZone->needsBarrier()) {
+        if (shadowZone->needsIncrementalBarrier()) {
             if (dstStart < srcStart) {
                 js::HeapSlot *dst = elements + dstStart;
                 js::HeapSlot *src = elements + srcStart;
                 for (uint32_t i = 0; i < count; i++, dst++, src++)
                     dst->set(zone, this, js::HeapSlot::Element, dst - elements, *src);
             } else {
                 js::HeapSlot *dst = elements + dstStart + count - 1;
                 js::HeapSlot *src = elements + srcStart + count - 1;
@@ -714,17 +714,17 @@ class JSObject : public js::ObjectImpl
             }
         } else {
             memmove(elements + dstStart, elements + srcStart, count * sizeof(js::HeapSlot));
             DenseRangeWriteBarrierPost(runtimeFromMainThread(), this, dstStart, count);
         }
     }
 
     void moveDenseElementsNoPreBarrier(uint32_t dstStart, uint32_t srcStart, uint32_t count) {
-        JS_ASSERT(!shadowZone()->needsBarrier());
+        JS_ASSERT(!shadowZone()->needsIncrementalBarrier());
 
         JS_ASSERT(dstStart + count <= getDenseCapacity());
         JS_ASSERT(srcStart + count <= getDenseCapacity());
 
         memmove(elements + dstStart, elements + srcStart, count * sizeof(js::Value));
         DenseRangeWriteBarrierPost(runtimeFromMainThread(), this, dstStart, count);
     }
 
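The moveDenseElements hunk above chooses between a per-slot, pre-barriered copy (while the zone needs the incremental barrier) and a plain memmove otherwise. A simplified sketch of that choice follows; the types are hypothetical, and unlike the real code it does not pick a copy direction for overlapping ranges:

#include <cassert>
#include <cstring>
#include <vector>

struct Slot {
    int value = 0;
    static int barriersFired;
    void barrieredSet(int v) { ++barriersFired; value = v; } // pre-barrier + store
};
int Slot::barriersFired = 0;

static void moveSlots(Slot *elems, std::size_t dst, std::size_t src,
                      std::size_t count, bool needsIncrementalBarrier) {
    if (needsIncrementalBarrier) {
        for (std::size_t i = 0; i < count; i++)  // slow, barriered copy
            elems[dst + i].barrieredSet(elems[src + i].value);
    } else {
        std::memmove(elems + dst, elems + src, count * sizeof(Slot));
    }
}

int main() {
    std::vector<Slot> elems(8);
    for (int i = 0; i < 8; i++)
        elems[i].value = i;
    moveSlots(elems.data(), 0, 4, 4, false);  // no marking: raw memmove
    assert(Slot::barriersFired == 0 && elems[0].value == 4);
    moveSlots(elems.data(), 4, 0, 4, true);   // marking: barrier per slot
    assert(Slot::barriersFired == 4);
}
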
--- a/js/src/jspropertytree.cpp
+++ b/js/src/jspropertytree.cpp
@@ -151,17 +151,17 @@ PropertyTree::getChild(ExclusiveContext 
         existingShape = *p;
     } else {
         /* If kidp->isNull(), we always insert. */
     }
 
 #ifdef JSGC_INCREMENTAL
     if (existingShape) {
         JS::Zone *zone = existingShape->zone();
-        if (zone->needsBarrier()) {
+        if (zone->needsIncrementalBarrier()) {
             /*
              * We need a read barrier for the shape tree, since these are weak
              * pointers.
              */
             Shape *tmp = existingShape;
             MarkShapeUnbarriered(zone->barrierTracer(), &tmp, "read barrier");
             JS_ASSERT(tmp == existingShape);
         } else if (zone->isGCSweeping() && !existingShape->isMarked() &&
@@ -213,17 +213,17 @@ PropertyTree::lookupChild(ThreadSafeCont
             shape = *p;
     } else {
         return nullptr;
     }
 
 #if defined(JSGC_INCREMENTAL) && defined(DEBUG)
     if (shape) {
         JS::Zone *zone = shape->arenaHeader()->zone;
-        JS_ASSERT(!zone->needsBarrier());
+        JS_ASSERT(!zone->needsIncrementalBarrier());
         JS_ASSERT(!(zone->isGCSweeping() && !shape->isMarked() &&
                     !shape->arenaHeader()->allocatedDuringIncremental));
     }
 #endif
 
     return shape;
 }
 
--- a/js/src/jspubtd.h
+++ b/js/src/jspubtd.h
@@ -168,37 +168,37 @@ namespace JS {
 
 typedef void (*OffThreadCompileCallback)(void *token, void *callbackData);
 
 namespace shadow {
 
 struct Runtime
 {
     /* Restrict zone access during Minor GC. */
-    bool needsBarrier_;
+    bool needsIncrementalBarrier_;
 
 #ifdef JSGC_GENERATIONAL
   private:
     js::gc::StoreBuffer *gcStoreBufferPtr_;
 #endif
 
   public:
     explicit Runtime(
 #ifdef JSGC_GENERATIONAL
         js::gc::StoreBuffer *storeBuffer
 #endif
     )
-      : needsBarrier_(false)
+      : needsIncrementalBarrier_(false)
 #ifdef JSGC_GENERATIONAL
       , gcStoreBufferPtr_(storeBuffer)
 #endif
     {}
 
-    bool needsBarrier() const {
-        return needsBarrier_;
+    bool needsIncrementalBarrier() const {
+        return needsIncrementalBarrier_;
     }
 
 #ifdef JSGC_GENERATIONAL
     js::gc::StoreBuffer *gcStoreBufferPtr() { return gcStoreBufferPtr_; }
 #endif
 
     static JS::shadow::Runtime *asShadowRuntime(JSRuntime *rt) {
         return reinterpret_cast<JS::shadow::Runtime*>(rt);
--- a/js/src/vm/ForkJoin.cpp
+++ b/js/src/vm/ForkJoin.cpp
@@ -354,18 +354,18 @@ ForkJoinActivation::ForkJoinActivation(J
         JS::PrepareForIncrementalGC(cx->runtime());
         JS::FinishIncrementalGC(cx->runtime(), JS::gcreason::API);
     }
 
     MinorGC(cx->runtime(), JS::gcreason::API);
 
     cx->runtime()->gc.waitBackgroundSweepEnd();
 
-    JS_ASSERT(!cx->runtime()->needsBarrier());
-    JS_ASSERT(!cx->zone()->needsBarrier());
+    JS_ASSERT(!cx->runtime()->needsIncrementalBarrier());
+    JS_ASSERT(!cx->zone()->needsIncrementalBarrier());
 }
 
 ForkJoinActivation::~ForkJoinActivation()
 {
     cx_->perThreadData->jitTop = prevJitTop_;
 }
 
 ///////////////////////////////////////////////////////////////////////////
--- a/js/src/vm/ObjectImpl.h
+++ b/js/src/vm/ObjectImpl.h
@@ -1001,17 +1001,17 @@ BarrieredCell<ObjectImpl>::writeBarrierP
 
 } // namespace gc
 
 inline void
 ObjectImpl::privateWriteBarrierPre(void **oldval)
 {
 #ifdef JSGC_INCREMENTAL
     JS::shadow::Zone *shadowZone = this->shadowZoneFromAnyThread();
-    if (shadowZone->needsBarrier()) {
+    if (shadowZone->needsIncrementalBarrier()) {
         if (*oldval && getClass()->trace)
             getClass()->trace(shadowZone->barrierTracer(), this->asObjectPtr());
     }
 #endif
 }
 
 inline Value
 ObjectValue(ObjectImpl &obj)
--- a/js/src/vm/RegExpObject.cpp
+++ b/js/src/vm/RegExpObject.cpp
@@ -212,17 +212,17 @@ VectorMatchPairs::allocOrExpandArray(siz
 }
 
 /* RegExpObject */
 
 static inline void
 MaybeTraceRegExpShared(JSContext *cx, RegExpShared *shared)
 {
     Zone *zone = cx->zone();
-    if (zone->needsBarrier())
+    if (zone->needsIncrementalBarrier())
         shared->trace(zone->barrierTracer());
 }
 
 bool
 RegExpObject::getShared(JSContext *cx, RegExpGuard *g)
 {
     if (RegExpShared *shared = maybeShared()) {
         // Fetching a RegExpShared from an object requires a read
--- a/js/src/vm/Runtime.cpp
+++ b/js/src/vm/Runtime.cpp
@@ -700,17 +700,17 @@ JSRuntime::onOutOfMemoryCanGC(void *p, s
     largeAllocationFailureCallback(largeAllocationFailureCallbackData);
     return onOutOfMemory(p, bytes);
 }
 
 bool
 JSRuntime::activeGCInAtomsZone()
 {
     Zone *zone = atomsCompartment_->zone();
-    return zone->needsBarrier() || zone->isGCScheduled() || zone->wasGCStarted();
+    return zone->needsIncrementalBarrier() || zone->isGCScheduled() || zone->wasGCStarted();
 }
 
 void
 JSRuntime::setUsedByExclusiveThread(Zone *zone)
 {
     JS_ASSERT(!zone->usedByExclusiveThread);
     zone->usedByExclusiveThread = true;
     numExclusiveThreads++;
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -977,18 +977,18 @@ struct JSRuntime : public JS::shadow::Ru
         gc.unlockGC();
     }
 
 #if defined(JS_ARM_SIMULATOR) || defined(JS_MIPS_SIMULATOR)
     js::jit::SimulatorRuntime *simulatorRuntime_;
 #endif
 
   public:
-    void setNeedsBarrier(bool needs) {
-        needsBarrier_ = needs;
+    void setNeedsIncrementalBarrier(bool needs) {
+        needsIncrementalBarrier_ = needs;
     }
 
 #if defined(JS_ARM_SIMULATOR) || defined(JS_MIPS_SIMULATOR)
     js::jit::SimulatorRuntime *simulatorRuntime() const;
     void setSimulatorRuntime(js::jit::SimulatorRuntime *srt);
 #endif
 
     /* Strong references on scripts held for PCCount profiling API. */
--- a/js/src/vm/String.cpp
+++ b/js/src/vm/String.cpp
@@ -451,17 +451,17 @@ JSRope::flattenInternal(ExclusiveContext
         return flattenInternal<b, jschar>(maybecx);
     return flattenInternal<b, Latin1Char>(maybecx);
 }
 
 JSFlatString *
 JSRope::flatten(ExclusiveContext *maybecx)
 {
 #ifdef JSGC_INCREMENTAL
-    if (zone()->needsBarrier())
+    if (zone()->needsIncrementalBarrier())
         return flattenInternal<WithIncrementalBarrier>(maybecx);
 #endif
     return flattenInternal<NoBarrier>(maybecx);
 }
 
 template <AllowGC allowGC>
 JSString *
 js::ConcatStrings(ThreadSafeContext *cx,