Bug 1107349 - Always build in generational GC support; r=jonco
author Terrence Cole <terrence@mozilla.com>
Thu, 04 Dec 2014 09:45:05 -0800
changeset 218911 3d0a1db612fa5455a07ece3e00b89cd3318a2367
parent 218910 9a7e59858dc68ea4063a8daa76cc00bd64327bfd
child 218912 97df7d232e7f9c1fa855f7823bd5ace4a44ae195
push id 27949
push user cbook@mozilla.com
push date Wed, 10 Dec 2014 10:50:45 +0000
treeherder mozilla-central@551c3cd74dbd
reviewers jonco
bugs 1107349
milestone 37.0a1
Bug 1107349 - Always build in generational GC support; r=jonco
b2g/app/b2g.js
b2g/confvars.sh
browser/confvars.sh
dom/xbl/nsXBLMaybeCompiled.h
js/public/GCAPI.h
js/public/HeapAPI.h
js/public/Id.h
js/public/RootingAPI.h
js/public/Value.h
js/src/builtin/MapObject.cpp
js/src/builtin/TestingFunctions.cpp
js/src/builtin/TypedObject.cpp
js/src/configure.in
js/src/gc/Barrier.h
js/src/gc/ForkJoinNursery.h
js/src/gc/GCRuntime.h
js/src/gc/Marking.cpp
js/src/gc/Nursery-inl.h
js/src/gc/Nursery.cpp
js/src/gc/Nursery.h
js/src/gc/StoreBuffer.cpp
js/src/gc/StoreBuffer.h
js/src/gc/Verifier.cpp
js/src/jit/BaselineCompiler.cpp
js/src/jit/BaselineCompiler.h
js/src/jit/BaselineDebugModeOSR.cpp
js/src/jit/BaselineIC.cpp
js/src/jit/BaselineIC.h
js/src/jit/BaselineJIT.cpp
js/src/jit/CodeGenerator.cpp
js/src/jit/CompileWrappers.cpp
js/src/jit/CompileWrappers.h
js/src/jit/IonBuilder.cpp
js/src/jit/JitFrames.cpp
js/src/jit/JitFrames.h
js/src/jit/Linker.h
js/src/jit/Lowering.cpp
js/src/jit/MacroAssembler.cpp
js/src/jit/VMFunctions.cpp
js/src/jit/VMFunctions.h
js/src/jit/arm/MacroAssembler-arm.cpp
js/src/jit/arm/MacroAssembler-arm.h
js/src/jit/mips/MacroAssembler-mips.cpp
js/src/jit/mips/MacroAssembler-mips.h
js/src/jit/none/MacroAssembler-none.h
js/src/jit/shared/Assembler-shared.h
js/src/jit/x64/MacroAssembler-x64.cpp
js/src/jit/x64/MacroAssembler-x64.h
js/src/jit/x86/MacroAssembler-x86.cpp
js/src/jit/x86/MacroAssembler-x86.h
js/src/js-config.h.in
js/src/jsapi-tests/testGCHeapPostBarriers.cpp
js/src/jsapi-tests/testGCStoreBufferRemoval.cpp
js/src/jsapi-tests/testIsInsideNursery.cpp
js/src/jscntxt.h
js/src/jscompartment.cpp
js/src/jsfriendapi.cpp
js/src/jsfriendapi.h
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsgcinlines.h
js/src/jshashutil.h
js/src/jsinfer.cpp
js/src/jsinfer.h
js/src/jsobj.cpp
js/src/jsobj.h
js/src/jspropertytree.cpp
js/src/jspubtd.h
js/src/jsweakmap.cpp
js/src/moz.build
js/src/shell/js.cpp
js/src/vm/Debugger.cpp
js/src/vm/NativeObject-inl.h
js/src/vm/NativeObject.cpp
js/src/vm/NativeObject.h
js/src/vm/Runtime.cpp
js/src/vm/Runtime.h
js/src/vm/ScopeObject.cpp
js/src/vm/ScopeObject.h
js/src/vm/Shape.cpp
js/src/vm/Shape.h
mobile/android/app/mobile.js
mobile/android/confvars.sh
modules/libpref/init/all.js
xpcom/glue/tests/gtest/TestGCPostBarriers.cpp
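
(Not part of the patch: a minimal sketch of what the change means for callers.) With the --disable-gcgenerational configure option and the JSGC_GENERATIONAL define removed, any leftover #ifdef JSGC_GENERATIONAL guard would silently compile its body out, so the diff below deletes the guards and calls the now-unconditional APIs directly. The helper name here is hypothetical:

    #include "js/HeapAPI.h"

    // Nursery checks no longer need a JSGC_GENERATIONAL guard: IsInsideNursery()
    // is always compiled in and returns false for a null cell (see the
    // js/public/HeapAPI.h hunk below).
    static bool
    CellMayNeedPostBarrier(js::gc::Cell *cell)
    {
        // Previously: #ifdef JSGC_GENERATIONAL ... #else return false; #endif
        return js::gc::IsInsideNursery(cell);
    }
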
--- a/b2g/app/b2g.js
+++ b/b2g/app/b2g.js
@@ -672,21 +672,17 @@ pref("javascript.options.mem.gc_high_fre
 pref("javascript.options.mem.gc_high_frequency_heap_growth_max", 300);
 pref("javascript.options.mem.gc_high_frequency_heap_growth_min", 120);
 pref("javascript.options.mem.gc_high_frequency_high_limit_mb", 40);
 pref("javascript.options.mem.gc_high_frequency_low_limit_mb", 0);
 pref("javascript.options.mem.gc_low_frequency_heap_growth", 120);
 pref("javascript.options.mem.high_water_mark", 6);
 pref("javascript.options.mem.gc_allocation_threshold_mb", 1);
 pref("javascript.options.mem.gc_decommit_threshold_mb", 1);
-#ifdef JSGC_GENERATIONAL
 pref("javascript.options.mem.gc_min_empty_chunk_count", 1);
-#else
-pref("javascript.options.mem.gc_min_empty_chunk_count", 0);
-#endif
 pref("javascript.options.mem.gc_max_empty_chunk_count", 2);
 
 // Show/Hide scrollbars when active/inactive
 pref("ui.showHideScrollbars", 1);
 pref("ui.useOverlayScrollbars", 1);
 pref("ui.scrollbarFadeBeginDelay", 450);
 pref("ui.scrollbarFadeDuration", 200);
 
--- a/b2g/confvars.sh
+++ b/b2g/confvars.sh
@@ -60,10 +60,9 @@ if test "$OS_TARGET" = "Android"; then
 MOZ_NUWA_PROCESS=1
 MOZ_B2G_LOADER=1
 fi
 
 MOZ_JSDOWNLOADS=1
 
 MOZ_BUNDLED_FONTS=1
 
-export JSGC_GENERATIONAL=1
 export JS_GC_SMALL_CHUNK_SIZE=1
--- a/browser/confvars.sh
+++ b/browser/confvars.sh
@@ -57,10 +57,8 @@ MOZ_WEBAPP_RUNTIME=1
 MOZ_MEDIA_NAVIGATOR=1
 MOZ_WEBGL_CONFORMANT=1
 # Enable navigator.mozPay
 MOZ_PAY=1
 # Enable activities. These are used for FxOS developers currently.
 MOZ_ACTIVITIES=1
 MOZ_JSDOWNLOADS=1
 MOZ_WEBM_ENCODER=1
-# Enable generational GC on desktop.
-export JSGC_GENERATIONAL=1
--- a/dom/xbl/nsXBLMaybeCompiled.h
+++ b/dom/xbl/nsXBLMaybeCompiled.h
@@ -96,27 +96,25 @@ struct GCMethods<nsXBLMaybeCompiled<Unco
     return function.IsCompiled() && Base::poisoned(function.GetJSFunction());
   }
 
   static bool needsPostBarrier(nsXBLMaybeCompiled<UncompiledT> function)
   {
     return function.IsCompiled() && Base::needsPostBarrier(function.GetJSFunction());
   }
 
-#ifdef JSGC_GENERATIONAL
   static void postBarrier(nsXBLMaybeCompiled<UncompiledT>* functionp)
   {
     Base::postBarrier(&functionp->UnsafeGetJSFunction());
   }
 
   static void relocate(nsXBLMaybeCompiled<UncompiledT>* functionp)
   {
     Base::relocate(&functionp->UnsafeGetJSFunction());
   }
-#endif
 };
 
 template <class UncompiledT>
 class HeapBase<nsXBLMaybeCompiled<UncompiledT> >
 {
   const JS::Heap<nsXBLMaybeCompiled<UncompiledT> >& wrapper() const {
     return *static_cast<const JS::Heap<nsXBLMaybeCompiled<UncompiledT> >*>(this);
   }
--- a/js/public/GCAPI.h
+++ b/js/public/GCAPI.h
@@ -344,17 +344,17 @@ WasIncrementalGC(JSRuntime *rt);
  *       is non-functional unless SpiderMonkey was configured with
  *       --enable-gcgenerational.
  */
 
 /* Ensure that generational GC is disabled within some scope. */
 class JS_FRIEND_API(AutoDisableGenerationalGC)
 {
     js::gc::GCRuntime *gc;
-#if defined(JSGC_GENERATIONAL) && defined(JS_GC_ZEAL)
+#ifdef JS_GC_ZEAL
     bool restartVerifier;
 #endif
 
   public:
     explicit AutoDisableGenerationalGC(JSRuntime *rt);
     ~AutoDisableGenerationalGC();
 };
 
@@ -494,42 +494,38 @@ namespace js {
 namespace gc {
 
 static MOZ_ALWAYS_INLINE void
 ExposeGCThingToActiveJS(JS::GCCellPtr thing)
 {
     MOZ_ASSERT(thing.kind() != JSTRACE_SHAPE);
 
     JS::shadow::Runtime *rt = GetGCThingRuntime(thing.asCell());
-#ifdef JSGC_GENERATIONAL
     /*
      * GC things residing in the nursery cannot be gray: they have no mark bits.
      * All live objects in the nursery are moved to tenured at the beginning of
      * each GC slice, so the gray marker never sees nursery things.
      */
     if (IsInsideNursery(thing.asCell()))
         return;
-#endif
     if (IsIncrementalBarrierNeededOnTenuredGCThing(rt, thing))
         JS::IncrementalReferenceBarrier(thing);
     else if (JS::GCThingIsMarkedGray(thing.asCell()))
         JS::UnmarkGrayGCThingRecursively(thing.asCell(), thing.kind());
 }
 
 static MOZ_ALWAYS_INLINE void
 MarkGCThingAsLive(JSRuntime *aRt, JS::GCCellPtr thing)
 {
     JS::shadow::Runtime *rt = JS::shadow::Runtime::asShadowRuntime(aRt);
-#ifdef JSGC_GENERATIONAL
     /*
      * Any object in the nursery will not be freed during any GC running at that time.
      */
     if (IsInsideNursery(thing.asCell()))
         return;
-#endif
     if (IsIncrementalBarrierNeededOnTenuredGCThing(rt, thing))
         JS::IncrementalReferenceBarrier(thing);
 }
 
 } /* namespace gc */
 } /* namespace js */
 
 namespace JS {
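
(Usage sketch, not from the patch.) The AutoDisableGenerationalGC class declared in the hunk above is an RAII guard; while it is alive, generational collection is suppressed, which roughly means the nursery is evicted and new GC things are tenure-allocated until the guard goes away. The wrapper function below is hypothetical:

    #include "js/GCAPI.h"

    // Hypothetical wrapper: run work that must not observe nursery-allocated
    // GC things. The guard's destructor restores the previous state.
    static void
    RunWithoutGenerationalGC(JSRuntime *rt)
    {
        JS::AutoDisableGenerationalGC noGGC(rt);
        // ... allocation-heavy work; GC things allocated here are tenured ...
    }
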
--- a/js/public/HeapAPI.h
+++ b/js/public/HeapAPI.h
@@ -336,78 +336,68 @@ GetGCThingArena(void *thing)
     uintptr_t addr = uintptr_t(thing);
     addr &= ~js::gc::ArenaMask;
     return reinterpret_cast<JS::shadow::ArenaHeader *>(addr);
 }
 
 MOZ_ALWAYS_INLINE bool
 IsInsideNursery(const js::gc::Cell *cell)
 {
-#ifdef JSGC_GENERATIONAL
     if (!cell)
         return false;
     uintptr_t addr = uintptr_t(cell);
     addr &= ~js::gc::ChunkMask;
     addr |= js::gc::ChunkLocationOffset;
     uint32_t location = *reinterpret_cast<uint32_t *>(addr);
     MOZ_ASSERT(location != 0);
     return location & ChunkLocationAnyNursery;
-#else
-    return false;
-#endif
 }
 
 } /* namespace gc */
 } /* namespace js */
 
 namespace JS {
 
 static MOZ_ALWAYS_INLINE Zone *
 GetTenuredGCThingZone(void *thing)
 {
     MOZ_ASSERT(thing);
-#ifdef JSGC_GENERATIONAL
     MOZ_ASSERT(!js::gc::IsInsideNursery((js::gc::Cell *)thing));
-#endif
     return js::gc::GetGCThingArena(thing)->zone;
 }
 
 extern JS_PUBLIC_API(Zone *)
 GetObjectZone(JSObject *obj);
 
 static MOZ_ALWAYS_INLINE bool
 GCThingIsMarkedGray(void *thing)
 {
     MOZ_ASSERT(thing);
-#ifdef JSGC_GENERATIONAL
     /*
      * GC things residing in the nursery cannot be gray: they have no mark bits.
      * All live objects in the nursery are moved to tenured at the beginning of
      * each GC slice, so the gray marker never sees nursery things.
      */
     if (js::gc::IsInsideNursery((js::gc::Cell *)thing))
         return false;
-#endif
     uintptr_t *word, mask;
     js::gc::GetGCThingMarkWordAndMask(thing, js::gc::GRAY, &word, &mask);
     return *word & mask;
 }
 
 } /* namespace JS */
 
 namespace js {
 namespace gc {
 
 static MOZ_ALWAYS_INLINE bool
 IsIncrementalBarrierNeededOnTenuredGCThing(JS::shadow::Runtime *rt, const JS::GCCellPtr thing)
 {
     MOZ_ASSERT(thing);
-#ifdef JSGC_GENERATIONAL
     MOZ_ASSERT(!js::gc::IsInsideNursery(thing.asCell()));
-#endif
     if (!rt->needsIncrementalBarrier())
         return false;
     JS::Zone *zone = JS::GetTenuredGCThingZone(thing.asCell());
     return JS::shadow::Zone::asShadowZone(zone)->needsIncrementalBarrier();
 }
 
 } /* namespace gc */
 } /* namespace js */
--- a/js/public/Id.h
+++ b/js/public/Id.h
@@ -177,19 +177,17 @@ IsPoisonedId(jsid id)
     return false;
 }
 
 template <> struct GCMethods<jsid>
 {
     static jsid initial() { return JSID_VOID; }
     static bool poisoned(jsid id) { return IsPoisonedId(id); }
     static bool needsPostBarrier(jsid id) { return false; }
-#ifdef JSGC_GENERATIONAL
     static void postBarrier(jsid *idp) {}
     static void relocate(jsid *idp) {}
-#endif
 };
 
 #undef id
 
 }
 
 #endif /* js_Id_h */
--- a/js/public/RootingAPI.h
+++ b/js/public/RootingAPI.h
@@ -162,20 +162,18 @@ JS_FRIEND_API(bool) isGCEnabled();
  *   foo(JS::NullPtr());
  * which avoids creating a Rooted<JSObject*> just to pass nullptr.
  */
 struct JS_PUBLIC_API(NullPtr)
 {
     static void * const constNullValue;
 };
 
-#ifdef JSGC_GENERATIONAL
 JS_FRIEND_API(void) HeapCellPostBarrier(js::gc::Cell **cellp);
 JS_FRIEND_API(void) HeapCellRelocate(js::gc::Cell **cellp);
-#endif
 
 #ifdef JS_DEBUG
 /*
  * For generational GC, assert that an object is in the tenured generation as
  * opposed to being in the nursery.
  */
 extern JS_FRIEND_API(void)
 AssertGCThingMustBeTenured(JSObject* obj);
@@ -279,26 +277,22 @@ class Heap : public js::HeapBase<T>
     void init(T newPtr) {
         MOZ_ASSERT(!js::GCMethods<T>::poisoned(newPtr));
         ptr = newPtr;
         if (js::GCMethods<T>::needsPostBarrier(ptr))
             post();
     }
 
     void post() {
-#ifdef JSGC_GENERATIONAL
         MOZ_ASSERT(js::GCMethods<T>::needsPostBarrier(ptr));
         js::GCMethods<T>::postBarrier(&ptr);
-#endif
     }
 
     void relocate() {
-#ifdef JSGC_GENERATIONAL
         js::GCMethods<T>::relocate(&ptr);
-#endif
     }
 
     enum {
         crashOnTouchPointer = 1
     };
 
     T ptr;
 };
@@ -651,62 +645,56 @@ struct RootKind<T *>
 };
 
 template <typename T>
 struct GCMethods<T *>
 {
     static T *initial() { return nullptr; }
     static bool poisoned(T *v) { return JS::IsPoisonedPtr(v); }
     static bool needsPostBarrier(T *v) { return false; }
-#ifdef JSGC_GENERATIONAL
     static void postBarrier(T **vp) {}
     static void relocate(T **vp) {}
-#endif
 };
 
 template <>
 struct GCMethods<JSObject *>
 {
     static JSObject *initial() { return nullptr; }
     static bool poisoned(JSObject *v) { return JS::IsPoisonedPtr(v); }
     static gc::Cell *asGCThingOrNull(JSObject *v) {
         if (!v)
             return nullptr;
         MOZ_ASSERT(uintptr_t(v) > 32);
         return reinterpret_cast<gc::Cell *>(v);
     }
     static bool needsPostBarrier(JSObject *v) {
         return v != nullptr && gc::IsInsideNursery(reinterpret_cast<gc::Cell *>(v));
     }
-#ifdef JSGC_GENERATIONAL
     static void postBarrier(JSObject **vp) {
         JS::HeapCellPostBarrier(reinterpret_cast<js::gc::Cell **>(vp));
     }
     static void relocate(JSObject **vp) {
         JS::HeapCellRelocate(reinterpret_cast<js::gc::Cell **>(vp));
     }
-#endif
 };
 
 template <>
 struct GCMethods<JSFunction *>
 {
     static JSFunction *initial() { return nullptr; }
     static bool poisoned(JSFunction *v) { return JS::IsPoisonedPtr(v); }
     static bool needsPostBarrier(JSFunction *v) {
         return v != nullptr && gc::IsInsideNursery(reinterpret_cast<gc::Cell *>(v));
     }
-#ifdef JSGC_GENERATIONAL
     static void postBarrier(JSFunction **vp) {
         JS::HeapCellPostBarrier(reinterpret_cast<js::gc::Cell **>(vp));
     }
     static void relocate(JSFunction **vp) {
         JS::HeapCellRelocate(reinterpret_cast<js::gc::Cell **>(vp));
     }
-#endif
 };
 
 #ifdef JS_DEBUG
 /* This helper allows us to assert that Rooted<T> is scoped within a request. */
 extern JS_PUBLIC_API(bool)
 IsInRequest(JSContext *cx);
 #endif
 
--- a/js/public/Value.h
+++ b/js/public/Value.h
@@ -1625,22 +1625,20 @@ SameType(const Value &lhs, const Value &
 {
     return JSVAL_SAME_TYPE_IMPL(lhs.data, rhs.data);
 }
 
 } // namespace JS
 
 /************************************************************************/
 
-#ifdef JSGC_GENERATIONAL
 namespace JS {
 JS_PUBLIC_API(void) HeapValuePostBarrier(Value *valuep);
 JS_PUBLIC_API(void) HeapValueRelocate(Value *valuep);
 }
-#endif
 
 namespace js {
 
 template <> struct GCMethods<const JS::Value>
 {
     static JS::Value initial() { return JS::UndefinedValue(); }
     static bool poisoned(const JS::Value &v) {
         return v.isMarkable() && JS::IsPoisonedPtr(v.toGCThing());
@@ -1654,20 +1652,18 @@ template <> struct GCMethods<JS::Value>
         return v.isMarkable() && JS::IsPoisonedPtr(v.toGCThing());
     }
     static gc::Cell *asGCThingOrNull(const JS::Value &v) {
         return v.isMarkable() ? v.toGCThing() : nullptr;
     }
     static bool needsPostBarrier(const JS::Value &v) {
         return v.isObject() && gc::IsInsideNursery(reinterpret_cast<gc::Cell*>(&v.toObject()));
     }
-#ifdef JSGC_GENERATIONAL
     static void postBarrier(JS::Value *v) { JS::HeapValuePostBarrier(v); }
     static void relocate(JS::Value *v) { JS::HeapValueRelocate(v); }
-#endif
 };
 
 template <class Outer> class MutableValueOperations;
 
 /*
  * A class designed for CRTP use in implementing the non-mutating parts of the
  * Value interface in Value-like classes.  Outer must be a class inheriting
  * ValueOperations<Outer> with a visible extract() method returning the
--- a/js/src/builtin/MapObject.cpp
+++ b/js/src/builtin/MapObject.cpp
@@ -1108,17 +1108,16 @@ MapObject::mark(JSTracer *trc, JSObject 
     if (ValueMap *map = obj->as<MapObject>().getData()) {
         for (ValueMap::Range r = map->all(); !r.empty(); r.popFront()) {
             MarkKey(r, r.front().key, trc);
             gc::MarkValue(trc, &r.front().value, "value");
         }
     }
 }
 
-#ifdef JSGC_GENERATIONAL
 struct UnbarrieredHashPolicy {
     typedef Value Lookup;
     static HashNumber hash(const Lookup &v) { return v.asRawBits(); }
     static bool match(const Value &k, const Lookup &l) { return k == l; }
     static bool isEmpty(const Value &v) { return v.isMagic(JS_HASH_KEY_EMPTY); }
     static void makeEmpty(Value *vp) { vp->setMagic(JS_HASH_KEY_EMPTY); }
 };
 
@@ -1134,40 +1133,35 @@ class OrderedHashTableRef : public gc::B
     void mark(JSTracer *trc) {
         MOZ_ASSERT(UnbarrieredHashPolicy::hash(key) ==
                    HashableValue::Hasher::hash(*reinterpret_cast<HashableValue*>(&key)));
         Value prior = key;
         gc::MarkValueUnbarriered(trc, &key, "ordered hash table key");
         table->rekeyOneEntry(prior, key);
     }
 };
-#endif
 
 inline static void
 WriteBarrierPost(JSRuntime *rt, ValueMap *map, const Value &key)
 {
-#ifdef JSGC_GENERATIONAL
     typedef OrderedHashMap<Value, Value, UnbarrieredHashPolicy, RuntimeAllocPolicy> UnbarrieredMap;
     if (MOZ_UNLIKELY(key.isObject() && IsInsideNursery(&key.toObject()))) {
         rt->gc.storeBuffer.putGeneric(OrderedHashTableRef<UnbarrieredMap>(
                     reinterpret_cast<UnbarrieredMap *>(map), key));
     }
-#endif
 }
 
 inline static void
 WriteBarrierPost(JSRuntime *rt, ValueSet *set, const Value &key)
 {
-#ifdef JSGC_GENERATIONAL
     typedef OrderedHashSet<Value, UnbarrieredHashPolicy, RuntimeAllocPolicy> UnbarrieredSet;
     if (MOZ_UNLIKELY(key.isObject() && IsInsideNursery(&key.toObject()))) {
         rt->gc.storeBuffer.putGeneric(OrderedHashTableRef<UnbarrieredSet>(
                     reinterpret_cast<UnbarrieredSet *>(set), key));
     }
-#endif
 }
 
 bool
 MapObject::getKeysAndValuesInterleaved(JSContext *cx, HandleObject obj,
                                        JS::AutoValueVector *entries)
 {
     ValueMap *map = obj->as<MapObject>().getData();
     if (!map)
--- a/js/src/builtin/TestingFunctions.cpp
+++ b/js/src/builtin/TestingFunctions.cpp
@@ -64,16 +64,19 @@ GetBuildConfiguration(JSContext *cx, uns
         return false;
 
     if (!JS_SetProperty(cx, info, "trace-jscalls-api", FalseHandleValue))
         return false;
 
     if (!JS_SetProperty(cx, info, "incremental-gc", TrueHandleValue))
         return false;
 
+    if (!JS_SetProperty(cx, info, "generational-gc", TrueHandleValue))
+        return false;
+
     RootedValue value(cx);
 #ifdef DEBUG
     value = BooleanValue(true);
 #else
     value = BooleanValue(false);
 #endif
     if (!JS_SetProperty(cx, info, "debug", value))
         return false;
@@ -145,24 +148,16 @@ GetBuildConfiguration(JSContext *cx, uns
 #ifdef INCLUDE_MOZILLA_DTRACE
     value = BooleanValue(true);
 #else
     value = BooleanValue(false);
 #endif
     if (!JS_SetProperty(cx, info, "dtrace", value))
         return false;
 
-#ifdef JSGC_GENERATIONAL
-    value = BooleanValue(true);
-#else
-    value = BooleanValue(false);
-#endif
-    if (!JS_SetProperty(cx, info, "generational-gc", value))
-        return false;
-
 #ifdef MOZ_VALGRIND
     value = BooleanValue(true);
 #else
     value = BooleanValue(false);
 #endif
     if (!JS_SetProperty(cx, info, "valgrind", value))
         return false;
 
@@ -259,22 +254,20 @@ GC(JSContext *cx, unsigned argc, jsval *
     args.rval().setString(str);
     return true;
 }
 
 static bool
 MinorGC(JSContext *cx, unsigned argc, jsval *vp)
 {
     CallArgs args = CallArgsFromVp(argc, vp);
-#ifdef JSGC_GENERATIONAL
     if (args.get(0) == BooleanValue(true))
         cx->runtime()->gc.storeBuffer.setAboutToOverflow();
 
     cx->minorGC(JS::gcreason::API);
-#endif
     args.rval().setUndefined();
     return true;
 }
 
 static const struct ParamPair {
     const char      *name;
     JSGCParamKey    param;
 } paramMap[] = {
--- a/js/src/builtin/TypedObject.cpp
+++ b/js/src/builtin/TypedObject.cpp
@@ -2393,34 +2393,32 @@ bool
 LazyArrayBufferTable::addBuffer(JSContext *cx, InlineTransparentTypedObject *obj, ArrayBufferObject *buffer)
 {
     MOZ_ASSERT(!map.has(obj));
     if (!map.put(obj, buffer)) {
         js_ReportOutOfMemory(cx);
         return false;
     }
 
-#ifdef JSGC_GENERATIONAL
     MOZ_ASSERT(!IsInsideNursery(buffer));
     if (IsInsideNursery(obj)) {
         // Strip the barriers from the type before inserting into the store
         // buffer, as is done for DebugScopes::proxiedScopes.
         Map::Base *baseHashMap = static_cast<Map::Base *>(&map);
 
         typedef HashMap<JSObject *, JSObject *> UnbarrieredMap;
         UnbarrieredMap *unbarrieredMap = reinterpret_cast<UnbarrieredMap *>(baseHashMap);
 
         typedef gc::HashKeyRef<UnbarrieredMap, JSObject *> Ref;
         cx->runtime()->gc.storeBuffer.putGeneric(Ref(unbarrieredMap, obj));
 
         // Also make sure the buffer is traced, so that its data pointer is
         // updated after the typed object moves.
         cx->runtime()->gc.storeBuffer.putWholeCellFromMainThread(buffer);
     }
-#endif
 
     return true;
 }
 
 void
 LazyArrayBufferTable::trace(JSTracer *trc)
 {
     map.trace(trc);
--- a/js/src/configure.in
+++ b/js/src/configure.in
@@ -3128,33 +3128,16 @@ fi
 dnl ========================================================
 dnl = Location of malloc wrapper lib
 dnl ========================================================
 MOZ_ARG_WITH_STRING(wrap-malloc,
 [  --with-wrap-malloc=DIR  Location of malloc wrapper library],
     WRAP_LDFLAGS="${WRAP_LDFLAGS} $withval")
 
 dnl ========================================================
-dnl = Use generational GC
-dnl ========================================================
-dnl Use generational GC by default in all shell builds. The top-level mozilla
-dnl configure.in will configure SpiderMonkey with --disable-gcgenerational as
-dnl needed on a per-platform basis.
-JSGC_GENERATIONAL=1
-MOZ_ARG_DISABLE_BOOL(gcgenerational,
-[  --disable-gcgenerational Disable generational GC],
-    JSGC_GENERATIONAL= ,
-    JSGC_GENERATIONAL=1 )
-if test -n "$JSGC_GENERATIONAL"; then
-    AC_DEFINE(JSGC_GENERATIONAL)
-fi
-JSGC_GENERATIONAL_CONFIGURED=$JSGC_GENERATIONAL
-AC_SUBST(JSGC_GENERATIONAL_CONFIGURED)
-
-dnl ========================================================
 dnl = Use compacting GC
 dnl ========================================================
 dnl Compact the heap by moving GC things when doing a shrinking colletion.
 MOZ_ARG_ENABLE_BOOL(gccompacting,
 [  --enable-gccompacting   Compact the heap by moving GC things],
     JSGC_COMPACTING=1,
     JSGC_COMPACTING= )
 if test -n "$JSGC_COMPACTING"; then
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -5,19 +5,17 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef gc_Barrier_h
 #define gc_Barrier_h
 
 #include "NamespaceImports.h"
 
 #include "gc/Heap.h"
-#ifdef JSGC_GENERATIONAL
-# include "gc/StoreBuffer.h"
-#endif
+#include "gc/StoreBuffer.h"
 #include "js/HashTable.h"
 #include "js/Id.h"
 #include "js/RootingAPI.h"
 
 /*
  * A write barrier is a mechanism used by incremental or generation GCs to
  * ensure that every value that needs to be marked is marked. In general, the
  * write barrier should be invoked whenever a write can cause the set of things
@@ -344,46 +342,40 @@ struct InternalGCMethods<Value>
             MOZ_ASSERT_IF(v.isMarkable(), shadowRuntimeFromMainThread(v)->needsIncrementalBarrier());
             Value tmp(v);
             js::gc::MarkValueUnbarriered(shadowZone->barrierTracer(), &tmp, "write barrier");
             MOZ_ASSERT(tmp == v);
         }
     }
 
     static void postBarrier(Value *vp) {
-#ifdef JSGC_GENERATIONAL
         MOZ_ASSERT(!CurrentThreadIsIonCompiling());
         if (vp->isObject()) {
             gc::StoreBuffer *sb = reinterpret_cast<gc::Cell *>(&vp->toObject())->storeBuffer();
             if (sb)
                 sb->putValueFromAnyThread(vp);
         }
-#endif
     }
 
     static void postBarrierRelocate(Value *vp) {
-#ifdef JSGC_GENERATIONAL
         MOZ_ASSERT(!CurrentThreadIsIonCompiling());
         if (vp->isObject()) {
             gc::StoreBuffer *sb = reinterpret_cast<gc::Cell *>(&vp->toObject())->storeBuffer();
             if (sb)
                 sb->putRelocatableValueFromAnyThread(vp);
         }
-#endif
     }
 
     static void postBarrierRemove(Value *vp) {
-#ifdef JSGC_GENERATIONAL
         MOZ_ASSERT(vp);
         MOZ_ASSERT(vp->isMarkable());
         MOZ_ASSERT(!CurrentThreadIsIonCompiling());
         JSRuntime *rt = static_cast<js::gc::Cell *>(vp->toGCThing())->runtimeFromAnyThread();
         JS::shadow::Runtime *shadowRuntime = JS::shadow::Runtime::asShadowRuntime(rt);
         shadowRuntime->gcStoreBufferPtr()->removeRelocatableValueFromAnyThread(vp);
-#endif
     }
 
     static void readBarrier(const Value &v) { ValueReadBarrier(v); }
 };
 
 template <>
 struct InternalGCMethods<jsid>
 {
@@ -667,27 +659,23 @@ class RelocatablePtr : public BarrieredB
             this->value = v;
         }
 
         return *this;
     }
 
   protected:
     void post() {
-#ifdef JSGC_GENERATIONAL
         MOZ_ASSERT(GCMethods<T>::needsPostBarrier(this->value));
         InternalGCMethods<T>::postBarrierRelocate(&this->value);
-#endif
     }
 
     void relocate() {
-#ifdef JSGC_GENERATIONAL
         MOZ_ASSERT(GCMethods<T>::needsPostBarrier(this->value));
         InternalGCMethods<T>::postBarrierRemove(&this->value);
-#endif
     }
 };
 
 /*
  * This is a hack for RegExpStatics::updateFromMatch. It allows us to do two
  * barriers with only one branch to check if we're in an incremental GC.
  */
 template <class T1, class T2>
@@ -911,23 +899,21 @@ class HeapSlot : public BarrieredBase<Va
     /* For users who need to manually barrier the raw types. */
     static void writeBarrierPost(NativeObject *owner, Kind kind, uint32_t slot, const Value &target) {
         reinterpret_cast<HeapSlot *>(const_cast<Value *>(&target))->post(owner, kind, slot, target);
     }
 
   private:
     void post(NativeObject *owner, Kind kind, uint32_t slot, const Value &target) {
         MOZ_ASSERT(preconditionForWriteBarrierPost(owner, kind, slot, target));
-#ifdef JSGC_GENERATIONAL
         if (this->value.isObject()) {
             gc::Cell *cell = reinterpret_cast<gc::Cell *>(&this->value.toObject());
             if (cell->storeBuffer())
                 cell->storeBuffer()->putSlotFromAnyThread(owner, kind, slot, 1);
         }
-#endif
     }
 };
 
 static inline const Value *
 Valueify(const BarrieredBase<Value> *array)
 {
     JS_STATIC_ASSERT(sizeof(HeapValue) == sizeof(Value));
     JS_STATIC_ASSERT(sizeof(HeapSlot) == sizeof(Value));
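
(Condensed sketch, not from the patch.) The write-barrier comment and the postBarrier methods in the Barrier.h hunk above reduce to one store-buffer insertion, which this change now compiles in unconditionally; the free function below restates that pattern under an illustrative name:

    #include "gc/StoreBuffer.h"
    #include "js/Value.h"

    // Record a tenured-to-nursery edge for a value write: only object values
    // can point into the nursery, and only nursery cells report a store buffer.
    static void
    PostBarrierValueSketch(JS::Value *vp)
    {
        if (!vp->isObject())
            return;
        js::gc::StoreBuffer *sb =
            reinterpret_cast<js::gc::Cell *>(&vp->toObject())->storeBuffer();
        if (sb)
            sb->putValueFromAnyThread(vp);
    }
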
--- a/js/src/gc/ForkJoinNursery.h
+++ b/js/src/gc/ForkJoinNursery.h
@@ -5,20 +5,16 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef gc_ForkJoinNursery_h
 #define gc_ForkJoinNursery_h
 
 #ifdef JSGC_FJGENERATIONAL
 
-#ifndef JSGC_GENERATIONAL
-#error "JSGC_GENERATIONAL is required for the ForkJoinNursery"
-#endif
-
 #include "jsalloc.h"
 #include "jspubtd.h"
 
 #include "gc/Heap.h"
 #include "gc/Memory.h"
 #include "gc/Nursery.h"
 
 #include "js/HashTable.h"
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -5,23 +5,19 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef gc_GCRuntime_h
 #define gc_GCRuntime_h
 
 #include "jsgc.h"
 
 #include "gc/Heap.h"
-#ifdef JSGC_GENERATIONAL
-# include "gc/Nursery.h"
-#endif
+#include "gc/Nursery.h"
 #include "gc/Statistics.h"
-#ifdef JSGC_GENERATIONAL
-# include "gc/StoreBuffer.h"
-#endif
+#include "gc/StoreBuffer.h"
 #include "gc/Tracer.h"
 
 /* Perform validation of incremental marking in debug builds but not on B2G. */
 #if defined(DEBUG) && !defined(MOZ_B2G)
 #define JS_GC_MARKING_VALIDATION
 #endif
 
 namespace js {
@@ -369,19 +365,17 @@ class GCRuntime
     js::gc::State state() { return incrementalState; }
     bool isBackgroundSweeping() { return helperState.isBackgroundSweeping(); }
     void waitBackgroundSweepEnd() { helperState.waitBackgroundSweepEnd(); }
     void waitBackgroundSweepOrAllocEnd() {
         helperState.waitBackgroundSweepEnd();
         allocTask.cancel(GCParallelTask::CancelAndWait);
     }
 
-#ifdef JSGC_GENERATIONAL
     void requestMinorGC(JS::gcreason::Reason reason);
-#endif
 
 #ifdef DEBUG
 
     bool onBackgroundThread() { return helperState.onBackgroundThread(); }
 
     bool currentThreadOwnsGCLock() {
         return lockOwner == PR_GetCurrentThread();
     }
@@ -638,20 +632,18 @@ class GCRuntime
     JSRuntime *rt;
 
     /* Embedders can use this zone however they wish. */
     JS::Zone *systemZone;
 
     /* List of compartments and zones (protected by the GC lock). */
     js::gc::ZoneVector zones;
 
-#ifdef JSGC_GENERATIONAL
     js::Nursery nursery;
     js::gc::StoreBuffer storeBuffer;
-#endif
 
     js::gcstats::Statistics stats;
 
     js::GCMarker marker;
 
     /* Track heap usage for this runtime. */
     HeapUsage usage;
 
@@ -703,20 +695,18 @@ class GCRuntime
      * The gray bits can become invalid if UnmarkGray overflows the stack. A
      * full GC will reset this bit, since it fills in all the gray bits.
      */
     bool grayBitsValid;
 
     volatile uintptr_t majorGCRequested;
     JS::gcreason::Reason majorGCTriggerReason;
 
-#ifdef JSGC_GENERATIONAL
     bool minorGCRequested;
     JS::gcreason::Reason minorGCTriggerReason;
-#endif
 
     /* Incremented at the start of every major GC. */
     uint64_t majorGCNumber;
 
     /* The major GC number at which to release observed type information. */
     uint64_t jitReleaseNumber;
 
     /* Incremented on every GC slice. */
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -18,19 +18,17 @@
 #include "vm/Shape.h"
 #include "vm/Symbol.h"
 #include "vm/TypedArrayObject.h"
 
 #include "jscompartmentinlines.h"
 #include "jsinferinlines.h"
 #include "jsobjinlines.h"
 
-#ifdef JSGC_GENERATIONAL
-# include "gc/Nursery-inl.h"
-#endif
+#include "gc/Nursery-inl.h"
 #include "vm/String-inl.h"
 #include "vm/Symbol-inl.h"
 
 using namespace js;
 using namespace js::gc;
 
 using mozilla::DebugOnly;
 
@@ -434,36 +432,35 @@ namespace js {
 namespace gc {
 
 template <typename T>
 static bool
 IsMarked(T **thingp)
 {
     MOZ_ASSERT(thingp);
     MOZ_ASSERT(*thingp);
-#ifdef JSGC_GENERATIONAL
     JSRuntime* rt = (*thingp)->runtimeFromAnyThread();
 #ifdef JSGC_FJGENERATIONAL
-    // Must precede the case for JSGC_GENERATIONAL because IsInsideNursery()
+    // Must precede the case for GGC because IsInsideNursery()
     // will also be true for the ForkJoinNursery.
     if (rt->isFJMinorCollecting()) {
         ForkJoinContext *ctx = ForkJoinContext::current();
         ForkJoinNursery &nursery = ctx->nursery();
         if (nursery.isInsideFromspace(*thingp))
             return nursery.getForwardedPointer(thingp);
     }
     else
 #endif
     {
         if (IsInsideNursery(*thingp)) {
             Nursery &nursery = rt->gc.nursery;
             return nursery.getForwardedPointer(thingp);
         }
     }
-#endif  // JSGC_GENERATIONAL
+
     Zone *zone = (*thingp)->asTenured().zone();
     if (!zone->isCollecting() || zone->isGCFinished())
         return true;
 #ifdef JSGC_COMPACTING
     if (zone->isGCCompacting() && IsForwarded(*thingp))
         *thingp = Forwarded(*thingp);
 #endif
     return (*thingp)->asTenured().isMarked();
@@ -487,17 +484,16 @@ IsAboutToBeFinalizedFromAnyThread(T **th
 
     T *thing = *thingp;
     JSRuntime *rt = thing->runtimeFromAnyThread();
 
     /* Permanent atoms are never finalized by non-owning runtimes. */
     if (ThingIsPermanentAtom(thing) && !TlsPerThreadData.get()->associatedWith(rt))
         return false;
 
-#ifdef JSGC_GENERATIONAL
 #ifdef JSGC_FJGENERATIONAL
     if (rt->isFJMinorCollecting()) {
         ForkJoinContext *ctx = ForkJoinContext::current();
         ForkJoinNursery &nursery = ctx->nursery();
         if (nursery.isInsideFromspace(thing))
             return !nursery.getForwardedPointer(thingp);
     }
     else
@@ -506,17 +502,16 @@ IsAboutToBeFinalizedFromAnyThread(T **th
         Nursery &nursery = rt->gc.nursery;
         MOZ_ASSERT_IF(!rt->isHeapMinorCollecting(), !IsInsideNursery(thing));
         if (rt->isHeapMinorCollecting()) {
             if (IsInsideNursery(thing))
                 return !nursery.getForwardedPointer(thingp);
             return false;
         }
     }
-#endif  // JSGC_GENERATIONAL
 
     Zone *zone = thing->asTenured().zoneFromAnyThread();
     if (zone->isGCSweeping()) {
         if (thing->asTenured().arenaHeader()->allocatedDuringIncremental)
             return false;
         return !thing->asTenured().isMarked();
     }
 #ifdef JSGC_COMPACTING
@@ -532,33 +527,30 @@ IsAboutToBeFinalizedFromAnyThread(T **th
 template <typename T>
 T *
 UpdateIfRelocated(JSRuntime *rt, T **thingp)
 {
     MOZ_ASSERT(thingp);
     if (!*thingp)
         return nullptr;
 
-#ifdef JSGC_GENERATIONAL
-
 #ifdef JSGC_FJGENERATIONAL
     if (rt->isFJMinorCollecting()) {
         ForkJoinContext *ctx = ForkJoinContext::current();
         ForkJoinNursery &nursery = ctx->nursery();
         if (nursery.isInsideFromspace(*thingp))
             nursery.getForwardedPointer(thingp);
         return *thingp;
     }
 #endif
 
     if (rt->isHeapMinorCollecting() && IsInsideNursery(*thingp)) {
         rt->gc.nursery.getForwardedPointer(thingp);
         return *thingp;
     }
-#endif  // JSGC_GENERATIONAL
 
 #ifdef JSGC_COMPACTING
     Zone *zone = (*thingp)->zone();
     if (zone->isGCCompacting() && IsForwarded(*thingp))
         *thingp = Forwarded(*thingp);
 #endif
     return *thingp;
 }
--- a/js/src/gc/Nursery-inl.h
+++ b/js/src/gc/Nursery-inl.h
@@ -3,18 +3,16 @@
  *
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef gc_Nursery_inl_h
 #define gc_Nursery_inl_h
 
-#ifdef JSGC_GENERATIONAL
-
 #include "gc/Nursery.h"
 
 #include "gc/Heap.h"
 #include "js/TracingAPI.h"
 #include "vm/Runtime.h"
 
 template <typename T>
 MOZ_ALWAYS_INLINE bool
@@ -31,11 +29,9 @@ js::Nursery::getForwardedPointer(T **ref
 }
 
 inline void
 js::Nursery::forwardBufferPointer(JSTracer* trc, HeapSlot **pSlotElems)
 {
     trc->runtime()->gc.nursery.forwardBufferPointer(pSlotElems);
 }
 
-#endif /* JSGC_GENERATIONAL */
-
 #endif /* gc_Nursery_inl_h */
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -1,17 +1,15 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
  * vim: set ts=8 sw=4 et tw=78:
  *
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-#ifdef JSGC_GENERATIONAL
-
 #include "gc/Nursery-inl.h"
 
 #include "mozilla/IntegerPrintfMacros.h"
 
 #include "jscompartment.h"
 #include "jsgc.h"
 #include "jsinfer.h"
 #include "jsutil.h"
@@ -973,10 +971,8 @@ js::Nursery::shrinkAllocableSpace()
 {
 #ifdef JS_GC_ZEAL
     if (runtime()->gcZeal() == ZealGenerationalGCValue)
         return;
 #endif
     numActiveChunks_ = Max(numActiveChunks_ - 1, 1);
     updateDecommittedRegion();
 }
-
-#endif /* JSGC_GENERATIONAL */
--- a/js/src/gc/Nursery.h
+++ b/js/src/gc/Nursery.h
@@ -3,18 +3,16 @@
  *
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef gc_Nursery_h
 #define gc_Nursery_h
 
-#ifdef JSGC_GENERATIONAL
-
 #include "jsalloc.h"
 #include "jspubtd.h"
 
 #include "ds/BitArray.h"
 #include "gc/Heap.h"
 #include "gc/Memory.h"
 #include "js/GCAPI.h"
 #include "js/HashTable.h"
@@ -334,10 +332,9 @@ class Nursery
     static void MinorGCCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind);
 
     friend class gc::MinorCollectionTracer;
     friend class jit::MacroAssembler;
 };
 
 } /* namespace js */
 
-#endif /* JSGC_GENERATIONAL */
 #endif /* gc_Nursery_h */
--- a/js/src/gc/StoreBuffer.cpp
+++ b/js/src/gc/StoreBuffer.cpp
@@ -1,16 +1,14 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
  * vim: set ts=8 sts=4 et sw=4 tw=99:
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-#ifdef JSGC_GENERATIONAL
-
 #include "gc/StoreBuffer.h"
 
 #include "mozilla/Assertions.h"
 
 #include "gc/Statistics.h"
 #include "vm/ArgumentsObject.h"
 #include "vm/ForkJoin.h"
 
@@ -251,10 +249,8 @@ JS::HeapValueRelocate(JS::Value *valuep)
     JSRuntime *runtime = static_cast<js::gc::Cell *>(valuep->toGCThing())->runtimeFromMainThread();
     runtime->gc.storeBuffer.removeRelocatableValueFromAnyThread(valuep);
 }
 
 template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::ValueEdge>;
 template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::CellPtrEdge>;
 template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::SlotsEdge>;
 template struct StoreBuffer::MonoTypeBuffer<StoreBuffer::WholeCellEdges>;
-
-#endif /* JSGC_GENERATIONAL */
--- a/js/src/gc/StoreBuffer.h
+++ b/js/src/gc/StoreBuffer.h
@@ -2,18 +2,16 @@
  * vim: set ts=8 sts=4 et sw=4 tw=99:
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef gc_StoreBuffer_h
 #define gc_StoreBuffer_h
 
-#ifdef JSGC_GENERATIONAL
-
 #include "mozilla/Attributes.h"
 #include "mozilla/DebugOnly.h"
 #include "mozilla/ReentrancyGuard.h"
 
 #include "jsalloc.h"
 
 #include "ds/LifoAlloc.h"
 #include "gc/Nursery.h"
@@ -498,11 +496,9 @@ class StoreBuffer
     }
 
     void addSizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf, JS::GCSizes *sizes);
 };
 
 } /* namespace gc */
 } /* namespace js */
 
-#endif /* JSGC_GENERATIONAL */
-
 #endif /* gc_StoreBuffer_h */
--- a/js/src/gc/Verifier.cpp
+++ b/js/src/gc/Verifier.cpp
@@ -398,36 +398,33 @@ struct VerifyPostTracer : JSTracer
 /*
  * The post-barrier verifier runs the full store buffer and a fake nursery when
  * running and when it stops, walks the full heap to ensure that all the
  * important edges were inserted into the storebuffer.
  */
 void
 gc::GCRuntime::startVerifyPostBarriers()
 {
-#ifdef JSGC_GENERATIONAL
     if (verifyPostData ||
         incrementalState != NO_INCREMENTAL)
     {
         return;
     }
 
     evictNursery();
 
     number++;
 
     VerifyPostTracer *trc = js_new<VerifyPostTracer>(rt, JSTraceCallback(nullptr));
     if (!trc)
         return;
 
     verifyPostData = trc;
-#endif
 }
 
-#ifdef JSGC_GENERATIONAL
 void
 PostVerifierCollectStoreBufferEdges(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
 {
     VerifyPostTracer *trc = (VerifyPostTracer *)jstrc;
 
     /* The nursery only stores objects. */
     if (kind != JSTRACE_OBJECT)
         return;
@@ -480,22 +477,20 @@ PostVerifierVisitEdge(JSTracer *jstrc, v
      * only things that enter this callback are marked by the JS_TraceChildren
      * below. Since JSObject::markChildren handles this, the real trace
      * location will be set correctly in these cases.
      */
     void **loc = trc->tracingLocation(thingp);
 
     AssertStoreBufferContainsEdge(trc->edges, loc, dst);
 }
-#endif
 
 bool
 js::gc::GCRuntime::endVerifyPostBarriers()
 {
-#ifdef JSGC_GENERATIONAL
     VerifyPostTracer *trc = (VerifyPostTracer *)verifyPostData;
     if (!trc)
         return false;
 
     VerifyPostTracer::EdgeSet edges;
     AutoPrepareForTracing prep(rt, SkipAtoms);
 
     /* Visit every entry in the store buffer and put the edges in a hash set. */
@@ -518,19 +513,16 @@ js::gc::GCRuntime::endVerifyPostBarriers
             }
         }
     }
 
 oom:
     js_delete(trc);
     verifyPostData = nullptr;
     return true;
-#else
-    return false;
-#endif
 }
 
 /*** Barrier Verifier Scheduling ***/
 
 void
 gc::GCRuntime::verifyPreBarriers()
 {
     if (verifyPreData)
@@ -574,31 +566,29 @@ gc::GCRuntime::maybeVerifyPreBarriers(bo
     }
 
     startVerifyPreBarriers();
 }
 
 void
 gc::GCRuntime::maybeVerifyPostBarriers(bool always)
 {
-#ifdef JSGC_GENERATIONAL
     if (zealMode != ZealVerifierPostValue)
         return;
 
     if (rt->mainThread.suppressGC || !storeBuffer.isEnabled())
         return;
 
     if (VerifyPostTracer *trc = (VerifyPostTracer *)verifyPostData) {
         if (++trc->count < zealFrequency && !always)
             return;
 
         endVerifyPostBarriers();
     }
     startVerifyPostBarriers();
-#endif
 }
 
 void
 js::gc::MaybeVerifyBarriers(JSContext *cx, bool always)
 {
     GCRuntime *gc = &cx->runtime()->gc;
     gc->maybeVerifyPreBarriers(always);
     gc->maybeVerifyPostBarriers(always);
@@ -606,17 +596,15 @@ js::gc::MaybeVerifyBarriers(JSContext *c
 
 void
 js::gc::GCRuntime::finishVerifier()
 {
     if (VerifyPreTracer *trc = (VerifyPreTracer *)verifyPreData) {
         js_delete(trc);
         verifyPreData = nullptr;
     }
-#ifdef JSGC_GENERATIONAL
     if (VerifyPostTracer *trc = (VerifyPostTracer *)verifyPostData) {
         js_delete(trc);
         verifyPostData = nullptr;
     }
-#endif
 }
 
 #endif /* JS_GC_ZEAL */
--- a/js/src/jit/BaselineCompiler.cpp
+++ b/js/src/jit/BaselineCompiler.cpp
@@ -98,20 +98,18 @@ BaselineCompiler::compile()
 
     MethodStatus status = emitBody();
     if (status != Method_Compiled)
         return status;
 
     if (!emitEpilogue())
         return Method_Error;
 
-#ifdef JSGC_GENERATIONAL
     if (!emitOutOfLinePostBarrierSlot())
         return Method_Error;
-#endif
 
     if (masm.oom())
         return Method_Error;
 
     Linker linker(masm);
     AutoFlushICache afc("Baseline");
     JitCode *code = linker.newCode<CanGC>(cx, BASELINE_CODE);
     if (!code)
@@ -443,17 +441,16 @@ BaselineCompiler::emitEpilogue()
 
     masm.mov(BaselineFrameReg, BaselineStackReg);
     masm.pop(BaselineFrameReg);
 
     masm.ret();
     return true;
 }
 
-#ifdef JSGC_GENERATIONAL
 // On input:
 //  R2.scratchReg() contains object being written to.
 //  Called with the baseline stack synced, except for R0 which is preserved.
 //  All other registers are usable as scratch.
 // This calls:
 //    void PostWriteBarrier(JSRuntime *rt, JSObject *obj);
 bool
 BaselineCompiler::emitOutOfLinePostBarrierSlot()
@@ -480,17 +477,16 @@ BaselineCompiler::emitOutOfLinePostBarri
     masm.passABIArg(scratch);
     masm.passABIArg(objReg);
     masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, PostWriteBarrier));
 
     masm.popValue(R0);
     masm.ret();
     return true;
 }
-#endif // JSGC_GENERATIONAL
 
 bool
 BaselineCompiler::emitIC(ICStub *stub, ICEntry::Kind kind)
 {
     ICEntry *entry = allocateICEntry(stub, kind);
     if (!entry)
         return false;
 
@@ -2242,30 +2238,27 @@ BaselineCompiler::emit_JSOP_SETALIASEDVA
     Register objReg = R2.scratchReg();
 
     getScopeCoordinateObject(objReg);
     Address address = getScopeCoordinateAddressFromObject(objReg, R1.scratchReg());
     masm.patchableCallPreBarrier(address, MIRType_Value);
     masm.storeValue(R0, address);
     frame.push(R0);
 
-#ifdef JSGC_GENERATIONAL
     // Only R0 is live at this point.
     // Scope coordinate object is already in R2.scratchReg().
     Register temp = R1.scratchReg();
 
     Label skipBarrier;
     masm.branchPtrInNurseryRange(Assembler::Equal, objReg, temp, &skipBarrier);
     masm.branchValueIsNurseryObject(Assembler::NotEqual, R0, temp, &skipBarrier);
 
     masm.call(&postBarrierSlot_); // Won't clobber R0
 
     masm.bind(&skipBarrier);
-#endif
-
     return true;
 }
 
 bool
 BaselineCompiler::emit_JSOP_GETNAME()
 {
     frame.syncStack(0);
 
@@ -2582,34 +2575,32 @@ BaselineCompiler::emitFormalArgAccess(ui
     if (get) {
         masm.loadValue(argAddr, R0);
         frame.push(R0);
     } else {
         masm.patchableCallPreBarrier(argAddr, MIRType_Value);
         masm.loadValue(frame.addressOfStackValue(frame.peek(-1)), R0);
         masm.storeValue(R0, argAddr);
 
-#ifdef JSGC_GENERATIONAL
         MOZ_ASSERT(frame.numUnsyncedSlots() == 0);
 
         Register temp = R1.scratchReg();
 
         // Reload the arguments object
         Register reg = R2.scratchReg();
         masm.loadPtr(Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfArgsObj()), reg);
 
         Label skipBarrier;
 
         masm.branchPtrInNurseryRange(Assembler::Equal, reg, temp, &skipBarrier);
         masm.branchValueIsNurseryObject(Assembler::NotEqual, R0, temp, &skipBarrier);
 
         masm.call(&postBarrierSlot_);
 
         masm.bind(&skipBarrier);
-#endif
     }
 
     masm.bind(&done);
     return true;
 }
 
 bool
 BaselineCompiler::emit_JSOP_GETARG()
@@ -3374,27 +3365,25 @@ BaselineCompiler::emit_JSOP_INITIALYIELD
     masm.storeValue(Int32Value(0), Address(genObj, GeneratorObject::offsetOfYieldIndexSlot()));
 
     Register scopeObj = R0.scratchReg();
     Address scopeChainSlot(genObj, GeneratorObject::offsetOfScopeChainSlot());
     masm.loadPtr(frame.addressOfScopeChain(), scopeObj);
     masm.patchableCallPreBarrier(scopeChainSlot, MIRType_Value);
     masm.storeValue(JSVAL_TYPE_OBJECT, scopeObj, scopeChainSlot);
 
-#ifdef JSGC_GENERATIONAL
     Register temp = R1.scratchReg();
     Label skipBarrier;
     masm.branchPtrInNurseryRange(Assembler::Equal, genObj, temp, &skipBarrier);
     masm.branchPtrInNurseryRange(Assembler::NotEqual, scopeObj, temp, &skipBarrier);
     masm.push(genObj);
     MOZ_ASSERT(genObj == R2.scratchReg());
     masm.call(&postBarrierSlot_);
     masm.pop(genObj);
     masm.bind(&skipBarrier);
-#endif
 
     masm.tagValue(JSVAL_TYPE_OBJECT, genObj, JSReturnOperand);
     return emitReturn();
 }
 
 typedef bool (*NormalSuspendFn)(JSContext *, HandleObject, BaselineFrame *, jsbytecode *, uint32_t);
 static const VMFunction NormalSuspendInfo = FunctionInfo<NormalSuspendFn>(jit::NormalSuspend);
 
@@ -3433,25 +3422,23 @@ BaselineCompiler::emit_JSOP_YIELD()
                         Address(genObj, GeneratorObject::offsetOfYieldIndexSlot()));
 
         Register scopeObj = R0.scratchReg();
         Address scopeChainSlot(genObj, GeneratorObject::offsetOfScopeChainSlot());
         masm.loadPtr(frame.addressOfScopeChain(), scopeObj);
         masm.patchableCallPreBarrier(scopeChainSlot, MIRType_Value);
         masm.storeValue(JSVAL_TYPE_OBJECT, scopeObj, scopeChainSlot);
 
-#ifdef JSGC_GENERATIONAL
         Register temp = R1.scratchReg();
         Label skipBarrier;
         masm.branchPtrInNurseryRange(Assembler::Equal, genObj, temp, &skipBarrier);
         masm.branchPtrInNurseryRange(Assembler::NotEqual, scopeObj, temp, &skipBarrier);
         MOZ_ASSERT(genObj == R2.scratchReg());
         masm.call(&postBarrierSlot_);
         masm.bind(&skipBarrier);
-#endif
     } else {
         masm.loadBaselineFramePtr(BaselineFrameReg, R1.scratchReg());
 
         prepareVMCall();
         pushArg(Imm32(frame.stackDepth()));
         pushArg(ImmPtr(pc));
         pushArg(R1.scratchReg());
         pushArg(genObj);
--- a/js/src/jit/BaselineCompiler.h
+++ b/js/src/jit/BaselineCompiler.h
@@ -191,19 +191,17 @@ namespace jit {
     _(JSOP_SETRVAL)            \
     _(JSOP_RETRVAL)            \
     _(JSOP_RETURN)
 
 class BaselineCompiler : public BaselineCompilerSpecific
 {
     FixedList<Label>            labels_;
     NonAssertingLabel           return_;
-#ifdef JSGC_GENERATIONAL
     NonAssertingLabel           postBarrierSlot_;
-#endif
 
     // Native code offset right before the scope chain is initialized.
     CodeOffsetLabel prologueOffset_;
 
     // Native code offset right before the frame is popped and the method
     // returned from.
     CodeOffsetLabel epilogueOffset_;
 
@@ -236,19 +234,17 @@ class BaselineCompiler : public Baseline
     MethodStatus compile();
 
   private:
     MethodStatus emitBody();
 
     void emitInitializeLocals(size_t n, const Value &v);
     bool emitPrologue();
     bool emitEpilogue();
-#ifdef JSGC_GENERATIONAL
     bool emitOutOfLinePostBarrierSlot();
-#endif
     bool emitIC(ICStub *stub, ICEntry::Kind kind);
     bool emitOpIC(ICStub *stub) {
         return emitIC(stub, ICEntry::Kind_Op);
     }
     bool emitNonOpIC(ICStub *stub) {
         return emitIC(stub, ICEntry::Kind_NonOp);
     }
 
--- a/js/src/jit/BaselineDebugModeOSR.cpp
+++ b/js/src/jit/BaselineDebugModeOSR.cpp
@@ -814,20 +814,18 @@ jit::RecompileOnStackBaselineScriptsForD
             if (!CollectInterpreterStackScripts(cx, obs, iter, entries))
                 return false;
         }
     }
 
     if (entries.empty())
         return true;
 
-#ifdef JSGC_GENERATIONAL
     // Scripts can entrain nursery things. See note in js::ReleaseAllJITCode.
     cx->runtime()->gc.evictNursery();
-#endif
 
     // When the profiler is enabled, we need to have suppressed sampling,
     // since the basline jit scripts are in a state of flux.
     MOZ_ASSERT(!cx->runtime()->isProfilerSamplingEnabled());
 
     // Invalidate all scripts we are recompiling.
     if (Zone *zone = obs.singleZone()) {
         if (!InvalidateScriptsInZone(cx, zone, entries))
--- a/js/src/jit/BaselineIC.cpp
+++ b/js/src/jit/BaselineIC.cpp
@@ -769,17 +769,16 @@ ICStubCompiler::emitProfilingUpdate(Macr
 
 void
 ICStubCompiler::emitProfilingUpdate(MacroAssembler &masm, GeneralRegisterSet regs,
                                     uint32_t stubPcOffset)
 {
     emitProfilingUpdate(masm, regs.takeAny(), regs.takeAny(), stubPcOffset);
 }
 
-#ifdef JSGC_GENERATIONAL
 inline bool
 ICStubCompiler::emitPostWriteBarrierSlot(MacroAssembler &masm, Register obj, ValueOperand val,
                                          Register scratch, GeneralRegisterSet saveRegs)
 {
     Label skipBarrier;
     masm.branchPtrInNurseryRange(Assembler::Equal, obj, scratch, &skipBarrier);
     masm.branchValueIsNurseryObject(Assembler::NotEqual, val, scratch, &skipBarrier);
 
@@ -794,17 +793,16 @@ ICStubCompiler::emitPostWriteBarrierSlot
     masm.passABIArg(scratch);
     masm.passABIArg(obj);
     masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, PostWriteBarrier));
     masm.PopRegsInMask(saveRegs);
 
     masm.bind(&skipBarrier);
     return true;
 }
-#endif // JSGC_GENERATIONAL
 
 //
 // WarmUpCounter_Fallback
 //
 
 static bool
 EnsureCanEnterIon(JSContext *cx, ICWarmUpCounter_Fallback *stub, BaselineFrame *frame,
                   HandleScript script, jsbytecode *pc, void **jitcodePtr)
@@ -3472,24 +3470,22 @@ IsCacheableGetPropCall(JSContext *cx, JS
     if (!shape->hasGetterValue())
         return false;
 
     if (!shape->getterValue().isObject() || !shape->getterObject()->is<JSFunction>())
         return false;
 
     JSFunction *func = &shape->getterObject()->as<JSFunction>();
 
-#ifdef JSGC_GENERATIONAL
     // Information from get prop call ICs may be used directly from Ion code,
     // and should not be nursery allocated.
     if (IsInsideNursery(holder) || IsInsideNursery(func)) {
         *isTemporarilyUnoptimizable = true;
         return false;
     }
-#endif
 
     if (func->isNative()) {
         *isScripted = false;
         return true;
     }
 
     if (!func->hasJITCode()) {
         *isTemporarilyUnoptimizable = true;
@@ -3596,24 +3592,22 @@ IsCacheableSetPropCall(JSContext *cx, JS
     if (!shape->hasSetterValue())
         return false;
 
     if (!shape->setterValue().isObject() || !shape->setterObject()->is<JSFunction>())
         return false;
 
     JSFunction *func = &shape->setterObject()->as<JSFunction>();
 
-#ifdef JSGC_GENERATIONAL
     // Information from set prop call ICs may be used directly from Ion code,
     // and should not be nursery allocated.
     if (IsInsideNursery(holder) || IsInsideNursery(func)) {
         *isTemporarilyUnoptimizable = true;
         return false;
     }
-#endif
 
     if (func->isNative()) {
         *isScripted = false;
         return true;
     }
 
     if (!func->hasJITCode()) {
         *isTemporarilyUnoptimizable = true;
@@ -5428,24 +5422,22 @@ ICSetElem_Dense::Compiler::generateStubC
 
     // Don't overwrite R0 becuase |obj| might overlap with it, and it's needed
     // for post-write barrier later.
     ValueOperand tmpVal = regs.takeAnyValue();
     masm.loadValue(valueAddr, tmpVal);
     EmitPreBarrier(masm, element, MIRType_Value);
     masm.storeValue(tmpVal, element);
     regs.add(key);
-#ifdef JSGC_GENERATIONAL
     if (cx->runtime()->gc.nursery.exists()) {
         Register r = regs.takeAny();
         GeneralRegisterSet saveRegs;
         emitPostWriteBarrierSlot(masm, obj, tmpVal, r, saveRegs);
         regs.add(r);
     }
-#endif
     EmitReturnFromIC(masm);
 
 
     // Failure case - fail but first unstow R0 and R1
     masm.bind(&failureUnstow);
     EmitUnstowICValues(masm, 2);
 
     // Failure case - jump to next stub
@@ -5616,24 +5608,22 @@ ICSetElemDenseAddCompiler::generateStubC
     masm.bind(&dontConvertDoubles);
 
     // Write the value.  No need for pre-barrier since we're not overwriting an old value.
     ValueOperand tmpVal = regs.takeAnyValue();
     BaseIndex element(scratchReg, key, TimesEight);
     masm.loadValue(valueAddr, tmpVal);
     masm.storeValue(tmpVal, element);
     regs.add(key);
-#ifdef JSGC_GENERATIONAL
     if (cx->runtime()->gc.nursery.exists()) {
         Register r = regs.takeAny();
         GeneralRegisterSet saveRegs;
         emitPostWriteBarrierSlot(masm, obj, tmpVal, r, saveRegs);
         regs.add(r);
     }
-#endif
     EmitReturnFromIC(masm);
 
     // Failure case - fail but first unstow R0 and R1
     masm.bind(&failureUnstow);
     EmitUnstowICValues(masm, 2);
 
     // Failure case - jump to next stub
     masm.bind(&failure);
@@ -8409,25 +8399,23 @@ ICSetProp_Native::Compiler::generateStub
     }
 
     // Perform the store.
     masm.load32(Address(BaselineStubReg, ICSetProp_Native::offsetOfOffset()), scratch);
     EmitPreBarrier(masm, BaseIndex(holderReg, scratch, TimesOne), MIRType_Value);
     masm.storeValue(R1, BaseIndex(holderReg, scratch, TimesOne));
     if (holderReg != objReg)
         regs.add(holderReg);
-#ifdef JSGC_GENERATIONAL
     if (cx->runtime()->gc.nursery.exists()) {
         Register scr = regs.takeAny();
         GeneralRegisterSet saveRegs;
         saveRegs.add(R1);
         emitPostWriteBarrierSlot(masm, objReg, R1, scr, saveRegs);
         regs.add(scr);
     }
-#endif
 
     // The RHS has to be in R0.
     masm.moveValue(R1, R0);
     EmitReturnFromIC(masm);
 
     // Failure case - jump to next stub
     masm.bind(&failure);
     EmitStubGuardFailure(masm);
@@ -8555,24 +8543,22 @@ ICSetPropNativeAddCompiler::generateStub
     // Perform the store.  No write barrier required since this is a new
     // initialization.
     masm.load32(Address(BaselineStubReg, ICSetProp_NativeAdd::offsetOfOffset()), scratch);
     masm.storeValue(R1, BaseIndex(holderReg, scratch, TimesOne));
 
     if (holderReg != objReg)
         regs.add(holderReg);
 
-#ifdef JSGC_GENERATIONAL
     if (cx->runtime()->gc.nursery.exists()) {
         Register scr = regs.takeAny();
         GeneralRegisterSet saveRegs;
         saveRegs.add(R1);
         emitPostWriteBarrierSlot(masm, objReg, R1, scr, saveRegs);
     }
-#endif
 
     // The RHS has to be in R0.
     masm.moveValue(R1, R0);
     EmitReturnFromIC(masm);
 
     // Failure case - jump to next stub
     masm.bind(&failureUnstow);
     EmitUnstowICValues(masm, 2);
--- a/js/src/jit/BaselineIC.h
+++ b/js/src/jit/BaselineIC.h
@@ -1147,20 +1147,18 @@ class ICStubCompiler
             break;
           default:
             MOZ_CRASH("Invalid numInputs");
         }
 
         return regs;
     }
 
-#ifdef JSGC_GENERATIONAL
     inline bool emitPostWriteBarrierSlot(MacroAssembler &masm, Register obj, ValueOperand val,
                                          Register scratch, GeneralRegisterSet saveRegs);
-#endif
 
   public:
     virtual ICStub *getStub(ICStubSpace *space) = 0;
 
     static ICStubSpace *StubSpaceForKind(ICStub::Kind kind, JSScript *script) {
         if (ICStub::CanMakeCalls(kind))
             return script->baselineScript()->fallbackStubSpace();
         return script->zone()->jitZone()->optimizedStubSpace();
--- a/js/src/jit/BaselineJIT.cpp
+++ b/js/src/jit/BaselineJIT.cpp
@@ -424,26 +424,24 @@ void
 BaselineScript::Trace(JSTracer *trc, BaselineScript *script)
 {
     script->trace(trc);
 }
 
 void
 BaselineScript::Destroy(FreeOp *fop, BaselineScript *script)
 {
-#ifdef JSGC_GENERATIONAL
     /*
      * When the script contains pointers to nursery things, the store buffer
      * will contain entries refering to the referenced things. Since we can
      * destroy scripts outside the context of a GC, this situation can result
      * in invalid store buffer entries. Assert that if we do destroy scripts
      * outside of a GC that we at least emptied the nursery first.
      */
     MOZ_ASSERT(fop->runtime()->gc.nursery.isEmpty());
-#endif
 
     script->unlinkDependentAsmJSModules(fop);
 
     fop->delete_(script);
 }
 
 void
 BaselineScript::unlinkDependentAsmJSModules(FreeOp *fop)
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -14,19 +14,17 @@
 #include "jslibmath.h"
 #include "jsmath.h"
 #include "jsnum.h"
 #include "jsprf.h"
 
 #include "asmjs/AsmJSModule.h"
 #include "builtin/Eval.h"
 #include "builtin/TypedObject.h"
-#ifdef JSGC_GENERATIONAL
-# include "gc/Nursery.h"
-#endif
+#include "gc/Nursery.h"
 #include "irregexp/NativeRegExpMacroAssembler.h"
 #include "jit/BaselineCompiler.h"
 #include "jit/IonBuilder.h"
 #include "jit/IonCaches.h"
 #include "jit/IonOptimizationLevels.h"
 #include "jit/JitcodeMap.h"
 #include "jit/JitSpewer.h"
 #include "jit/Linker.h"
@@ -2536,17 +2534,16 @@ CodeGenerator::visitMonitorTypes(LMonito
     ValueOperand operand = ToValue(lir, LMonitorTypes::Input);
     Register scratch = ToTempUnboxRegister(lir->temp());
 
     Label matched, miss;
     masm.guardTypeSet(operand, lir->mir()->typeSet(), lir->mir()->barrierKind(), scratch, &miss);
     bailoutFrom(&miss, lir->snapshot());
 }
 
-#ifdef JSGC_GENERATIONAL
 // Out-of-line path to update the store buffer.
 class OutOfLineCallPostWriteBarrier : public OutOfLineCodeBase<CodeGenerator>
 {
     LInstruction *lir_;
     const LAllocation *object_;
 
   public:
     OutOfLineCallPostWriteBarrier(LInstruction *lir, const LAllocation *object)
@@ -2594,22 +2591,20 @@ CodeGenerator::visitOutOfLineCallPostWri
     masm.passABIArg(runtimereg);
     masm.passABIArg(objreg);
     masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, fun));
 
     restoreLiveVolatile(ool->lir());
 
     masm.jump(ool->rejoin());
 }
-#endif
 
 void
 CodeGenerator::visitPostWriteBarrierO(LPostWriteBarrierO *lir)
 {
-#ifdef JSGC_GENERATIONAL
     OutOfLineCallPostWriteBarrier *ool = new(alloc()) OutOfLineCallPostWriteBarrier(lir, lir->object());
     addOutOfLineCode(ool, lir->mir());
 
     Register temp = ToTempRegisterOrInvalid(lir->temp());
 
     if (lir->object()->isConstant()) {
 #ifdef DEBUG
         MOZ_ASSERT(!IsInsideNursery(&lir->object()->toConstant()->toObject()));
@@ -2617,23 +2612,21 @@ CodeGenerator::visitPostWriteBarrierO(LP
     } else {
         masm.branchPtrInNurseryRange(Assembler::Equal, ToRegister(lir->object()), temp,
                                      ool->rejoin());
     }
 
     masm.branchPtrInNurseryRange(Assembler::Equal, ToRegister(lir->value()), temp, ool->entry());
 
     masm.bind(ool->rejoin());
-#endif
 }
 
 void
 CodeGenerator::visitPostWriteBarrierV(LPostWriteBarrierV *lir)
 {
-#ifdef JSGC_GENERATIONAL
     OutOfLineCallPostWriteBarrier *ool = new(alloc()) OutOfLineCallPostWriteBarrier(lir, lir->object());
     addOutOfLineCode(ool, lir->mir());
 
     Register temp = ToTempRegisterOrInvalid(lir->temp());
 
     if (lir->object()->isConstant()) {
 #ifdef DEBUG
         MOZ_ASSERT(!IsInsideNursery(&lir->object()->toConstant()->toObject()));
@@ -2642,17 +2635,16 @@ CodeGenerator::visitPostWriteBarrierV(LP
         masm.branchPtrInNurseryRange(Assembler::Equal, ToRegister(lir->object()), temp,
                                      ool->rejoin());
     }
 
     ValueOperand value = ToValue(lir, LPostWriteBarrierV::Input);
     masm.branchValueIsNurseryObject(Assembler::Equal, value, temp, ool->entry());
 
     masm.bind(ool->rejoin());
-#endif
 }
 
 void
 CodeGenerator::visitCallNative(LCallNative *call)
 {
     JSFunction *target = call->getSingleTarget();
     MOZ_ASSERT(target);
     MOZ_ASSERT(target->isNative());
@@ -6956,31 +6948,21 @@ CodeGenerator::visitIteratorStart(LItera
     masm.branchPtr(Assembler::NotEqual,
                    Address(obj, NativeObject::offsetOfElements()),
                    ImmPtr(js::emptyObjectElements),
                    ool->entry());
 
     // Write barrier for stores to the iterator. We only need to take a write
     // barrier if NativeIterator::obj is actually going to change.
     {
-#ifdef JSGC_GENERATIONAL
-        // Bug 867815: When using a nursery, we unconditionally take this out-
-        // of-line so that we do not have to post-barrier the store to
-        // NativeIter::obj. This just needs JIT support for the Cell* buffer.
+        // Bug 867815: Unconditionally take this out-of-line so that we do not
+        // have to post-barrier the store to NativeIter::obj. This just needs
+        // JIT support for the Cell* buffer.
         Address objAddr(niTemp, offsetof(NativeIterator, obj));
         masm.branchPtr(Assembler::NotEqual, objAddr, obj, ool->entry());
-#else
-        Label noBarrier;
-        masm.branchTestNeedsIncrementalBarrier(Assembler::Zero, &noBarrier);
-
-        Address objAddr(niTemp, offsetof(NativeIterator, obj));
-        masm.branchPtr(Assembler::NotEqual, objAddr, obj, ool->entry());
-
-        masm.bind(&noBarrier);
-#endif // !JSGC_GENERATIONAL
     }
 
     // Mark iterator as active.
     masm.storePtr(obj, Address(niTemp, offsetof(NativeIterator, obj)));
     masm.or32(Imm32(JSITER_ACTIVE), Address(niTemp, offsetof(NativeIterator, flags)));
 
     // Chain onto the active iterator stack.
     masm.loadPtr(AbsoluteAddress(gen->compartment->addressOfEnumerators()), temp1);
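
visitPostWriteBarrierO and visitPostWriteBarrierV now always build the out-of-line path; its slow call (PostWriteBarrier in VMFunctions.cpp) appends the written-to cell to the store buffer, and the next minor GC re-traces just those cells instead of the whole tenured heap. A self-contained model of that whole-cell remembered set is sketched below; StoreBufferModel is an invented name, not the real js::gc::StoreBuffer interface.

    #include <functional>
    #include <unordered_set>

    // Whole-cell remembered set: tenured cells that may have gained pointers
    // into the nursery since the last minor GC.
    class StoreBufferModel
    {
        std::unordered_set<void *> cells_;

      public:
        void putWholeCell(void *tenuredCell) { cells_.insert(tenuredCell); }

        // Minor GC: re-trace only the buffered cells as extra roots, then
        // start the next cycle with an empty buffer.
        void traceAndClear(const std::function<void(void *)> &traceCell) {
            for (void *cell : cells_)
                traceCell(cell);
            cells_.clear();
        }
    };
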
--- a/js/src/jit/CompileWrappers.cpp
+++ b/js/src/jit/CompileWrappers.cpp
@@ -171,23 +171,21 @@ CompileRuntime::DOMcallbacks()
 }
 
 const MathCache *
 CompileRuntime::maybeGetMathCache()
 {
     return runtime()->maybeGetMathCache();
 }
 
-#ifdef JSGC_GENERATIONAL
 const Nursery &
 CompileRuntime::gcNursery()
 {
     return runtime()->gc.nursery;
 }
-#endif
 
 Zone *
 CompileZone::zone()
 {
     return reinterpret_cast<Zone *>(this);
 }
 
 /* static */ CompileZone *
--- a/js/src/jit/CompileWrappers.h
+++ b/js/src/jit/CompileWrappers.h
@@ -75,19 +75,17 @@ class CompileRuntime
     bool isInsideNursery(gc::Cell *cell);
 #endif
 
     // DOM callbacks must be threadsafe (and will hopefully be removed soon).
     const DOMCallbacks *DOMcallbacks();
 
     const MathCache *maybeGetMathCache();
 
-#ifdef JSGC_GENERATIONAL
     const Nursery &gcNursery();
-#endif
 };
 
 class CompileZone
 {
     Zone *zone();
 
   public:
     static CompileZone *get(Zone *zone);
--- a/js/src/jit/IonBuilder.cpp
+++ b/js/src/jit/IonBuilder.cpp
@@ -7060,20 +7060,18 @@ jit::TypeSetIncludes(types::TypeSet *typ
         MOZ_CRASH("Bad input type");
     }
 }
 
 // Whether a write of the given value may need a post-write barrier for GC purposes.
 bool
 jit::NeedsPostBarrier(CompileInfo &info, MDefinition *value)
 {
-#ifdef JSGC_GENERATIONAL
     if (!GetJitContext()->runtime->gcNursery().exists())
         return false;
-#endif
     return info.executionMode() != ParallelExecution && value->mightBeType(MIRType_Object);
 }
 
 bool
 IonBuilder::setStaticName(JSObject *staticObject, PropertyName *name)
 {
     jsid id = NameToId(name);
 
@@ -8044,21 +8042,17 @@ IonBuilder::addTypedArrayLengthAndData(M
         tarr = &obj->toConstant()->value().toObject();
     else if (obj->resultTypeSet())
         tarr = obj->resultTypeSet()->getSingleton();
 
     if (tarr) {
         void *data = AnyTypedArrayViewData(tarr);
         // Bug 979449 - Optimistically embed the elements and use TI to
         //              invalidate if we move them.
-#ifdef JSGC_GENERATIONAL
         bool isTenured = !tarr->runtimeFromMainThread()->gc.nursery.isInside(data);
-#else
-        bool isTenured = true;
-#endif
         if (isTenured && tarr->hasSingletonType()) {
             // The 'data' pointer of TypedArrayObject can change in rare circumstances
             // (ArrayBufferObject::changeContents).
             types::TypeObjectKey *tarrType = types::TypeObjectKey::get(tarr);
             if (!tarrType->unknownProperties()) {
                 if (tarr->is<TypedArrayObject>())
                     tarrType->watchStateChangeForTypedArrayData(constraints());
 
@@ -8392,20 +8386,18 @@ IonBuilder::setElemTryTypedStatic(bool *
         return true;
 
     if (!object->resultTypeSet())
         return true;
     JSObject *tarrObj = object->resultTypeSet()->getSingleton();
     if (!tarrObj)
         return true;
 
-#ifdef JSGC_GENERATIONAL
     if (tarrObj->runtimeFromMainThread()->gc.nursery.isInside(AnyTypedArrayViewData(tarrObj)))
         return true;
-#endif
 
     types::TypeObjectKey *tarrType = types::TypeObjectKey::get(tarrObj);
     if (tarrType->unknownProperties())
         return true;
 
     Scalar::Type viewType = AnyTypedArrayType(tarrObj);
     MDefinition *ptr = convertShiftToMaskForStaticTypedArray(index, viewType);
     if (!ptr)
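
addTypedArrayLengthAndData and setElemTryTypedStatic now always consult the nursery before baking a typed array's data pointer into jitcode, because nursery-allocated buffers are relocated by the next minor GC. A hedged sketch of that decision; the range type and field layout here are assumptions for illustration only.

    #include <cstdint>

    // Hypothetical contiguous nursery address range.
    struct NurseryRangeModel
    {
        uintptr_t start, end;
        bool isInside(const void *p) const {
            uintptr_t a = reinterpret_cast<uintptr_t>(p);
            return a >= start && a < end;
        }
    };

    // A compiler may embed `data` as an immediate only when the buffer is
    // tenured: nursery-allocated buffers are moved by the next minor GC, so
    // an embedded copy of the pointer would go stale.
    inline bool canEmbedDataPointer(const NurseryRangeModel &nursery, const void *data)
    {
        return !nursery.isInside(data);
    }
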
--- a/js/src/jit/JitFrames.cpp
+++ b/js/src/jit/JitFrames.cpp
@@ -1099,17 +1099,16 @@ MarkBailoutFrame(JSTracer *trc, const Ji
 
         if (!snapIter.moreInstructions())
             break;
         snapIter.nextInstruction();
     };
 
 }
 
-#ifdef JSGC_GENERATIONAL
 template <typename T>
 void
 UpdateIonJSFrameForMinorGC(JSTracer *trc, const JitFrameIterator &frame)
 {
     // Minor GCs may move slots/elements allocated in the nursery. Update
     // any slots/elements pointers stored in this frame.
 
     JitFrameLayout *layout = (JitFrameLayout *)frame.fp();
@@ -1149,17 +1148,16 @@ UpdateIonJSFrameForMinorGC(JSTracer *trc
         if (trc->callback == gc::ForkJoinNursery::MinorGCCallback) {
             gc::ForkJoinNursery::forwardBufferPointer(trc, slots);
             continue;
         }
 #endif
         trc->runtime()->gc.nursery.forwardBufferPointer(slots);
     }
 }
-#endif
 
 static void
 MarkBaselineStubFrame(JSTracer *trc, const JitFrameIterator &frame)
 {
     // Mark the ICStub pointer stored in the stub frame. This is necessary
     // so that we don't destroy the stub code after unlinking the stub.
 
     MOZ_ASSERT(frame.type() == JitFrame_BaselineStub);
@@ -1469,17 +1467,16 @@ TopmostIonActivationCompartment(JSRuntim
         for (JitFrameIterator frames(activations); !frames.done(); ++frames) {
             if (frames.type() == JitFrame_IonJS)
                 return activations.activation()->compartment();
         }
     }
     return nullptr;
 }
 
-#ifdef JSGC_GENERATIONAL
 template <typename T>
 void UpdateJitActivationsForMinorGC(PerThreadData *ptd, JSTracer *trc)
 {
 #ifdef JSGC_FJGENERATIONAL
     MOZ_ASSERT(trc->runtime()->isHeapMinorCollecting() || trc->runtime()->isFJMinorCollecting());
 #else
     MOZ_ASSERT(trc->runtime()->isHeapMinorCollecting());
 #endif
@@ -1494,18 +1491,16 @@ void UpdateJitActivationsForMinorGC(PerT
 template
 void UpdateJitActivationsForMinorGC<Nursery>(PerThreadData *ptd, JSTracer *trc);
 
 #ifdef JSGC_FJGENERATIONAL
 template
 void UpdateJitActivationsForMinorGC<gc::ForkJoinNursery>(PerThreadData *ptd, JSTracer *trc);
 #endif
 
-#endif
-
 void
 GetPcScript(JSContext *cx, JSScript **scriptRes, jsbytecode **pcRes)
 {
     JitSpew(JitSpew_IonSnapshots, "Recover PC & Script from the last frame.");
 
     JSRuntime *rt = cx->runtime();
 
     // Recover the return address.
--- a/js/src/jit/JitFrames.h
+++ b/js/src/jit/JitFrames.h
@@ -274,20 +274,18 @@ void HandleParallelFailure(ResumeFromExc
 void EnsureExitFrame(CommonFrameLayout *frame);
 
 void MarkJitActivations(PerThreadData *ptd, JSTracer *trc);
 void MarkIonCompilerRoots(JSTracer *trc);
 
 JSCompartment *
 TopmostIonActivationCompartment(JSRuntime *rt);
 
-#ifdef JSGC_GENERATIONAL
 template<typename T>
 void UpdateJitActivationsForMinorGC(PerThreadData *ptd, JSTracer *trc);
-#endif
 
 static inline uint32_t
 MakeFrameDescriptor(uint32_t frameSize, FrameType type)
 {
     return (frameSize << FRAMESIZE_SHIFT) | type;
 }
 
 // Returns the JSScript associated with the topmost JIT frame.
--- a/js/src/jit/Linker.h
+++ b/js/src/jit/Linker.h
@@ -57,20 +57,18 @@ class Linker
         JitCode *code = JitCode::New<allowGC>(cx, codeStart, bytesNeeded - headerSize,
                                               headerSize, pool, kind);
         if (!code)
             return nullptr;
         if (masm.oom())
             return fail(cx);
         code->copyFrom(masm);
         masm.link(code);
-#ifdef JSGC_GENERATIONAL
         if (masm.embedsNurseryPointers())
             cx->runtime()->gc.storeBuffer.putWholeCellFromMainThread(code);
-#endif
         return code;
     }
 
   public:
     explicit Linker(MacroAssembler &masm)
       : masm(masm)
     {
         masm.finish();
--- a/js/src/jit/Lowering.cpp
+++ b/js/src/jit/Lowering.cpp
@@ -2488,17 +2488,16 @@ LIRGenerator::visitMonitorTypes(MMonitor
     useBox(lir, LMonitorTypes::Input, ins->input());
     assignSnapshot(lir, Bailout_MonitorTypes);
     add(lir, ins);
 }
 
 void
 LIRGenerator::visitPostWriteBarrier(MPostWriteBarrier *ins)
 {
-#ifdef JSGC_GENERATIONAL
     switch (ins->value()->type()) {
       case MIRType_Object:
       case MIRType_ObjectOrNull: {
         LDefinition tmp = needTempForPostBarrier() ? temp() : LDefinition::BogusTemp();
         LPostWriteBarrierO *lir =
             new(alloc()) LPostWriteBarrierO(useRegisterOrConstant(ins->object()),
                                             useRegister(ins->value()), tmp);
         add(lir, ins);
@@ -2514,17 +2513,16 @@ LIRGenerator::visitPostWriteBarrier(MPos
         assignSafepoint(lir, ins);
         break;
       }
       default:
         // Currently, only objects can be in the nursery. Other instruction
         // types cannot hold nursery pointers.
         break;
     }
-#endif // JSGC_GENERATIONAL
 }
 
 void
 LIRGenerator::visitArrayLength(MArrayLength *ins)
 {
     MOZ_ASSERT(ins->elements()->type() == MIRType_Elements);
     define(new(alloc()) LArrayLength(useRegisterAtStart(ins->elements())), ins);
 }
--- a/js/src/jit/MacroAssembler.cpp
+++ b/js/src/jit/MacroAssembler.cpp
@@ -17,19 +17,17 @@
 #include "jit/BaselineIC.h"
 #include "jit/BaselineJIT.h"
 #include "jit/Lowering.h"
 #include "jit/MIR.h"
 #include "jit/ParallelFunctions.h"
 #include "vm/ForkJoin.h"
 #include "vm/TraceLogging.h"
 
-#ifdef JSGC_GENERATIONAL
-# include "jsgcinlines.h"
-#endif
+#include "jsgcinlines.h"
 #include "jsinferinlines.h"
 #include "jsobjinlines.h"
 #include "vm/Interpreter-inl.h"
 
 using namespace js;
 using namespace js::jit;
 
 using JS::GenericNaN;
@@ -634,34 +632,29 @@ MacroAssembler::checkAllocatorState(Labe
     if (GetJitContext()->compartment->hasObjectMetadataCallback())
         jump(fail);
 }
 
 // Inline version of ShouldNurseryAllocate.
 bool
 MacroAssembler::shouldNurseryAllocate(gc::AllocKind allocKind, gc::InitialHeap initialHeap)
 {
-#ifdef JSGC_GENERATIONAL
     // Note that Ion elides barriers on writes to objects known to be in the
     // nursery, so any allocation that can be made into the nursery must be made
     // into the nursery, even if the nursery is disabled. At runtime these will
     // take the out-of-line path, which is required to insert a barrier for the
     // initializing writes.
     return IsNurseryAllocable(allocKind) && initialHeap != gc::TenuredHeap;
-#else
-    return false;
-#endif
 }
 
 // Inline version of Nursery::allocateObject.
 void
 MacroAssembler::nurseryAllocate(Register result, Register slots, gc::AllocKind allocKind,
                                 size_t nDynamicSlots, gc::InitialHeap initialHeap, Label *fail)
 {
-#ifdef JSGC_GENERATIONAL
     MOZ_ASSERT(IsNurseryAllocable(allocKind));
     MOZ_ASSERT(initialHeap != gc::TenuredHeap);
 
     // We still need to allocate in the nursery, per the comment in
     // shouldNurseryAllocate; however, we need to insert into hugeSlots, so
     // bail to do the nursery allocation in the interpreter.
     if (nDynamicSlots >= Nursery::MaxNurserySlots) {
         jump(fail);
@@ -676,17 +669,16 @@ MacroAssembler::nurseryAllocate(Register
     int totalSize = thingSize + nDynamicSlots * sizeof(HeapSlot);
     loadPtr(AbsoluteAddress(nursery.addressOfPosition()), result);
     computeEffectiveAddress(Address(result, totalSize), temp);
     branchPtr(Assembler::Below, AbsoluteAddress(nursery.addressOfCurrentEnd()), temp, fail);
     storePtr(temp, AbsoluteAddress(nursery.addressOfPosition()));
 
     if (nDynamicSlots)
         computeEffectiveAddress(Address(result, thingSize), slots);
-#endif // JSGC_GENERATIONAL
 }
 
 // Inlined version of FreeList::allocate.
 void
 MacroAssembler::freeListAllocate(Register result, Register temp, gc::AllocKind allocKind, Label *fail)
 {
     CompileZone *zone = GetJitContext()->compartment->zone();
     int thingSize = int(gc::Arena::thingSize(allocKind));
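
nurseryAllocate above emits the inline fast path of nursery allocation: load the position pointer through an AbsoluteAddress, bump it by the requested size, compare it against the current end, and branch to the fallback label when the nursery is full, so a hit costs only a handful of instructions and no call. The same shape as a standalone C++ bump allocator; BumpNursery is a toy model, not js::Nursery.

    #include <cstddef>
    #include <cstdint>

    // Toy stand-in for the nursery's bump allocator.
    class BumpNursery
    {
        uintptr_t position_;   // next free byte
        uintptr_t end_;        // one past the last usable byte

      public:
        BumpNursery(void *base, size_t bytes)
          : position_(reinterpret_cast<uintptr_t>(base)),
            end_(reinterpret_cast<uintptr_t>(base) + bytes)
        {}

        // Mirrors the jitted fast path: compute the new position, take the
        // slow path (here: return nullptr) if it passes the end, otherwise
        // commit it and hand back the old position.
        void *allocate(size_t thingSize, size_t dynamicSlotBytes) {
            uintptr_t result = position_;
            uintptr_t newPosition = result + thingSize + dynamicSlotBytes;
            if (newPosition > end_)
                return nullptr;        // caller falls back to the interpreter/GC
            position_ = newPosition;
            return reinterpret_cast<void *>(result);
        }
    };
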
--- a/js/src/jit/VMFunctions.cpp
+++ b/js/src/jit/VMFunctions.cpp
@@ -538,42 +538,38 @@ MallocWrapper(JSRuntime *rt, size_t nbyt
 
 JSObject *
 NewCallObject(JSContext *cx, HandleShape shape, HandleTypeObject type, uint32_t lexicalBegin)
 {
     JSObject *obj = CallObject::create(cx, shape, type, lexicalBegin);
     if (!obj)
         return nullptr;
 
-#ifdef JSGC_GENERATIONAL
     // The JIT creates call objects in the nursery, so elides barriers for
     // the initializing writes. The interpreter, however, may have allocated
     // the call object tenured, so barrier as needed before re-entering.
     if (!IsInsideNursery(obj))
         cx->runtime()->gc.storeBuffer.putWholeCellFromMainThread(obj);
-#endif
 
     return obj;
 }
 
 JSObject *
 NewSingletonCallObject(JSContext *cx, HandleShape shape, uint32_t lexicalBegin)
 {
     JSObject *obj = CallObject::createSingleton(cx, shape, lexicalBegin);
     if (!obj)
         return nullptr;
 
-#ifdef JSGC_GENERATIONAL
     // The JIT creates call objects in the nursery, so elides barriers for
     // the initializing writes. The interpreter, however, may have allocated
     // the call object tenured, so barrier as needed before re-entering.
     MOZ_ASSERT(!IsInsideNursery(obj),
                "singletons are created in the tenured heap");
     cx->runtime()->gc.storeBuffer.putWholeCellFromMainThread(obj);
-#endif
 
     return obj;
 }
 
 JSObject *
 NewStringObject(JSContext *cx, HandleString str)
 {
     return StringObject::create(cx, str);
@@ -699,34 +695,32 @@ FilterArgumentsOrEval(JSContext *cx, JSS
 
     static const char16_t arguments[] = {'a', 'r', 'g', 'u', 'm', 'e', 'n', 't', 's'};
     static const char16_t eval[] = {'e', 'v', 'a', 'l'};
 
     return !StringHasPattern(linear, arguments, mozilla::ArrayLength(arguments)) &&
         !StringHasPattern(linear, eval, mozilla::ArrayLength(eval));
 }
 
-#ifdef JSGC_GENERATIONAL
 void
 PostWriteBarrier(JSRuntime *rt, JSObject *obj)
 {
     MOZ_ASSERT(!IsInsideNursery(obj));
     rt->gc.storeBuffer.putWholeCellFromMainThread(obj);
 }
 
 void
 PostGlobalWriteBarrier(JSRuntime *rt, JSObject *obj)
 {
     MOZ_ASSERT(obj->is<GlobalObject>());
     if (!obj->compartment()->globalWriteBarriered) {
         PostWriteBarrier(rt, obj);
         obj->compartment()->globalWriteBarriered = true;
     }
 }
-#endif
 
 uint32_t
 GetIndexFromString(JSString *str)
 {
     // Masks the return value UINT32_MAX as failure to get the index.
     // I.e. it is impossible to distinguish between failing to get the index
     // or the actual index UINT32_MAX.
 
--- a/js/src/jit/VMFunctions.h
+++ b/js/src/jit/VMFunctions.h
@@ -721,20 +721,18 @@ bool OperatorInI(JSContext *cx, uint32_t
 bool GetIntrinsicValue(JSContext *cx, HandlePropertyName name, MutableHandleValue rval);
 
 bool CreateThis(JSContext *cx, HandleObject callee, MutableHandleValue rval);
 
 void GetDynamicName(JSContext *cx, JSObject *scopeChain, JSString *str, Value *vp);
 
 bool FilterArgumentsOrEval(JSContext *cx, JSString *str);
 
-#ifdef JSGC_GENERATIONAL
 void PostWriteBarrier(JSRuntime *rt, JSObject *obj);
 void PostGlobalWriteBarrier(JSRuntime *rt, JSObject *obj);
-#endif
 
 uint32_t GetIndexFromString(JSString *str);
 
 bool DebugPrologue(JSContext *cx, BaselineFrame *frame, jsbytecode *pc, bool *mustReturn);
 bool DebugEpilogue(JSContext *cx, BaselineFrame *frame, jsbytecode *pc, bool ok);
 bool DebugEpilogueOnBaselineReturn(JSContext *cx, BaselineFrame *frame, jsbytecode *pc);
 
 JSObject *CreateGenerator(JSContext *cx, BaselineFrame *frame);
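
PostGlobalWriteBarrier, declared above and defined in VMFunctions.cpp, layers a dedup flag on top of the plain barrier: the global object is written so often that the compartment records it in the store buffer at most once, guarded by globalWriteBarriered. A toy version of that pattern follows; the names are invented and the reset point of the flag (assumed here to be whenever the buffered entries are drained) is an assumption, not something the patch shows.

    #include <unordered_set>

    struct CompartmentModel
    {
        // Assumed to be cleared whenever the buffered entries are drained.
        bool globalWriteBarriered = false;
    };

    struct StoreBufferOwnerModel
    {
        std::unordered_set<void *> wholeCellBuffer;

        void postWriteBarrier(void *obj) { wholeCellBuffer.insert(obj); }

        // Barrier for writes through the global object: it is written so
        // frequently that it is buffered at most once per cycle.
        void postGlobalWriteBarrier(CompartmentModel &comp, void *global) {
            if (!comp.globalWriteBarriered) {
                postWriteBarrier(global);
                comp.globalWriteBarriered = true;
            }
        }
    };
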
--- a/js/src/jit/arm/MacroAssembler-arm.cpp
+++ b/js/src/jit/arm/MacroAssembler-arm.cpp
@@ -4682,18 +4682,16 @@ MacroAssemblerARMCompat::jumpWithPatch(R
     ARMBuffer::PoolEntry pe;
     BufferOffset bo = as_BranchPool(0xdeadbeef, label, &pe, cond);
     // Fill in a new CodeOffset with both the load and the pool entry that the
     // instruction loads from.
     CodeOffsetJump ret(bo.getOffset(), pe.index());
     return ret;
 }
 
-#ifdef JSGC_GENERATIONAL
-
 void
 MacroAssemblerARMCompat::branchPtrInNurseryRange(Condition cond, Register ptr, Register temp,
                                                  Label *label)
 {
     MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
     MOZ_ASSERT(ptr != temp);
     MOZ_ASSERT(ptr != secondScratchReg_);
 
@@ -4990,10 +4988,8 @@ js::jit::MacroAssemblerARMCompat::atomic
 template void
 js::jit::MacroAssemblerARMCompat::atomicFetchOp(int nbytes, bool signExtend, AtomicOp op,
                                                 const Register &value, const Address &mem,
                                                 Register temp, Register output);
 template void
 js::jit::MacroAssemblerARMCompat::atomicFetchOp(int nbytes, bool signExtend, AtomicOp op,
                                                 const Register &value, const BaseIndex &mem,
                                                 Register temp, Register output);
-
-#endif
--- a/js/src/jit/arm/MacroAssembler-arm.h
+++ b/js/src/jit/arm/MacroAssembler-arm.h
@@ -1817,20 +1817,18 @@ class MacroAssemblerARMCompat : public M
     BufferOffset ma_BoundsCheck(Register bounded) {
         return as_cmp(bounded, Imm8(0));
     }
 
     void moveFloat32(FloatRegister src, FloatRegister dest) {
         as_vmov(VFPRegister(dest).singleOverlay(), VFPRegister(src).singleOverlay());
     }
 
-#ifdef JSGC_GENERATIONAL
     void branchPtrInNurseryRange(Condition cond, Register ptr, Register temp, Label *label);
     void branchValueIsNurseryObject(Condition cond, ValueOperand value, Register temp, Label *label);
-#endif
 
     void loadAsmJSActivation(Register dest) {
         loadPtr(Address(GlobalReg, AsmJSActivationGlobalDataOffset - AsmJSGlobalRegBias), dest);
     }
     void loadAsmJSHeapRegisterFromGlobalData() {
         loadPtr(Address(GlobalReg, AsmJSHeapGlobalDataOffset - AsmJSGlobalRegBias), HeapReg);
     }
     void pushReturnAddress() {
--- a/js/src/jit/mips/MacroAssembler-mips.cpp
+++ b/js/src/jit/mips/MacroAssembler-mips.cpp
@@ -3636,18 +3636,16 @@ MacroAssemblerMIPSCompat::toggledCall(Ji
     } else {
         as_nop();
         as_nop();
     }
     MOZ_ASSERT(nextOffset().getOffset() - offset.offset() == ToggledCallSize(nullptr));
     return offset;
 }
 
-#ifdef JSGC_GENERATIONAL
-
 void
 MacroAssemblerMIPSCompat::branchPtrInNurseryRange(Condition cond, Register ptr, Register temp,
                                                   Label *label)
 {
     MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
     MOZ_ASSERT(ptr != temp);
     MOZ_ASSERT(ptr != SecondScratchReg);
 
@@ -3666,10 +3664,8 @@ MacroAssemblerMIPSCompat::branchValueIsN
 
     Label done;
 
     branchTestObject(Assembler::NotEqual, value, cond == Assembler::Equal ? &done : label);
     branchPtrInNurseryRange(cond, value.payloadReg(), temp, label);
 
     bind(&done);
 }
-
-#endif
--- a/js/src/jit/mips/MacroAssembler-mips.h
+++ b/js/src/jit/mips/MacroAssembler-mips.h
@@ -1448,21 +1448,19 @@ public:
         ma_liPatchable(bounded, Imm32(0));
         return bo;
     }
 
     void moveFloat32(FloatRegister src, FloatRegister dest) {
         as_movs(dest, src);
     }
 
-#ifdef JSGC_GENERATIONAL
     void branchPtrInNurseryRange(Condition cond, Register ptr, Register temp, Label *label);
     void branchValueIsNurseryObject(Condition cond, ValueOperand value, Register temp,
                                     Label *label);
-#endif
 
     void loadAsmJSActivation(Register dest) {
         loadPtr(Address(GlobalReg, AsmJSActivationGlobalDataOffset - AsmJSGlobalRegBias), dest);
     }
     void loadAsmJSHeapRegisterFromGlobalData() {
         MOZ_ASSERT(Imm16::IsInSignedRange(AsmJSHeapGlobalDataOffset - AsmJSGlobalRegBias));
         loadPtr(Address(GlobalReg, AsmJSHeapGlobalDataOffset - AsmJSGlobalRegBias), HeapReg);
     }
--- a/js/src/jit/none/MacroAssembler-none.h
+++ b/js/src/jit/none/MacroAssembler-none.h
@@ -408,20 +408,18 @@ class MacroAssemblerNone : public Assemb
     void convertUInt32ToDouble(Register, FloatRegister) { MOZ_CRASH(); }
     void convertUInt32ToFloat32(Register, FloatRegister) { MOZ_CRASH(); }
     void inc64(AbsoluteAddress) { MOZ_CRASH(); }
     void incrementInt32Value(Address) { MOZ_CRASH(); }
     void ensureDouble(ValueOperand, FloatRegister, Label *) { MOZ_CRASH(); }
     void handleFailureWithHandler(void *) { MOZ_CRASH(); }
     void makeFrameDescriptor(Register, FrameType) { MOZ_CRASH(); }
 
-#ifdef JSGC_GENERATIONAL
     void branchPtrInNurseryRange(Condition, Register, Register, Label *) { MOZ_CRASH(); }
     void branchValueIsNurseryObject(Condition, ValueOperand, Register, Label *) { MOZ_CRASH(); }
-#endif
 
     void buildFakeExitFrame(Register, uint32_t *) { MOZ_CRASH(); }
     bool buildOOLFakeExitFrame(void *) { MOZ_CRASH(); }
     void loadAsmJSActivation(Register) { MOZ_CRASH(); }
     void loadAsmJSHeapRegisterFromGlobalData() { MOZ_CRASH(); }
     void memIntToValue(Address, Address) { MOZ_CRASH(); }
 
     void setPrinter(Sprinter *) { MOZ_CRASH(); }
--- a/js/src/jit/shared/Assembler-shared.h
+++ b/js/src/jit/shared/Assembler-shared.h
@@ -918,25 +918,23 @@ class AssemblerShared
         return !enoughMemory_;
     }
 
     bool embedsNurseryPointers() const {
         return embedsNurseryPointers_;
     }
 
     ImmGCPtr noteMaybeNurseryPtr(ImmMaybeNurseryPtr ptr) {
-#ifdef JSGC_GENERATIONAL
         if (ptr.value && gc::IsInsideNursery(ptr.value)) {
             // FIXME: Ideally we'd assert this in all cases, but PJS needs to
             //        compile IC's from off-main-thread; it will not touch
             //        nursery pointers, however.
             MOZ_ASSERT(GetJitContext()->runtime->onMainThread());
             embedsNurseryPointers_ = true;
         }
-#endif
         return ImmGCPtr(ptr);
     }
 
     void append(const CallSiteDesc &desc, size_t currentOffset, size_t framePushed) {
         // framePushed does not include sizeof(AsmJSFrame), so add it in here (see
         // CallSite::stackDepth).
         CallSite callsite(desc, currentOffset, framePushed + sizeof(AsmJSFrame));
         enoughMemory_ &= callsites_.append(callsite);
--- a/js/src/jit/x64/MacroAssembler-x64.cpp
+++ b/js/src/jit/x64/MacroAssembler-x64.cpp
@@ -492,18 +492,16 @@ MacroAssemblerX64::storeUnboxedValue(Con
 template void
 MacroAssemblerX64::storeUnboxedValue(ConstantOrRegister value, MIRType valueType, const Address &dest,
                                      MIRType slotType);
 
 template void
 MacroAssemblerX64::storeUnboxedValue(ConstantOrRegister value, MIRType valueType, const BaseIndex &dest,
                                      MIRType slotType);
 
-#ifdef JSGC_GENERATIONAL
-
 void
 MacroAssemblerX64::branchPtrInNurseryRange(Condition cond, Register ptr, Register temp, Label *label)
 {
     MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
     MOZ_ASSERT(ptr != temp);
     MOZ_ASSERT(ptr != ScratchReg);
 
     const Nursery &nursery = GetJitContext()->runtime->gcNursery();
@@ -523,10 +521,8 @@ MacroAssemblerX64::branchValueIsNurseryO
     const Nursery &nursery = GetJitContext()->runtime->gcNursery();
     Value start = ObjectValue(*reinterpret_cast<JSObject *>(nursery.start()));
 
     movePtr(ImmWord(-ptrdiff_t(start.asRawBits())), ScratchReg);
     addPtr(value.valueReg(), ScratchReg);
     branchPtr(cond == Assembler::Equal ? Assembler::Below : Assembler::AboveOrEqual,
               ScratchReg, Imm32(nursery.nurserySize()), label);
 }
-
-#endif
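
branchPtrInNurseryRange and branchValueIsNurseryObject rely on the nursery being one contiguous block, so membership compiles down to a subtraction and a single unsigned comparison; the Value flavour above folds the same comparison into the boxed bits by adding the negated nursery start. The pointer version of the trick in plain C++, as an illustrative sketch:

    #include <cstdint>

    // One-compare range test: p lies in [start, start + size) exactly when the
    // unsigned difference p - start is below size; addresses below start wrap
    // around to a huge unsigned value and fail the same comparison.
    inline bool inNurseryRange(uintptr_t start, uintptr_t size, const void *p)
    {
        return reinterpret_cast<uintptr_t>(p) - start < size;
    }
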
--- a/js/src/jit/x64/MacroAssembler-x64.h
+++ b/js/src/jit/x64/MacroAssembler-x64.h
@@ -1428,20 +1428,18 @@ class MacroAssemblerX64 : public MacroAs
         uint8_t *target = globalData + globalDataOffset;
         ((int32_t *)nextInsn)[-1] = target - nextInsn;
     }
     void memIntToValue(Address Source, Address Dest) {
         load32(Source, ScratchReg);
         storeValue(JSVAL_TYPE_INT32, ScratchReg, Dest);
     }
 
-#ifdef JSGC_GENERATIONAL
     void branchPtrInNurseryRange(Condition cond, Register ptr, Register temp, Label *label);
     void branchValueIsNurseryObject(Condition cond, ValueOperand value, Register temp, Label *label);
-#endif
 };
 
 typedef MacroAssemblerX64 MacroAssemblerSpecific;
 
 } // namespace jit
 } // namespace js
 
 #endif /* jit_x64_MacroAssembler_x64_h */
--- a/js/src/jit/x86/MacroAssembler-x86.cpp
+++ b/js/src/jit/x86/MacroAssembler-x86.cpp
@@ -488,18 +488,16 @@ MacroAssemblerX86::storeUnboxedValue(Con
 template void
 MacroAssemblerX86::storeUnboxedValue(ConstantOrRegister value, MIRType valueType, const Address &dest,
                                      MIRType slotType);
 
 template void
 MacroAssemblerX86::storeUnboxedValue(ConstantOrRegister value, MIRType valueType, const BaseIndex &dest,
                                      MIRType slotType);
 
-#ifdef JSGC_GENERATIONAL
-
 void
 MacroAssemblerX86::branchPtrInNurseryRange(Condition cond, Register ptr, Register temp,
                                            Label *label)
 {
     MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
     MOZ_ASSERT(ptr != temp);
     MOZ_ASSERT(temp != InvalidReg);  // A temp register is required for x86.
 
@@ -518,10 +516,8 @@ MacroAssemblerX86::branchValueIsNurseryO
 
     Label done;
 
     branchTestObject(Assembler::NotEqual, value, cond == Assembler::Equal ? &done : label);
     branchPtrInNurseryRange(cond, value.payloadReg(), temp, label);
 
     bind(&done);
 }
-
-#endif
--- a/js/src/jit/x86/MacroAssembler-x86.h
+++ b/js/src/jit/x86/MacroAssembler-x86.h
@@ -1163,20 +1163,18 @@ class MacroAssemblerX86 : public MacroAs
 
     void callWithExitFrame(JitCode *target, Register dynStack) {
         addPtr(Imm32(framePushed()), dynStack);
         makeFrameDescriptor(dynStack, JitFrame_IonJS);
         Push(dynStack);
         call(target);
     }
 
-#ifdef JSGC_GENERATIONAL
     void branchPtrInNurseryRange(Condition cond, Register ptr, Register temp, Label *label);
     void branchValueIsNurseryObject(Condition cond, ValueOperand value, Register temp, Label *label);
-#endif
 };
 
 typedef MacroAssemblerX86 MacroAssemblerSpecific;
 
 } // namespace jit
 } // namespace js
 
 #endif /* jit_x86_MacroAssembler_x86_h */
--- a/js/src/js-config.h.in
+++ b/js/src/js-config.h.in
@@ -26,19 +26,16 @@
 
 /* Define to 1 if SpiderMonkey should support the ability to perform
    entirely too much GC.  */
 #undef JS_GC_ZEAL
 
 /* Define to 1 if SpiderMonkey should use small chunks. */
 #undef JS_GC_SMALL_CHUNK_SIZE
 
-/* Define to 1 if SpiderMonkey should use Generational GC. */
-#undef JSGC_GENERATIONAL
-
 /* Define to 1 if SpiderMonkey should use Compacting GC. */
 #undef JSGC_COMPACTING
 
 /* Define to 1 if the <endian.h> header is present and
    useable.  See jscpucfg.h.  */
 #undef JS_HAVE_ENDIAN_H
 
 /* Define to 1 if the <machine/endian.h> header is present and
--- a/js/src/jsapi-tests/testGCHeapPostBarriers.cpp
+++ b/js/src/jsapi-tests/testGCHeapPostBarriers.cpp
@@ -1,17 +1,15 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-#ifdef JSGC_GENERATIONAL
-
 #include "js/RootingAPI.h"
 #include "jsapi-tests/tests.h"
 
 BEGIN_TEST(testGCHeapPostBarriers)
 {
     /* Sanity check - objects start in the nursery and then become tenured. */
     JS_GC(cx->runtime());
     JS::RootedObject obj(cx, NurseryObject());
@@ -80,10 +78,8 @@ JSFunction *NurseryFunction()
     /*
      * We don't actually use the function as a function, so here we cheat and
      * cast a JSObject.
      */
     return static_cast<JSFunction *>(NurseryObject());
 }
 
 END_TEST(testGCHeapPostBarriers)
-
-#endif
--- a/js/src/jsapi-tests/testGCStoreBufferRemoval.cpp
+++ b/js/src/jsapi-tests/testGCStoreBufferRemoval.cpp
@@ -1,17 +1,15 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-#ifdef JSGC_GENERATIONAL
-
 #include "gc/Barrier.h"
 #include "jsapi-tests/tests.h"
 
 using namespace JS;
 using namespace js;
 
 struct AutoIgnoreRootingHazards {
     // Force a nontrivial destructor so the compiler sees the whole RAII scope
@@ -116,10 +114,8 @@ BEGIN_TEST(testGCStoreBufferRemoval)
     return true;
 }
 
 JSObject *NurseryObject()
 {
     return JS_NewObject(cx, nullptr, JS::NullPtr(), JS::NullPtr());
 }
 END_TEST(testGCStoreBufferRemoval)
-
-#endif
--- a/js/src/jsapi-tests/testIsInsideNursery.cpp
+++ b/js/src/jsapi-tests/testIsInsideNursery.cpp
@@ -2,18 +2,16 @@
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "jsapi-tests/tests.h"
 
-#ifdef JSGC_GENERATIONAL
-
 BEGIN_TEST(testIsInsideNursery)
 {
     /* Non-GC things are never inside the nursery. */
     CHECK(!rt->gc.nursery.isInside(rt));
     CHECK(!rt->gc.nursery.isInside((void *)nullptr));
 
     JS_GC(rt);
 
@@ -25,10 +23,8 @@ BEGIN_TEST(testIsInsideNursery)
     JS_GC(rt);
 
     /* And are tenured if still live after a GC. */
     CHECK(!js::gc::IsInsideNursery(object));
 
     return true;
 }
 END_TEST(testIsInsideNursery)
-
-#endif
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -530,21 +530,19 @@ struct JSContext : public js::ExclusiveC
     enum MaybeAllowCrossCompartment {
         DONT_ALLOW_CROSS_COMPARTMENT = false,
         ALLOW_CROSS_COMPARTMENT = true
     };
     inline JSScript *currentScript(jsbytecode **pc = nullptr,
                                    MaybeAllowCrossCompartment = DONT_ALLOW_CROSS_COMPARTMENT) const;
 
     // The generational GC nursery may only be used on the main thread.
-#ifdef JSGC_GENERATIONAL
     inline js::Nursery &nursery() {
         return runtime_->gc.nursery;
     }
-#endif
 
     void minorGC(JS::gcreason::Reason reason) {
         runtime_->gc.minorGC(this, reason);
     }
 
     void gcIfNeeded() {
         runtime_->gc.gcIfNeeded(this);
     }
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -177,18 +177,16 @@ JSCompartment::ensureJitCompartmentExist
         js_delete(jitCompartment_);
         jitCompartment_ = nullptr;
         return false;
     }
 
     return true;
 }
 
-#ifdef JSGC_GENERATIONAL
-
 /*
  * This class is used to add a post barrier on the crossCompartmentWrappers map,
  * as the key is calculated based on objects which may be moved by generational
  * GC.
  */
 class WrapperMapRef : public BufferableRef
 {
     WrapperMap *map;
@@ -233,38 +231,34 @@ JSCompartment::checkWrapperMapAfterMovin
         CheckGCThingAfterMovingGC(static_cast<Cell *>(e.front().value().get().toGCThing()));
 
         WrapperMap::Ptr ptr = crossCompartmentWrappers.lookup(key);
         MOZ_ASSERT(ptr.found() && &*ptr == &e.front());
     }
 }
 #endif
 
-#endif
-
 bool
 JSCompartment::putWrapper(JSContext *cx, const CrossCompartmentKey &wrapped, const js::Value &wrapper)
 {
     MOZ_ASSERT(wrapped.wrapped);
     MOZ_ASSERT(!IsPoisonedPtr(wrapped.wrapped));
     MOZ_ASSERT(!IsPoisonedPtr(wrapped.debugger));
     MOZ_ASSERT(!IsPoisonedPtr(wrapper.toGCThing()));
     MOZ_ASSERT_IF(wrapped.kind == CrossCompartmentKey::StringWrapper, wrapper.isString());
     MOZ_ASSERT_IF(wrapped.kind != CrossCompartmentKey::StringWrapper, wrapper.isObject());
     bool success = crossCompartmentWrappers.put(wrapped, ReadBarriered<Value>(wrapper));
 
-#ifdef JSGC_GENERATIONAL
     /* There's no point allocating wrappers in the nursery since we will tenure them anyway. */
     MOZ_ASSERT(!IsInsideNursery(static_cast<gc::Cell *>(wrapper.toGCThing())));
 
     if (success && (IsInsideNursery(wrapped.wrapped) || IsInsideNursery(wrapped.debugger))) {
         WrapperMapRef ref(&crossCompartmentWrappers, wrapped);
         cx->runtime()->gc.storeBuffer.putGeneric(ref);
     }
-#endif
 
     return success;
 }
 
 static JSString *
 CopyStringPure(JSContext *cx, JSString *str)
 {
     /*
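
WrapperMapRef in jscompartment.cpp exists because the cross-compartment wrapper map is keyed on objects a minor GC can move; rather than pinning those keys, putWrapper buffers a small ref that redoes the lookup and re-keys the entry after the collection. A standalone sketch of rekey-after-move over an ordinary map; the forwarding function and the int value type are stand-ins for illustration.

    #include <unordered_map>

    // Maps an old cell address to its post-GC address; returns the pointer
    // unchanged for cells that did not move. A stand-in for the real
    // forwarding machinery.
    using ForwardFn = void *(*)(void *);

    // Re-key a map entry whose key object was moved by a minor GC.
    inline void rekeyAfterMinorGC(std::unordered_map<void *, int> &map,
                                  void *oldKey, ForwardFn forward)
    {
        auto it = map.find(oldKey);
        if (it == map.end())
            return;                    // the entry was removed in the meantime
        void *newKey = forward(oldKey);
        if (newKey == oldKey)
            return;                    // the key did not move
        int value = it->second;
        map.erase(it);
        map.emplace(newKey, value);
    }
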
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -1015,20 +1015,18 @@ DumpHeapVisitRoot(JSTracer *trc, void **
     char buffer[1024];
     fprintf(dtrc->output, "%p %c %s\n", *thingp, MarkDescriptor(*thingp),
             dtrc->getTracingEdgeName(buffer, sizeof(buffer)));
 }
 
 void
 js::DumpHeapComplete(JSRuntime *rt, FILE *fp, js::DumpHeapNurseryBehaviour nurseryBehaviour)
 {
-#ifdef JSGC_GENERATIONAL
     if (nurseryBehaviour == js::CollectNurseryBeforeDump)
         rt->gc.evictNursery(JS::gcreason::API);
-#endif
 
     DumpHeapTracer dtrc(fp, rt, DumpHeapVisitRoot, TraceWeakMapKeysValues);
     TraceRuntime(&dtrc);
 
     fprintf(dtrc.output, "==========\n");
 
     dtrc.setTraceCallback(DumpHeapVisitChild);
     IterateZonesCompartmentsArenasCells(rt, &dtrc,
@@ -1140,30 +1138,30 @@ JS::IsCompactingGCEnabled(JSRuntime *rt)
     return rt->gc.isCompactingGCEnabled();
 #else
     return false;
 #endif
 }
 
 JS::AutoDisableGenerationalGC::AutoDisableGenerationalGC(JSRuntime *rt)
   : gc(&rt->gc)
-#if defined(JSGC_GENERATIONAL) && defined(JS_GC_ZEAL)
+#ifdef JS_GC_ZEAL
   , restartVerifier(false)
 #endif
 {
-#if defined(JSGC_GENERATIONAL) && defined(JS_GC_ZEAL)
+#ifdef JS_GC_ZEAL
     restartVerifier = gc->endVerifyPostBarriers();
 #endif
     gc->disableGenerationalGC();
 }
 
 JS::AutoDisableGenerationalGC::~AutoDisableGenerationalGC()
 {
     gc->enableGenerationalGC();
-#if defined(JSGC_GENERATIONAL) && defined(JS_GC_ZEAL)
+#ifdef JS_GC_ZEAL
     if (restartVerifier) {
         MOZ_ASSERT(gc->isGenerationalGCEnabled());
         gc->startVerifyPostBarriers();
     }
 #endif
 }
 
 extern JS_FRIEND_API(bool)
@@ -1442,17 +1440,16 @@ js::IsInRequest(JSContext *cx)
 }
 
 bool
 js::HasObjectMovedOp(JSObject *obj) {
     return !!GetObjectClass(obj)->ext.objectMovedOp;
 }
 #endif
 
-#ifdef JSGC_GENERATIONAL
 JS_FRIEND_API(void)
 JS_StoreObjectPostBarrierCallback(JSContext* cx,
                                   void (*callback)(JSTracer *trc, JSObject *key, void *data),
                                   JSObject *key, void *data)
 {
     JSRuntime *rt = cx->runtime();
     if (IsInsideNursery(key))
         rt->gc.storeBuffer.putCallback(callback, key, data);
@@ -1462,15 +1459,14 @@ extern JS_FRIEND_API(void)
 JS_StoreStringPostBarrierCallback(JSContext* cx,
                                   void (*callback)(JSTracer *trc, JSString *key, void *data),
                                   JSString *key, void *data)
 {
     JSRuntime *rt = cx->runtime();
     if (IsInsideNursery(key))
         rt->gc.storeBuffer.putCallback(callback, key, data);
 }
-#endif /* JSGC_GENERATIONAL */
 
 JS_FRIEND_API(bool)
 js::ForwardToNative(JSContext *cx, JSNative native, const CallArgs &args)
 {
     return native(cx, args.length(), args.base());
 }
--- a/js/src/jsfriendapi.h
+++ b/js/src/jsfriendapi.h
@@ -2679,31 +2679,19 @@ GetObjectEnvironmentObjectForFunction(JS
 
 extern JS_FRIEND_API(bool)
 js_DefineOwnProperty(JSContext *cx, JSObject *objArg, jsid idArg,
                      JS::Handle<JSPropertyDescriptor> descriptor, bool *bp);
 
 extern JS_FRIEND_API(bool)
 js_ReportIsNotFunction(JSContext *cx, JS::HandleValue v);
 
-#ifdef JSGC_GENERATIONAL
 extern JS_FRIEND_API(void)
 JS_StoreObjectPostBarrierCallback(JSContext* cx,
                                   void (*callback)(JSTracer *trc, JSObject *key, void *data),
                                   JSObject *key, void *data);
 
 extern JS_FRIEND_API(void)
 JS_StoreStringPostBarrierCallback(JSContext* cx,
                                   void (*callback)(JSTracer *trc, JSString *key, void *data),
                                   JSString *key, void *data);
-#else
-inline void
-JS_StoreObjectPostBarrierCallback(JSContext* cx,
-                                  void (*callback)(JSTracer *trc, JSObject *key, void *data),
-                                  JSObject *key, void *data) {}
-
-inline void
-JS_StoreStringPostBarrierCallback(JSContext* cx,
-                                  void (*callback)(JSTracer *trc, JSString *key, void *data),
-                                  JSString *key, void *data) {}
-#endif /* JSGC_GENERATIONAL */
 
 #endif /* jsfriendapi_h */
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1101,40 +1101,36 @@ GCRuntime::releaseArena(ArenaHeader *ahe
     if (isBackgroundSweeping())
         aheader->zone->threshold.updateForRemovedArena(tunables);
     return aheader->chunk()->releaseArena(rt, aheader, lock);
 }
 
 GCRuntime::GCRuntime(JSRuntime *rt) :
     rt(rt),
     systemZone(nullptr),
-#ifdef JSGC_GENERATIONAL
     nursery(rt),
     storeBuffer(rt, nursery),
-#endif
     stats(rt),
     marker(rt),
     usage(nullptr),
     maxMallocBytes(0),
     numArenasFreeCommitted(0),
     verifyPreData(nullptr),
     verifyPostData(nullptr),
     chunkAllocationSinceLastGC(false),
     nextFullGCTime(0),
     lastGCTime(0),
     mode(JSGC_MODE_INCREMENTAL),
     decommitThreshold(32 * 1024 * 1024),
     cleanUpEverything(false),
     grayBitsValid(false),
     majorGCRequested(0),
     majorGCTriggerReason(JS::gcreason::NO_REASON),
-#ifdef JSGC_GENERATIONAL
     minorGCRequested(false),
     minorGCTriggerReason(JS::gcreason::NO_REASON),
-#endif
     majorGCNumber(0),
     jitReleaseNumber(0),
     number(0),
     startNumber(0),
     isFull(false),
 #ifdef DEBUG
     disableStrictProxyCheckingCount(0),
 #endif
@@ -1214,25 +1210,23 @@ const char *gc::ZealModeHelpText =
 void
 GCRuntime::setZeal(uint8_t zeal, uint32_t frequency)
 {
     if (verifyPreData)
         VerifyBarriers(rt, PreBarrierVerifier);
     if (verifyPostData)
         VerifyBarriers(rt, PostBarrierVerifier);
 
-#ifdef JSGC_GENERATIONAL
     if (zealMode == ZealGenerationalGCValue) {
         evictNursery(JS::gcreason::DEBUG_GC);
         nursery.leaveZealMode();
     }
 
     if (zeal == ZealGenerationalGCValue)
         nursery.enterZealMode();
-#endif
 
     bool schedule = zeal >= js::gc::ZealAllocValue;
     zealMode = zeal;
     zealFrequency = frequency;
     nextScheduled = schedule ? frequency : 0;
 }
 
 void
@@ -1294,29 +1288,27 @@ GCRuntime::init(uint32_t maxbytes, uint3
      * Separate gcMaxMallocBytes from gcMaxBytes but initialize to maxbytes
      * for default backward API compatibility.
      */
     tunables.setParameter(JSGC_MAX_BYTES, maxbytes);
     setMaxMallocBytes(maxbytes);
 
     jitReleaseNumber = majorGCNumber + JIT_SCRIPT_RELEASE_TYPES_PERIOD;
 
-#ifdef JSGC_GENERATIONAL
     if (!nursery.init(maxNurseryBytes))
         return false;
 
     if (!nursery.isEnabled()) {
         MOZ_ASSERT(nursery.nurserySize() == 0);
         ++rt->gc.generationalDisabled;
     } else {
         MOZ_ASSERT(nursery.nurserySize() > 0);
         if (!storeBuffer.enable())
             return false;
     }
-#endif
 
 #ifdef JS_GC_ZEAL
     const char *zealSpec = getenv("JS_GC_ZEAL");
     if (zealSpec && zealSpec[0] && !parseAndSetZeal(zealSpec))
         return false;
 #endif
 
     if (!InitTrace(*this))
@@ -5651,19 +5643,17 @@ GCRuntime::finishCollection()
 AutoTraceSession::AutoTraceSession(JSRuntime *rt, js::HeapState heapState)
   : lock(rt),
     runtime(rt),
     prevState(rt->gc.heapState)
 {
     MOZ_ASSERT(rt->gc.isAllocAllowed());
     MOZ_ASSERT(rt->gc.heapState == Idle);
     MOZ_ASSERT(heapState != Idle);
-#ifdef JSGC_GENERATIONAL
     MOZ_ASSERT_IF(heapState == MajorCollecting, rt->gc.nursery.isEmpty());
-#endif
 
     // Threads with an exclusive context can hit refillFreeList while holding
     // the exclusive access lock. To avoid deadlocking when we try to acquire
     // this lock during GC and the other thread is waiting, make sure we hold
     // the exclusive access lock during GC sessions.
     MOZ_ASSERT(rt->currentThreadHasExclusiveAccess());
 
     if (rt->exclusiveThreadsPresent()) {
@@ -6058,38 +6048,31 @@ GCRuntime::budgetIncrementalGC(SliceBudg
     }
 
     if (reset)
         resetIncrementalGC("zone change");
 }
 
 namespace {
 
-#ifdef JSGC_GENERATIONAL
 class AutoDisableStoreBuffer
 {
     StoreBuffer &sb;
     bool prior;
 
   public:
     explicit AutoDisableStoreBuffer(GCRuntime *gc) : sb(gc->storeBuffer) {
         prior = sb.isEnabled();
         sb.disable();
     }
     ~AutoDisableStoreBuffer() {
         if (prior)
             sb.enable();
     }
 };
-#else
-struct AutoDisableStoreBuffer
-{
-    AutoDisableStoreBuffer(GCRuntime *gc) {}
-};
-#endif
 
 } /* anonymous namespace */
 
 /*
  * Run one GC "cycle" (either a slice of incremental GC or an entire
  * non-incremental GC). We disable inlining to ensure that the bottom of the
  * stack with possible GC roots recorded in MarkRuntime excludes any pointers we
  * use during the marking implementation.
@@ -6468,83 +6451,73 @@ GCRuntime::onOutOfMallocMemory(const Aut
     // might let the OS scrape together enough pages to satisfy the failing
     // malloc request.
     decommitAllWithoutUnlocking(lock);
 }
 
 void
 GCRuntime::minorGC(JS::gcreason::Reason reason)
 {
-#ifdef JSGC_GENERATIONAL
     minorGCRequested = false;
     TraceLogger *logger = TraceLoggerForMainThread(rt);
     AutoTraceLog logMinorGC(logger, TraceLogger::MinorGC);
     nursery.collect(rt, reason, nullptr);
     MOZ_ASSERT_IF(!rt->mainThread.suppressGC, nursery.isEmpty());
-#endif
 }
 
 void
 GCRuntime::minorGC(JSContext *cx, JS::gcreason::Reason reason)
 {
     // Alternate to the runtime-taking form above which allows marking type
     // objects as needing pretenuring.
-#ifdef JSGC_GENERATIONAL
     minorGCRequested = false;
     TraceLogger *logger = TraceLoggerForMainThread(rt);
     AutoTraceLog logMinorGC(logger, TraceLogger::MinorGC);
     Nursery::TypeObjectList pretenureTypes;
     nursery.collect(rt, reason, &pretenureTypes);
     for (size_t i = 0; i < pretenureTypes.length(); i++) {
         if (pretenureTypes[i]->canPreTenure())
             pretenureTypes[i]->setShouldPreTenure(cx);
     }
     MOZ_ASSERT_IF(!rt->mainThread.suppressGC, nursery.isEmpty());
-#endif
 }
 
 void
 GCRuntime::disableGenerationalGC()
 {
-#ifdef JSGC_GENERATIONAL
     if (isGenerationalGCEnabled()) {
         minorGC(JS::gcreason::API);
         nursery.disable();
         storeBuffer.disable();
     }
-#endif
     ++rt->gc.generationalDisabled;
 }
 
 void
 GCRuntime::enableGenerationalGC()
 {
     MOZ_ASSERT(generationalDisabled > 0);
     --generationalDisabled;
-#ifdef JSGC_GENERATIONAL
     if (generationalDisabled == 0) {
         nursery.enable();
         storeBuffer.enable();
     }
-#endif
 }
 
 bool
 GCRuntime::gcIfNeeded(JSContext *cx /* = nullptr */)
 {
     // This method returns whether a major GC was performed.
 
-#ifdef JSGC_GENERATIONAL
     if (minorGCRequested) {
         if (cx)
             minorGC(cx, minorGCTriggerReason);
         else
             minorGC(minorGCTriggerReason);
     }
-#endif
 
     if (majorGCRequested) {
         gcSlice(GC_NORMAL, rt->gc.majorGCTriggerReason);
         return true;
     }
 
     return false;
 }
@@ -6771,23 +6744,21 @@ void PreventGCDuringInteractiveDebug()
     TlsPerThreadData.get()->suppressGC++;
 }
 
 #endif
 
 void
 js::ReleaseAllJITCode(FreeOp *fop)
 {
-#ifdef JSGC_GENERATIONAL
     /*
      * Scripts can entrain nursery things, inserting references to the script
      * into the store buffer. Clear the store buffer before discarding scripts.
      */
     fop->runtime()->gc.evictNursery();
-#endif
 
     for (ZonesIter zone(fop->runtime(), SkipAtoms); !zone.done(); zone.next()) {
         if (!zone->jitZone())
             continue;
 
 #ifdef DEBUG
         /* Assert no baseline scripts are marked as active. */
         for (ZoneCellIter i(zone, FINALIZE_SCRIPT); !i.done(); i.next()) {
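
disableGenerationalGC and enableGenerationalGC above keep a nesting counter: the first disable performs a minor GC so nothing is left alive in the nursery, then turns the nursery and store buffer off, and the matching enable turns them back on once the count returns to zero. A sketch of that counted RAII pattern with invented names, not the real GCRuntime members:

    #include <cassert>

    struct GenerationalGCModel
    {
        unsigned disabledCount = 0;
        bool nurseryEnabled = true;

        void minorGC() { /* evict everything still alive in the nursery */ }

        void disable() {
            if (disabledCount++ == 0) {   // only the outermost disable does work
                minorGC();                // nursery must be empty before turning it off
                nurseryEnabled = false;
            }
        }

        void enable() {
            assert(disabledCount > 0);
            if (--disabledCount == 0)
                nurseryEnabled = true;
        }
    };

    // RAII wrapper in the spirit of JS::AutoDisableGenerationalGC.
    class AutoDisableGenerationalGCModel
    {
        GenerationalGCModel &gc_;

      public:
        explicit AutoDisableGenerationalGCModel(GenerationalGCModel &gc) : gc_(gc) { gc_.disable(); }
        ~AutoDisableGenerationalGCModel() { gc_.enable(); }
    };
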
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -70,17 +70,16 @@ template <> struct MapTypeToFinalizeKind
 template <> struct MapTypeToFinalizeKind<BaseShape>         { static const AllocKind kind = FINALIZE_BASE_SHAPE; };
 template <> struct MapTypeToFinalizeKind<types::TypeObject> { static const AllocKind kind = FINALIZE_TYPE_OBJECT; };
 template <> struct MapTypeToFinalizeKind<JSFatInlineString> { static const AllocKind kind = FINALIZE_FAT_INLINE_STRING; };
 template <> struct MapTypeToFinalizeKind<JSString>          { static const AllocKind kind = FINALIZE_STRING; };
 template <> struct MapTypeToFinalizeKind<JSExternalString>  { static const AllocKind kind = FINALIZE_EXTERNAL_STRING; };
 template <> struct MapTypeToFinalizeKind<JS::Symbol>        { static const AllocKind kind = FINALIZE_SYMBOL; };
 template <> struct MapTypeToFinalizeKind<jit::JitCode>      { static const AllocKind kind = FINALIZE_JITCODE; };
 
-#if defined(JSGC_GENERATIONAL) || defined(DEBUG)
 static inline bool
 IsNurseryAllocable(AllocKind kind)
 {
     MOZ_ASSERT(kind >= 0 && unsigned(kind) < FINALIZE_LIMIT);
     static const bool map[] = {
         false,     /* FINALIZE_OBJECT0 */
         true,      /* FINALIZE_OBJECT0_BACKGROUND */
         false,     /* FINALIZE_OBJECT2 */
@@ -103,17 +102,16 @@ IsNurseryAllocable(AllocKind kind)
         false,     /* FINALIZE_STRING */
         false,     /* FINALIZE_EXTERNAL_STRING */
         false,     /* FINALIZE_SYMBOL */
         false,     /* FINALIZE_JITCODE */
     };
     JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == FINALIZE_LIMIT);
     return map[kind];
 }
-#endif
 
 #if defined(JSGC_FJGENERATIONAL)
 // This is separate from IsNurseryAllocable() so that the latter can evolve
 // without worrying about what the ForkJoinNursery's needs are, and vice
 // versa to some extent.
 static inline bool
 IsFJNurseryAllocable(AllocKind kind)
 {
@@ -1215,18 +1213,16 @@ namespace gc {
 
 /*
  * Merge all contents of source into target. This can only be used if source is
  * the only compartment in its zone.
  */
 void
 MergeCompartments(JSCompartment *source, JSCompartment *target);
 
-#if defined(JSGC_GENERATIONAL) || defined(JSGC_COMPACTING)
-
 /*
  * This structure overlays a Cell in the Nursery and re-purposes its memory
  * for managing the Nursery collection process.
  */
 class RelocationOverlay
 {
     friend class MinorCollectionTracer;
     friend class ForkJoinNursery;
@@ -1324,24 +1320,16 @@ Forwarded(const JS::Value &value)
 
 template <typename T>
 inline T
 MaybeForwarded(T t)
 {
     return IsForwarded(t) ? Forwarded(t) : t;
 }
 
-#else
-
-template <typename T> inline bool IsForwarded(T t) { return false; }
-template <typename T> inline T Forwarded(T t) { return t; }
-template <typename T> inline T MaybeForwarded(T t) { return t; }
-
-#endif // JSGC_GENERATIONAL || JSGC_COMPACTING
-
 #ifdef JSGC_HASH_TABLE_CHECKS
 
 template <typename T>
 inline void
 CheckGCThingAfterMovingGC(T *t)
 {
     MOZ_ASSERT_IF(t, !IsInsideNursery(t));
 #ifdef JSGC_COMPACTING
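
The RelocationOverlay declared above is what IsForwarded, Forwarded and MaybeForwarded read: when a collection moves a cell, it writes a forwarding record over the old copy so stale pointers can still be chased to the new location. A toy model of the overlay follows; the magic constant and field layout are assumptions, not SpiderMonkey's actual encoding.

    #include <cassert>
    #include <cstdint>

    // Written over the first words of a moved cell. The magic value marks the
    // old copy as forwarded; newLocation points at the live copy.
    struct RelocationOverlayModel
    {
        enum : uintptr_t { ForwardedMagic = 0xBADC0DE };   // invented value

        uintptr_t magic;
        void *newLocation;

        static void forward(void *oldCell, void *newCell) {
            RelocationOverlayModel *overlay = static_cast<RelocationOverlayModel *>(oldCell);
            overlay->magic = ForwardedMagic;
            overlay->newLocation = newCell;
        }
        static bool isForwarded(const void *cell) {
            return static_cast<const RelocationOverlayModel *>(cell)->magic == ForwardedMagic;
        }
        static void *forwarded(void *cell) {
            assert(isForwarded(cell));
            return static_cast<RelocationOverlayModel *>(cell)->newLocation;
        }
    };

    // Equivalent of MaybeForwarded: follow the forwarding pointer if present.
    inline void *maybeForwarded(void *cell)
    {
        return RelocationOverlayModel::isForwarded(cell)
               ? RelocationOverlayModel::forwarded(cell)
               : cell;
    }
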
--- a/js/src/jsgcinlines.h
+++ b/js/src/jsgcinlines.h
@@ -62,41 +62,37 @@ GetGCObjectKind(const Class *clasp)
     if (clasp == FunctionClassPtr)
         return JSFunction::FinalizeKind;
     uint32_t nslots = JSCLASS_RESERVED_SLOTS(clasp);
     if (clasp->flags & JSCLASS_HAS_PRIVATE)
         nslots++;
     return GetGCObjectKind(nslots);
 }
 
-#ifdef JSGC_GENERATIONAL
 inline bool
 ShouldNurseryAllocate(const Nursery &nursery, AllocKind kind, InitialHeap heap)
 {
     return nursery.isEnabled() && IsNurseryAllocable(kind) && heap != TenuredHeap;
 }
-#endif
 
 #ifdef JSGC_FJGENERATIONAL
 inline bool
 ShouldFJNurseryAllocate(const ForkJoinNursery &nursery, AllocKind kind, InitialHeap heap)
 {
     return IsFJNurseryAllocable(kind) && heap != TenuredHeap;
 }
 #endif
 
 inline JSGCTraceKind
 GetGCThingTraceKind(const void *thing)
 {
     MOZ_ASSERT(thing);
     const Cell *cell = static_cast<const Cell *>(thing);
-#ifdef JSGC_GENERATIONAL
     if (IsInsideNursery(cell))
         return JSTRACE_OBJECT;
-#endif
     return MapAllocToTraceKind(cell->asTenured().getAllocKind());
 }
 
 inline void
 GCRuntime::poke()
 {
     poked = true;
 
@@ -312,19 +308,17 @@ class ZoneCellIterImpl
         }
     }
 };
 
 class ZoneCellIterUnderGC : public ZoneCellIterImpl
 {
   public:
     ZoneCellIterUnderGC(JS::Zone *zone, AllocKind kind) {
-#ifdef JSGC_GENERATIONAL
         MOZ_ASSERT(zone->runtimeFromAnyThread()->gc.nursery.isEmpty());
-#endif
         MOZ_ASSERT(zone->runtimeFromAnyThread()->isHeapBusy());
         init(zone, kind);
     }
 };
 
 class ZoneCellIter : public ZoneCellIterImpl
 {
     JS::AutoAssertNoAlloc noAlloc;
@@ -343,21 +337,19 @@ class ZoneCellIter : public ZoneCellIter
          * currently active.
          */
         if (IsBackgroundFinalized(kind) &&
             zone->allocator.arenas.needBackgroundFinalizeWait(kind))
         {
             zone->runtimeFromMainThread()->gc.waitBackgroundSweepEnd();
         }
 
-#ifdef JSGC_GENERATIONAL
         /* Evict the nursery before iterating so we can see all things. */
         JSRuntime *rt = zone->runtimeFromMainThread();
         rt->gc.evictNursery();
-#endif
 
         if (lists->isSynchronizedFreeList(kind)) {
             lists = nullptr;
         } else {
             MOZ_ASSERT(!zone->runtimeFromMainThread()->isHeapBusy());
             lists->copyFreeListToArena(kind);
         }
 
@@ -430,17 +422,16 @@ class GCZoneGroupIter {
     }
 
     operator JS::Zone *() const { return get(); }
     JS::Zone *operator->() const { return get(); }
 };
 
 typedef CompartmentsIterT<GCZoneGroupIter> GCCompartmentGroupIter;
 
-#ifdef JSGC_GENERATIONAL
 /*
  * Attempt to allocate a new GC thing out of the nursery. If there is not enough
  * room in the nursery or there is an OOM, this method will return nullptr.
  */
 template <AllowGC allowGC>
 inline JSObject *
 TryNewNurseryObject(JSContext *cx, size_t thingSize, size_t nDynamicSlots)
 {
@@ -457,17 +448,16 @@ TryNewNurseryObject(JSContext *cx, size_
         if (nursery.isEnabled()) {
             JSObject *obj = nursery.allocateObject(cx, thingSize, nDynamicSlots);
             MOZ_ASSERT(obj);
             return obj;
         }
     }
     return nullptr;
 }
-#endif /* JSGC_GENERATIONAL */
 
 #ifdef JSGC_FJGENERATIONAL
 template <AllowGC allowGC>
 inline JSObject *
 TryNewNurseryObject(ForkJoinContext *cx, size_t thingSize, size_t nDynamicSlots)
 {
     ForkJoinNursery &nursery = cx->nursery();
     bool tooLarge = false;
@@ -568,24 +558,22 @@ AllocateObject(ThreadSafeContext *cx, Al
     MOZ_ASSERT(thingSize == Arena::thingSize(kind));
     MOZ_ASSERT(thingSize >= sizeof(JSObject));
     static_assert(sizeof(JSObject) >= CellSize,
                   "All allocations must be at least the allocator-imposed minimum size.");
 
     if (!CheckAllocatorState<allowGC>(cx, kind))
         return nullptr;
 
-#ifdef JSGC_GENERATIONAL
     if (cx->isJSContext() &&
         ShouldNurseryAllocate(cx->asJSContext()->nursery(), kind, heap)) {
         JSObject *obj = TryNewNurseryObject<allowGC>(cx->asJSContext(), thingSize, nDynamicSlots);
         if (obj)
             return obj;
     }
-#endif
 #ifdef JSGC_FJGENERATIONAL
     if (cx->isForkJoinContext() &&
         ShouldFJNurseryAllocate(cx->asForkJoinContext()->nursery(), kind, heap))
     {
         JSObject *obj =
             TryNewNurseryObject<allowGC>(cx->asForkJoinContext(), thingSize, nDynamicSlots);
         if (obj)
             return obj;
@@ -649,57 +637,51 @@ AllocateNonObject(ThreadSafeContext *cx)
  * fail the allocation, forcing the non-cached path.
  *
  * Observe this won't be used for ForkJoin allocation, as it takes a JSContext*
  */
 template <AllowGC allowGC>
 inline JSObject *
 AllocateObjectForCacheHit(JSContext *cx, AllocKind kind, InitialHeap heap)
 {
-#ifdef JSGC_GENERATIONAL
     if (ShouldNurseryAllocate(cx->nursery(), kind, heap)) {
         size_t thingSize = Arena::thingSize(kind);
 
         MOZ_ASSERT(thingSize == Arena::thingSize(kind));
         if (!CheckAllocatorState<NoGC>(cx, kind))
             return nullptr;
 
         JSObject *obj = TryNewNurseryObject<NoGC>(cx, thingSize, 0);
         if (!obj && allowGC) {
             cx->minorGC(JS::gcreason::OUT_OF_NURSERY);
             return nullptr;
         }
         return obj;
     }
-#endif
 
     JSObject *obj = AllocateObject<NoGC>(cx, kind, 0, heap);
     if (!obj && allowGC) {
         cx->runtime()->gc.maybeGC(cx->zone());
         return nullptr;
     }
 
     return obj;
 }
 
 inline bool
 IsInsideGGCNursery(const js::gc::Cell *cell)
 {
-#ifdef JSGC_GENERATIONAL
     if (!cell)
         return false;
     uintptr_t addr = uintptr_t(cell);
     addr &= ~js::gc::ChunkMask;
     addr |= js::gc::ChunkLocationOffset;
     uint32_t location = *reinterpret_cast<uint32_t *>(addr);
     MOZ_ASSERT(location != 0);
     return location & js::gc::ChunkLocationBitNursery;
-#else
-    return false;
-#endif
 }
 
 } /* namespace gc */
 
 template <js::AllowGC allowGC>
 inline JSObject *
 NewGCObject(js::ThreadSafeContext *cx, js::gc::AllocKind kind, size_t nDynamicSlots, js::gc::InitialHeap heap)
 {
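
With the guards gone, the allocation fast path in jsgcinlines.h is unconditional: AllocateObject first asks TryNewNurseryObject for bump-allocated nursery space and only falls back to a tenured allocation when the nursery is disabled or full. Below is a minimal, self-contained sketch of that shape; Nursery, Object, and the bump-pointer layout are simplified stand-ins, not the real SpiderMonkey types.

// "Nursery first, tenured fallback" allocation sketch (stand-in types).
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <new>

struct Object { unsigned char payload[32]; };

class Nursery {
    uint8_t *start_, *end_, *cur_;
  public:
    explicit Nursery(size_t bytes)
      : start_(static_cast<uint8_t *>(std::malloc(bytes))),
        end_(start_ ? start_ + bytes : nullptr),
        cur_(start_) {}
    ~Nursery() { std::free(start_); }
    bool isEnabled() const { return start_ != nullptr; }
    // Bump allocation: return nullptr when there is no room, which is the
    // condition that sends AllocateObject down the tenured path.
    void *allocate(size_t bytes) {
        if (!isEnabled() || size_t(end_ - cur_) < bytes)
            return nullptr;
        void *p = cur_;
        cur_ += bytes;
        return p;
    }
};

Object *AllocateObject(Nursery &nursery, bool nurseryHeap) {
    if (nurseryHeap) {
        if (void *p = nursery.allocate(sizeof(Object)))
            return new (p) Object();   // fast path: nursery bump allocation
    }
    return new Object();               // fallback: tenured allocation
}

int main() {
    Nursery nursery(4096);
    Object *obj = AllocateObject(nursery, /* nurseryHeap = */ true);
    std::printf("allocated at %p\n", static_cast<void *>(obj));
    return 0;
}

The important property is that the nursery path never reports an error itself: a null result simply routes the request to the tenured allocator.
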
--- a/js/src/jshashutil.h
+++ b/js/src/jshashutil.h
@@ -20,44 +20,38 @@ template <class T>
 struct DependentAddPtr
 {
     typedef typename T::AddPtr AddPtr;
     typedef typename T::Entry Entry;
 
     template <class Lookup>
     DependentAddPtr(const ExclusiveContext *cx, const T &table, const Lookup &lookup)
       : addPtr(table.lookupForAdd(lookup))
-#ifdef JSGC_GENERATIONAL
       , originalGcNumber(cx->zone()->gcNumber())
-#endif
-        {}
+    {}
 
     template <class KeyInput, class ValueInput>
     bool add(const ExclusiveContext *cx, T &table, const KeyInput &key, const ValueInput &value) {
-#ifdef JSGC_GENERATIONAL
         bool gcHappened = originalGcNumber != cx->zone()->gcNumber();
         if (gcHappened)
             addPtr = table.lookupForAdd(key);
-#endif
         return table.relookupOrAdd(addPtr, key, value);
     }
 
     typedef void (DependentAddPtr::* ConvertibleToBool)();
     void nonNull() {}
 
     bool found() const                 { return addPtr.found(); }
     operator ConvertibleToBool() const { return found() ? &DependentAddPtr::nonNull : 0; }
     const Entry &operator*() const     { return *addPtr; }
     const Entry *operator->() const    { return &*addPtr; }
 
   private:
     AddPtr addPtr;
-#ifdef JSGC_GENERATIONAL
     const uint64_t originalGcNumber;
-#endif
 
     DependentAddPtr() MOZ_DELETE;
     DependentAddPtr(const DependentAddPtr&) MOZ_DELETE;
     DependentAddPtr& operator=(const DependentAddPtr&) MOZ_DELETE;
 };
 
 } // namespace js
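
DependentAddPtr above caches the AddPtr from an initial lookupForAdd and records the zone's GC number at that moment; if a GC ran before add(), the cached pointer may be stale, so the lookup is redone. A rough stand-alone sketch of the same guard, using a plain std::map and a fake GC counter rather than js::HashMap and Zone:

#include <cstdint>
#include <cstdio>
#include <map>
#include <string>
#include <utility>

using Table = std::map<std::string, int>;

struct FakeZone {
    uint64_t gcNumber_ = 0;
    uint64_t gcNumber() const { return gcNumber_; }
    void runGC() { ++gcNumber_; }   // a GC may move keys or resize tables
};

class DependentAddPtr {
    Table::iterator hint_;
    bool found_;
    const uint64_t originalGcNumber_;
  public:
    DependentAddPtr(const FakeZone &zone, Table &table, const std::string &key)
      : hint_(table.lower_bound(key)),
        found_(hint_ != table.end() && hint_->first == key),
        originalGcNumber_(zone.gcNumber()) {}

    bool found() const { return found_; }

    void add(const FakeZone &zone, Table &table, const std::string &key, int value) {
        // If a GC happened since the lookup, the cached position is stale.
        if (originalGcNumber_ != zone.gcNumber())
            hint_ = table.lower_bound(key);
        table.insert(hint_, std::make_pair(key, value));
    }
};

int main() {
    FakeZone zone;
    Table table;
    DependentAddPtr p(zone, table, "answer");
    zone.runGC();                  // something triggered a GC between lookup and add
    if (!p.found())
        p.add(zone, table, "answer", 42);
    std::printf("answer = %d\n", table["answer"]);
    return 0;
}
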
 
--- a/js/src/jsinfer.cpp
+++ b/js/src/jsinfer.cpp
@@ -4368,17 +4368,16 @@ JSObject::setNewTypeUnknown(JSContext *c
         Rooted<TaggedProto> taggedProto(cx, TaggedProto(obj));
         if (TypeObjectWithNewScriptSet::Ptr p = table.lookup(TypeObjectWithNewScriptSet::Lookup(clasp, taggedProto, nullptr)))
             MarkTypeObjectUnknownProperties(cx, p->object);
     }
 
     return true;
 }
 
-#ifdef JSGC_GENERATIONAL
 /*
  * This class is used to add a post barrier on the newTypeObjects set, as the
  * key is calculated from a prototype object which may be moved by generational
  * GC.
  */
 class NewTypeObjectsSetRef : public BufferableRef
 {
     TypeObjectWithNewScriptSet *set;
@@ -4420,17 +4419,16 @@ TypeObjectTablePostBarrier(ExclusiveCont
         return;
     }
 
     if (IsInsideNursery(proto.toObject())) {
         StoreBuffer &sb = cx->asJSContext()->runtime()->gc.storeBuffer;
         sb.putGeneric(NewTypeObjectsSetRef(table, clasp, proto.toObject(), fun));
     }
 }
-#endif
 
 TypeObject *
 ExclusiveContext::getNewType(const Class *clasp, TaggedProto proto, JSFunction *fun)
 {
     MOZ_ASSERT_IF(fun, proto.isObject());
     MOZ_ASSERT_IF(proto.isObject(), isInsideCurrentCompartment(proto.toObject()));
 
     TypeObjectWithNewScriptSet &newTypeObjects = compartment()->newTypeObjects;
@@ -4476,19 +4474,17 @@ ExclusiveContext::getNewType(const Class
     Rooted<TaggedProto> protoRoot(this, proto);
     TypeObject *type = compartment()->types.newTypeObject(this, clasp, protoRoot, initialFlags);
     if (!type)
         return nullptr;
 
     if (!newTypeObjects.add(p, TypeObjectWithNewScriptEntry(type, fun)))
         return nullptr;
 
-#ifdef JSGC_GENERATIONAL
     TypeObjectTablePostBarrier(this, &newTypeObjects, clasp, proto, fun);
-#endif
 
     if (proto.isObject()) {
         RootedObject obj(this, proto.toObject());
 
         if (fun)
             TypeNewScript::make(asJSContext(), type, fun);
 
         /*
@@ -4544,19 +4540,17 @@ ExclusiveContext::getSingletonType(const
     Rooted<TaggedProto> protoRoot(this, proto);
     TypeObject *type = compartment()->types.newTypeObject(this, clasp, protoRoot);
     if (!type)
         return nullptr;
 
     if (!table.add(p, TypeObjectWithNewScriptEntry(type, nullptr)))
         return nullptr;
 
-#ifdef JSGC_GENERATIONAL
     TypeObjectTablePostBarrier(this, &table, clasp, proto, nullptr);
-#endif
 
     type->initSingleton((JSObject *) TypeObject::LAZY_SINGLETON);
     MOZ_ASSERT(type->singleton(), "created type must be a proper singleton");
 
     return type;
 }
 
 /////////////////////////////////////////////////////////////////////
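
TypeObjectTablePostBarrier and NewTypeObjectsSetRef implement a rekeying post barrier: when a table entry's key is derived from an object the nursery may move, the insertion also queues a small ref in the store buffer, and at minor-GC time that ref re-finds the entry under the old key and reinserts it under the object's new address. A simplified, self-contained sketch of the pattern; BufferableRef, StoreBuffer, and the table here are stand-ins, not the real classes.

#include <cstdio>
#include <string>
#include <unordered_map>
#include <vector>

using TypeTable = std::unordered_map<const void *, std::string>;

struct BufferableRef {
    virtual void mark() = 0;          // invoked while the minor GC runs
    virtual ~BufferableRef() {}
};

struct StoreBuffer {
    std::vector<BufferableRef *> refs;
    void putGeneric(BufferableRef *ref) { refs.push_back(ref); }
    void traceAll() { for (BufferableRef *r : refs) r->mark(); }
};

class TableKeyRef : public BufferableRef {
    TypeTable *table;
    const void *oldKey;
    const void **movedLocation;       // where the GC publishes the new address
  public:
    TableKeyRef(TypeTable *t, const void *key, const void **moved)
      : table(t), oldKey(key), movedLocation(moved) {}
    void mark() override {
        auto it = table->find(oldKey);
        if (it == table->end())
            return;                   // entry was removed; nothing to rekey
        std::string value = it->second;
        table->erase(it);
        (*table)[*movedLocation] = value;   // rekey under the new address
    }
};

int main() {
    TypeTable table;
    StoreBuffer sb;

    int nurseryObject = 0;            // pretend this lives in the nursery
    const void *key = &nurseryObject;
    table[key] = "type-for-proto";
    const void *movedTo = nullptr;
    TableKeyRef ref(&table, key, &movedTo);
    sb.putGeneric(&ref);

    int tenuredCopy = 0;              // minor GC "moves" the object here
    movedTo = &tenuredCopy;
    sb.traceAll();

    std::printf("entries under new key: %zu\n", table.count(&tenuredCopy));
    return 0;
}
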
--- a/js/src/jsinfer.h
+++ b/js/src/jsinfer.h
@@ -1328,25 +1328,23 @@ struct TypeObjectWithNewScriptEntry
         TaggedProto hashProto;
         TaggedProto matchProto;
         JSFunction *newFunction;
 
         Lookup(const Class *clasp, TaggedProto proto, JSFunction *newFunction)
           : clasp(clasp), hashProto(proto), matchProto(proto), newFunction(newFunction)
         {}
 
-#ifdef JSGC_GENERATIONAL
         /*
          * For use by generational post barriers only.  Look up an entry whose
          * proto has been moved, but was hashed with the original value.
          */
         Lookup(const Class *clasp, TaggedProto hashProto, TaggedProto matchProto, JSFunction *newFunction)
             : clasp(clasp), hashProto(hashProto), matchProto(matchProto), newFunction(newFunction)
         {}
-#endif
 
     };
 
     static inline HashNumber hash(const Lookup &lookup);
     static inline bool match(const TypeObjectWithNewScriptEntry &key, const Lookup &lookup);
     static void rekey(TypeObjectWithNewScriptEntry &k, const TypeObjectWithNewScriptEntry& newKey) { k = newKey; }
 };
 typedef HashSet<TypeObjectWithNewScriptEntry,
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -2289,21 +2289,19 @@ JSObject::swap(JSContext *cx, HandleObje
         CrashAtUnhandlableOOM("JSObject::swap");
     if (!b->getType(cx))
         CrashAtUnhandlableOOM("JSObject::swap");
 
     /*
      * Neither object may be in the nursery, but ensure we update any embedded
      * nursery pointers in either object.
      */
-#ifdef JSGC_GENERATIONAL
     MOZ_ASSERT(!IsInsideNursery(a) && !IsInsideNursery(b));
     cx->runtime()->gc.storeBuffer.putWholeCellFromMainThread(a);
     cx->runtime()->gc.storeBuffer.putWholeCellFromMainThread(b);
-#endif
 
     unsigned r = NotifyGCPreSwap(a, b);
 
     // Do the fundamental swapping of the contents of two objects.
     MOZ_ASSERT(a->compartment() == b->compartment());
     MOZ_ASSERT(a->is<JSFunction>() == b->is<JSFunction>());
 
     // Don't try to swap functions with different sizes.
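
The swap() hunk above uses the coarse, whole-cell form of the post barrier: rather than recording every edge the swap might create, both objects are queued so the next minor GC simply re-traces all of their fields. A compact sketch of that idea with stand-in types:

#include <cstdio>
#include <utility>
#include <vector>

struct FakeObject { FakeObject *slots[4] = {}; };

struct StoreBuffer {
    std::vector<FakeObject *> wholeCells;
    void putWholeCell(FakeObject *obj) { wholeCells.push_back(obj); }
};

// At minor-GC time every queued object has all of its fields re-traced,
// catching any nursery pointers that the swap moved into it.
void traceWholeCells(const StoreBuffer &sb) {
    for (FakeObject *obj : sb.wholeCells)
        for (FakeObject *slot : obj->slots)
            if (slot)
                std::printf("re-tracing edge %p -> %p\n",
                            static_cast<void *>(obj), static_cast<void *>(slot));
}

int main() {
    FakeObject a, b, child;
    a.slots[0] = &child;

    StoreBuffer sb;
    sb.putWholeCell(&a);          // as in swap(): buffer both objects...
    sb.putWholeCell(&b);
    std::swap(a.slots, b.slots);  // ...then exchange their contents

    traceWholeCells(sb);
    return 0;
}
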
--- a/js/src/jsobj.h
+++ b/js/src/jsobj.h
@@ -823,49 +823,43 @@ JSObject::writeBarrierPre(JSObject *obj)
     if (!isNullLike(obj) && obj->isTenured())
         obj->asTenured().writeBarrierPre(&obj->asTenured());
 }
 
 /* static */ MOZ_ALWAYS_INLINE void
 JSObject::writeBarrierPost(JSObject *obj, void *cellp)
 {
     MOZ_ASSERT(cellp);
-#ifdef JSGC_GENERATIONAL
     if (IsNullTaggedPointer(obj))
         return;
     MOZ_ASSERT(obj == *static_cast<JSObject **>(cellp));
     js::gc::StoreBuffer *storeBuffer = obj->storeBuffer();
     if (storeBuffer)
         storeBuffer->putCellFromAnyThread(static_cast<js::gc::Cell **>(cellp));
-#endif
 }
 
 /* static */ MOZ_ALWAYS_INLINE void
 JSObject::writeBarrierPostRelocate(JSObject *obj, void *cellp)
 {
     MOZ_ASSERT(cellp);
     MOZ_ASSERT(obj);
     MOZ_ASSERT(obj == *static_cast<JSObject **>(cellp));
-#ifdef JSGC_GENERATIONAL
     js::gc::StoreBuffer *storeBuffer = obj->storeBuffer();
     if (storeBuffer)
         storeBuffer->putRelocatableCellFromAnyThread(static_cast<js::gc::Cell **>(cellp));
-#endif
 }
 
 /* static */ MOZ_ALWAYS_INLINE void
 JSObject::writeBarrierPostRemove(JSObject *obj, void *cellp)
 {
     MOZ_ASSERT(cellp);
     MOZ_ASSERT(obj);
     MOZ_ASSERT(obj == *static_cast<JSObject **>(cellp));
-#ifdef JSGC_GENERATIONAL
     obj->shadowRuntimeFromAnyThread()->gcStoreBufferPtr()->removeRelocatableCellFromAnyThread(
         static_cast<js::gc::Cell **>(cellp));
-#endif
 }
 
 namespace js {
 
 inline bool
 IsCallable(const Value &v)
 {
     return v.isObject() && v.toObject().isCallable();
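
The writeBarrierPost family above is the fine-grained counterpart: after a pointer is stored into a field the minor GC cannot otherwise find, the address of that field is recorded if the stored value lives in the nursery, so the collector can later update the slot when the target moves. A self-contained sketch; Cell, StoreBuffer, and the nursery check are simplified stand-ins.

#include <cstdio>
#include <vector>

struct Cell { bool inNursery; };

struct StoreBuffer {
    std::vector<Cell **> edges;                 // addresses of fields to revisit
    void putCell(Cell **cellp) { edges.push_back(cellp); }
};

void writeBarrierPost(StoreBuffer &sb, Cell *target, Cell **fieldAddress) {
    *fieldAddress = target;                     // the actual store
    if (target && target->inNursery)            // only nursery targets need it
        sb.putCell(fieldAddress);
}

int main() {
    StoreBuffer sb;
    Cell tenuredTarget{false}, nurseryTarget{true};
    Cell *field = nullptr;                      // pretend this is a tenured slot

    writeBarrierPost(sb, &tenuredTarget, &field);   // not recorded
    writeBarrierPost(sb, &nurseryTarget, &field);   // recorded
    std::printf("buffered edges: %zu\n", sb.edges.size());
    return 0;
}
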
--- a/js/src/jspropertytree.cpp
+++ b/js/src/jspropertytree.cpp
@@ -342,17 +342,16 @@ Shape::fixupAfterMovingGC()
     if (inDictionary())
         fixupDictionaryShapeAfterMovingGC();
     else
         fixupShapeTreeAfterMovingGC();
 }
 
 #endif // JSGC_COMPACTING
 
-#ifdef JSGC_GENERATIONAL
 void
 ShapeGetterSetterRef::mark(JSTracer *trc)
 {
     // Update the current shape's entry in the parent KidsHash table if needed.
     // This is necessary as the computed hash includes the getter/setter
     // pointers.
 
     JSObject *obj = *objp;
@@ -371,17 +370,16 @@ ShapeGetterSetterRef::mark(JSTracer *trc
         return;
     }
 
     KidsHash *kh = parent->kids.toHash();
     kh->remove(StackShape(shape));
     *objp = obj;
     MOZ_ALWAYS_TRUE(kh->putNew(StackShape(shape), shape));
 }
-#endif
 
 #ifdef DEBUG
 
 void
 KidsPointer::checkConsistency(Shape *aKid) const
 {
     if (isShape()) {
         MOZ_ASSERT(toShape() == aKid);
--- a/js/src/jspubtd.h
+++ b/js/src/jspubtd.h
@@ -16,17 +16,17 @@
 #include "mozilla/NullPtr.h"
 #include "mozilla/PodOperations.h"
 
 #include "jsprototypes.h"
 #include "jstypes.h"
 
 #include "js/TypeDecls.h"
 
-#if (defined(JSGC_GENERATIONAL) && defined(JS_GC_ZEAL)) || \
+#if (defined(JS_GC_ZEAL)) || \
     (defined(JSGC_COMPACTING) && defined(DEBUG))
 # define JSGC_HASH_TABLE_CHECKS
 #endif
 
 namespace JS {
 
 class AutoIdVector;
 class CallArgs;
@@ -155,40 +155,30 @@ typedef void (*OffThreadCompileCallback)
 
 namespace shadow {
 
 struct Runtime
 {
     /* Restrict zone access during Minor GC. */
     bool needsIncrementalBarrier_;
 
-#ifdef JSGC_GENERATIONAL
   private:
     js::gc::StoreBuffer *gcStoreBufferPtr_;
-#endif
 
   public:
-    explicit Runtime(
-#ifdef JSGC_GENERATIONAL
-        js::gc::StoreBuffer *storeBuffer
-#endif
-    )
+    explicit Runtime(js::gc::StoreBuffer *storeBuffer)
       : needsIncrementalBarrier_(false)
-#ifdef JSGC_GENERATIONAL
       , gcStoreBufferPtr_(storeBuffer)
-#endif
     {}
 
     bool needsIncrementalBarrier() const {
         return needsIncrementalBarrier_;
     }
 
-#ifdef JSGC_GENERATIONAL
     js::gc::StoreBuffer *gcStoreBufferPtr() { return gcStoreBufferPtr_; }
-#endif
 
     static JS::shadow::Runtime *asShadowRuntime(JSRuntime *rt) {
         return reinterpret_cast<JS::shadow::Runtime*>(rt);
     }
 
     /* Allow inlining of PersistentRooted constructors and destructors. */
   private:
     template <typename Referent> friend class JS::PersistentRooted;
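
shadow::Runtime now carries the store buffer pointer unconditionally. The point of the shadow struct is that public, header-only barrier code can reach a few hot runtime fields without seeing the full JSRuntime definition, because JSRuntime derives from the shadow type and keeps the layouts in sync (as the Runtime.cpp hunk further down shows). A sketch of the pattern with stand-in names:

#include <cstdio>

struct StoreBuffer { int pending = 0; };

// What public headers get to see.
struct ShadowRuntime {
    bool needsIncrementalBarrier_ = false;
    StoreBuffer *gcStoreBufferPtr_;
    explicit ShadowRuntime(StoreBuffer *sb) : gcStoreBufferPtr_(sb) {}
    StoreBuffer *gcStoreBufferPtr() { return gcStoreBufferPtr_; }
};

// The real runtime derives from the shadow so the layouts line up.
struct RealRuntime : ShadowRuntime {
    StoreBuffer storeBuffer;
    RealRuntime() : ShadowRuntime(&storeBuffer) {}
    // Lots of other members live here in the real thing.
};

// Inline code compiled against only the shadow view.
void recordEdge(ShadowRuntime *rt) { rt->gcStoreBufferPtr()->pending++; }

int main() {
    RealRuntime rt;
    recordEdge(&rt);    // callers can pass the real runtime as its shadow view
    std::printf("pending store buffer entries: %d\n", rt.storeBuffer.pending);
    return 0;
}
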
--- a/js/src/jsweakmap.cpp
+++ b/js/src/jsweakmap.cpp
@@ -348,22 +348,20 @@ TryPreserveReflector(JSContext *cx, Hand
         }
     }
     return true;
 }
 
 static inline void
 WeakMapPostWriteBarrier(JSRuntime *rt, ObjectValueMap *weakMap, JSObject *key)
 {
-#ifdef JSGC_GENERATIONAL
     // Strip the barriers from the type before inserting into the store buffer.
     // This will automatically ensure that barriers do not fire during GC.
     if (key && IsInsideNursery(key))
         rt->gc.storeBuffer.putGeneric(UnbarrieredRef(weakMap, key));
-#endif
 }
 
 static MOZ_ALWAYS_INLINE bool
 SetWeakMapEntryInternal(JSContext *cx, Handle<WeakMapObject*> mapObj,
                         HandleObject key, HandleValue value)
 {
     ObjectValueMap *map = mapObj->getMap();
     if (!map) {
--- a/js/src/moz.build
+++ b/js/src/moz.build
@@ -470,17 +470,17 @@ if CONFIG['MOZ_ETW']:
     ]
     # This will get the ETW provider resources into the library mozjs.dll
     RESFILE = 'ETWProvider.res'
 
 if CONFIG['NIGHTLY_BUILD']:
     DEFINES['ENABLE_PARALLEL_JS'] = True
     DEFINES['ENABLE_BINARYDATA'] = True
     DEFINES['ENABLE_SHARED_ARRAY_BUFFER'] = True
-    if CONFIG['ENABLE_ION'] and CONFIG['JSGC_GENERATIONAL_CONFIGURED']:
+    if CONFIG['ENABLE_ION']:
         DEFINES['JSGC_FJGENERATIONAL'] = True
 
 DEFINES['EXPORT_JS_API'] = True
 
 if CONFIG['JS_HAS_CTYPES']:
     DEFINES['JS_HAS_CTYPES'] = True
     for var in ('DLL_PREFIX', 'DLL_SUFFIX'):
         DEFINES[var] = '"%s"' % CONFIG[var]
--- a/js/src/shell/js.cpp
+++ b/js/src/shell/js.cpp
@@ -5848,19 +5848,17 @@ main(int argc, char **argv, char **envp)
                              "to test JIT codegen (no-op on platforms other than x86 and x64).")
         || !op.addBoolOption('\0', "fuzzing-safe", "Don't expose functions that aren't safe for "
                              "fuzzers to call")
         || !op.addBoolOption('\0', "no-threads", "Disable helper threads and PJS threads")
 #ifdef DEBUG
         || !op.addBoolOption('\0', "dump-entrained-variables", "Print variables which are "
                              "unnecessarily entrained by inner functions")
 #endif
-#ifdef JSGC_GENERATIONAL
         || !op.addBoolOption('\0', "no-ggc", "Disable Generational GC")
-#endif
         || !op.addBoolOption('\0', "no-incremental-gc", "Disable Incremental GC")
         || !op.addIntOption('\0', "available-memory", "SIZE",
                             "Select GC settings based on available memory (MB)", 0)
 #if defined(JS_CODEGEN_ARM)
         || !op.addStringOption('\0', "arm-hwcap", "[features]",
                                "Specify ARM code generation features, or 'help' to list all features.")
         || !op.addIntOption('\0', "arm-asm-nop-fill", "SIZE",
                             "Insert the given number of NOP instructions at all possible pool locations.", 0)
@@ -5873,19 +5871,17 @@ main(int argc, char **argv, char **envp)
         || !op.addIntOption('\0', "arm-sim-stop-at", "NUMBER", "Stop the ARM simulator after the given "
                             "NUMBER of instructions.", -1)
 #elif defined(JS_MIPS_SIMULATOR)
 	|| !op.addBoolOption('\0', "mips-sim-icache-checks", "Enable icache flush checks in the MIPS "
                              "simulator.")
         || !op.addIntOption('\0', "mips-sim-stop-at", "NUMBER", "Stop the MIPS simulator after the given "
                             "NUMBER of instructions.", -1)
 #endif
-#ifdef JSGC_GENERATIONAL
         || !op.addIntOption('\0', "nursery-size", "SIZE-MB", "Set the maximum nursery size in MB", 16)
-#endif
 #ifdef JS_GC_ZEAL
         || !op.addStringOption('z', "gc-zeal", "LEVEL[,N]",
                                "Specifies zealous garbage collection, overriding the environement "
                                "variable JS_GC_ZEAL.")
 #endif
     )
     {
         return EXIT_FAILURE;
@@ -5946,38 +5942,34 @@ main(int argc, char **argv, char **envp)
     if (threadCount >= 0)
         SetFakeCPUCount(threadCount);
 
     // Start the engine.
     if (!JS_Init())
         return 1;
 
     size_t nurseryBytes = JS::DefaultNurseryBytes;
-#ifdef JSGC_GENERATIONAL
     nurseryBytes = op.getIntOption("nursery-size") * 1024L * 1024L;
-#endif
 
     /* Use the same parameters as the browser in xpcjsruntime.cpp. */
     rt = JS_NewRuntime(JS::DefaultHeapMaxBytes, nurseryBytes);
     if (!rt)
         return 1;
 
     JS_SetErrorReporter(rt, my_ErrorReporter);
     JS::SetOutOfMemoryCallback(rt, my_OOMCallback, nullptr);
     if (!SetRuntimeOptions(rt, op))
         return 1;
 
     gInterruptFunc.emplace(rt, NullValue());
 
     JS_SetGCParameter(rt, JSGC_MAX_BYTES, 0xffffffff);
-#ifdef JSGC_GENERATIONAL
     Maybe<JS::AutoDisableGenerationalGC> noggc;
     if (op.getBoolOption("no-ggc"))
         noggc.emplace(rt);
-#endif
 
     size_t availMem = op.getIntOption("available-memory");
     if (availMem > 0)
         JS_SetGCParametersBasedOnAvailableMemory(rt, availMem);
 
     JS_SetTrustedPrincipals(rt, &ShellPrincipals::fullyTrusted);
     JS_SetSecurityCallbacks(rt, &ShellPrincipals::securityCallbacks);
     JS_InitDestroyPrincipalsCallback(rt, ShellPrincipals::destroy);
@@ -6031,16 +6023,14 @@ main(int argc, char **argv, char **envp)
     KillWatchdog();
 
     gInterruptFunc.reset();
 
     MOZ_ASSERT_IF(!CanUseExtraThreads(), workerThreads.empty());
     for (size_t i = 0; i < workerThreads.length(); i++)
         PR_JoinThread(workerThreads[i]);
 
-#ifdef JSGC_GENERATIONAL
     noggc.reset();
-#endif
 
     JS_DestroyRuntime(rt);
     JS_ShutDown();
     return result;
 }
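
For an embedder, the shell's now-unconditional GGC options reduce to the two things visible in the hunks above: pass a nursery size to JS_NewRuntime, and hold a JS::AutoDisableGenerationalGC (the shell keeps one in a mozilla::Maybe) while generational GC should stay off. A hedged sketch of that setup; the exact header names and the option handling are assumptions, not taken from the patch.

#include "jsapi.h"
#include "js/GCAPI.h"
#include "mozilla/Maybe.h"

int main(int argc, char **argv) {
    if (!JS_Init())
        return 1;

    const size_t nurseryBytes = 16 * 1024L * 1024L;   // 16 MB, the shell's default
    JSRuntime *rt = JS_NewRuntime(JS::DefaultHeapMaxBytes, nurseryBytes);
    if (!rt)
        return 1;

    // Equivalent of passing --no-ggc: nursery allocation stays disabled while
    // this object is alive.
    mozilla::Maybe<JS::AutoDisableGenerationalGC> noggc;
    bool wantNoGGC = argc > 1;                        // stand-in for option parsing
    if (wantNoGGC)
        noggc.emplace(rt);

    // ... create a context, compartment, and run scripts here ...

    noggc.reset();
    JS_DestroyRuntime(rt);
    JS_ShutDown();
    return 0;
}
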
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -1883,20 +1883,18 @@ AppendAndInvalidateScriptIfObservable(JS
 
 static bool
 UpdateExecutionObservabilityOfScriptsInZone(JSContext *cx, Zone *zone,
                                             const Debugger::ExecutionObservableSet &obs,
                                             Debugger::IsObserving observing)
 {
     using namespace js::jit;
 
-#ifdef JSGC_GENERATIONAL
     // See note in js::ReleaseAllJITCode.
     cx->runtime()->gc.evictNursery();
-#endif
 
     AutoSuppressProfilerSampling suppressProfilerSampling(cx);
 
     JSRuntime *rt = cx->runtime();
     FreeOp *fop = cx->runtime()->defaultFreeOp();
 
     // Mark active baseline scripts in the observable set so that they don't
     // get discarded. They will be recompiled.
--- a/js/src/vm/NativeObject-inl.h
+++ b/js/src/vm/NativeObject-inl.h
@@ -291,17 +291,17 @@ NativeObject::getDenseOrTypedArrayElemen
 inline void
 NativeObject::initDenseElementsUnbarriered(uint32_t dstStart, const Value *src, uint32_t count) {
     /*
      * For use by parallel threads; since they cannot see nursery things,
      * no barrier is required.
      */
     MOZ_ASSERT(dstStart + count <= getDenseCapacity());
     MOZ_ASSERT(!denseElementsAreCopyOnWrite());
-#if defined(DEBUG) && defined(JSGC_GENERATIONAL)
+#ifdef DEBUG
     /*
      * This asserts a global invariant: parallel code does not
      * observe objects inside the generational GC's nursery.
      */
     MOZ_ASSERT(!gc::IsInsideGGCNursery(this));
     for (uint32_t index = 0; index < count; ++index) {
         const Value& value = src[index];
         if (value.isMarkable())
--- a/js/src/vm/NativeObject.cpp
+++ b/js/src/vm/NativeObject.cpp
@@ -407,41 +407,37 @@ NativeObject::setSlotSpan(ThreadSafeCont
     return true;
 }
 
 // This will not run the garbage collector.  If a nursery cannot accommodate the slot array,
 // an attempt will be made to place the array in the tenured area.
 static HeapSlot *
 AllocateSlots(ThreadSafeContext *cx, JSObject *obj, uint32_t nslots)
 {
-#ifdef JSGC_GENERATIONAL
     if (cx->isJSContext())
         return cx->asJSContext()->runtime()->gc.nursery.allocateSlots(obj, nslots);
-#endif
 #ifdef JSGC_FJGENERATIONAL
     if (cx->isForkJoinContext())
         return cx->asForkJoinContext()->nursery().allocateSlots(obj, nslots);
 #endif
     return obj->zone()->pod_malloc<HeapSlot>(nslots);
 }
 
 // This will not run the garbage collector.  If a nursery cannot accommodate the slot array,
 // an attempt will be made to place the array in the tenured area.
 //
 // If this returns null then the old slots will be left alone.
 static HeapSlot *
 ReallocateSlots(ThreadSafeContext *cx, JSObject *obj, HeapSlot *oldSlots,
                 uint32_t oldCount, uint32_t newCount)
 {
-#ifdef JSGC_GENERATIONAL
     if (cx->isJSContext()) {
         return cx->asJSContext()->runtime()->gc.nursery.reallocateSlots(obj, oldSlots,
                                                                         oldCount, newCount);
     }
-#endif
 #ifdef JSGC_FJGENERATIONAL
     if (cx->isForkJoinContext()) {
         return cx->asForkJoinContext()->nursery().reallocateSlots(obj, oldSlots,
                                                                   oldCount, newCount);
     }
 #endif
     return obj->zone()->pod_realloc<HeapSlot>(oldSlots, oldCount, newCount);
 }
@@ -478,21 +474,19 @@ NativeObject::growSlots(ThreadSafeContex
     Debug_SetSlotRangeToCrashOnTouch(obj->slots_ + oldCount, newCount - oldCount);
 
     return true;
 }
 
 static void
 FreeSlots(ThreadSafeContext *cx, HeapSlot *slots)
 {
-#ifdef JSGC_GENERATIONAL
     // Note: threads without a JSContext do not have access to GGC nursery allocated things.
     if (cx->isJSContext())
         return cx->asJSContext()->runtime()->gc.nursery.freeSlots(slots);
-#endif
 #ifdef JSGC_FJGENERATIONAL
     if (cx->isForkJoinContext())
         return cx->asForkJoinContext()->nursery().freeSlots(slots);
 #endif
     js_free(slots);
 }
 
 /* static */ void
@@ -717,40 +711,36 @@ NativeObject::maybeDensifySparseElements
     return ED_OK;
 }
 
 // This will not run the garbage collector.  If a nursery cannot accommodate the element array,
 // an attempt will be made to place the array in the tenured area.
 static ObjectElements *
 AllocateElements(ThreadSafeContext *cx, JSObject *obj, uint32_t nelems)
 {
-#ifdef JSGC_GENERATIONAL
     if (cx->isJSContext())
         return cx->asJSContext()->runtime()->gc.nursery.allocateElements(obj, nelems);
-#endif
 #ifdef JSGC_FJGENERATIONAL
     if (cx->isForkJoinContext())
         return cx->asForkJoinContext()->nursery().allocateElements(obj, nelems);
 #endif
 
     return reinterpret_cast<js::ObjectElements *>(obj->zone()->pod_malloc<HeapSlot>(nelems));
 }
 
 // This will not run the garbage collector.  If a nursery cannot accommodate the element array,
 // an attempt will be made to place the array in the tenured area.
 static ObjectElements *
 ReallocateElements(ThreadSafeContext *cx, JSObject *obj, ObjectElements *oldHeader,
                    uint32_t oldCount, uint32_t newCount)
 {
-#ifdef JSGC_GENERATIONAL
     if (cx->isJSContext()) {
         return cx->asJSContext()->runtime()->gc.nursery.reallocateElements(obj, oldHeader,
                                                                            oldCount, newCount);
     }
-#endif
 #ifdef JSGC_FJGENERATIONAL
     if (cx->isForkJoinContext()) {
         return cx->asForkJoinContext()->nursery().reallocateElements(obj, oldHeader,
                                                                      oldCount, newCount);
     }
 #endif
 
     return reinterpret_cast<js::ObjectElements *>(
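
The slot and element allocators above share one pattern: buffers hanging off a nursery object come from a nursery-owned side allocator when possible, fall back to malloc otherwise, and reallocation returns null without touching the old buffer on failure. A simplified stand-alone sketch of that contract; NurserySlots is a stand-in for the real Nursery slots allocator.

#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <cstring>

using HeapSlot = uint64_t;

class NurserySlots {
    uint8_t buf_[4096];
    size_t used_ = 0;
  public:
    HeapSlot *allocate(uint32_t nslots) {
        size_t bytes = nslots * sizeof(HeapSlot);
        if (used_ + bytes > sizeof(buf_))
            return nullptr;                        // no room: caller uses malloc
        HeapSlot *p = reinterpret_cast<HeapSlot *>(buf_ + used_);
        used_ += bytes;
        return p;
    }
    bool isInside(const void *p) const {
        const uint8_t *q = static_cast<const uint8_t *>(p);
        return q >= buf_ && q < buf_ + sizeof(buf_);
    }
};

HeapSlot *AllocateSlots(NurserySlots &nursery, uint32_t nslots) {
    if (HeapSlot *p = nursery.allocate(nslots))
        return p;
    return static_cast<HeapSlot *>(std::malloc(nslots * sizeof(HeapSlot)));
}

// On failure this returns null and leaves oldSlots valid, matching the comment
// "If this returns null then the old slots will be left alone."
HeapSlot *ReallocateSlots(NurserySlots &nursery, HeapSlot *oldSlots,
                          uint32_t oldCount, uint32_t newCount) {
    HeapSlot *fresh = AllocateSlots(nursery, newCount);
    if (!fresh)
        return nullptr;
    std::memcpy(fresh, oldSlots, oldCount * sizeof(HeapSlot));
    if (!nursery.isInside(oldSlots))
        std::free(oldSlots);                       // nursery memory is reclaimed wholesale
    return fresh;
}

int main() {
    NurserySlots nursery;
    HeapSlot *slots = AllocateSlots(nursery, 8);
    slots = ReallocateSlots(nursery, slots, 8, 16);
    std::printf("slots now at %p\n", static_cast<void *>(slots));
    return 0;
}
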
--- a/js/src/vm/NativeObject.h
+++ b/js/src/vm/NativeObject.h
@@ -306,22 +306,20 @@ IsObjectValueInCompartment(Value v, JSCo
  * NOTE: This is a placeholder for bug 619558.
  *
  * Run a post write barrier that encompasses multiple contiguous slots in a
  * single step.
  */
 inline void
 DenseRangeWriteBarrierPost(JSRuntime *rt, NativeObject *obj, uint32_t start, uint32_t count)
 {
-#ifdef JSGC_GENERATIONAL
     if (count > 0) {
         JS::shadow::Runtime *shadowRuntime = JS::shadow::Runtime::asShadowRuntime(rt);
         shadowRuntime->gcStoreBufferPtr()->putSlotFromAnyThread(obj, HeapSlot::Element, start, count);
     }
-#endif
 }
 
 /*
  * NativeObject specifies the internal implementation of a native object.
  *
  * Native objects extend the base implementation of an object with storage
  * for the object's named properties and indexed elements.
  *
@@ -1159,24 +1157,22 @@ class NativeObject : public JSObject
      * following this object, for use with objects which allocate a larger size
      * class than they need and store non-elements data inline.
      */
     inline uint8_t *fixedData(size_t nslots) const;
 
     inline void privateWriteBarrierPre(void **oldval);
 
     void privateWriteBarrierPost(void **pprivate) {
-#ifdef JSGC_GENERATIONAL
         gc::Cell **cellp = reinterpret_cast<gc::Cell **>(pprivate);
         MOZ_ASSERT(cellp);
         MOZ_ASSERT(*cellp);
         gc::StoreBuffer *storeBuffer = (*cellp)->storeBuffer();
         if (storeBuffer)
             storeBuffer->putCellFromAnyThread(cellp);
-#endif
     }
 
     /* Private data accessors. */
 
     inline void *&privateRef(uint32_t nfixed) const { /* XXX should be private, not protected! */
         /*
          * The private pointer of an object can hold any word sized value.
          * Private pointers are stored immediately after the last fixed slot of
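
DenseRangeWriteBarrierPost above is the range form of the post barrier: one (object, start, count) record covers a whole run of dense elements instead of one store-buffer entry per slot. A short sketch with stand-in types:

#include <cstdint>
#include <cstdio>
#include <vector>

struct SlotsEdge {
    void *obj;
    uint32_t start;
    uint32_t count;
};

struct StoreBuffer {
    std::vector<SlotsEdge> slotEdges;
    void putSlotRange(void *obj, uint32_t start, uint32_t count) {
        slotEdges.push_back(SlotsEdge{obj, start, count});
    }
};

void DenseRangeWriteBarrierPost(StoreBuffer &sb, void *obj,
                                uint32_t start, uint32_t count) {
    if (count > 0)                       // nothing to record for empty writes
        sb.putSlotRange(obj, start, count);
}

int main() {
    StoreBuffer sb;
    int fakeObject;
    DenseRangeWriteBarrierPost(sb, &fakeObject, /* start = */ 3, /* count = */ 64);
    std::printf("buffered ranges: %zu (covering %u slots)\n",
                sb.slotEdges.size(), sb.slotEdges[0].count);
    return 0;
}
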
--- a/js/src/vm/Runtime.cpp
+++ b/js/src/vm/Runtime.cpp
@@ -125,21 +125,17 @@ static const JSWrapObjectCallbacks Defau
 
 static size_t
 ReturnZeroSize(const void *p)
 {
     return 0;
 }
 
 JSRuntime::JSRuntime(JSRuntime *parentRuntime)
-  : JS::shadow::Runtime(
-#ifdef JSGC_GENERATIONAL
-        &gc.storeBuffer
-#endif
-    ),
+  : JS::shadow::Runtime(&gc.storeBuffer),
     mainThread(this),
     parentRuntime(parentRuntime),
     interrupt_(false),
     interruptPar_(false),
     telemetryCallback(nullptr),
     handlingSignal(false),
     interruptCallback(nullptr),
     exclusiveAccessLock(nullptr),
@@ -433,20 +429,18 @@ JSRuntime::~JSRuntime()
 
     js_free(defaultLocale);
     js_delete(mathCache_);
     js_delete(jitRuntime_);
     js_delete(execAlloc_);  /* Delete after jitRuntime_. */
 
     js_delete(ionPcScriptCache);
 
-#ifdef JSGC_GENERATIONAL
     gc.storeBuffer.disable();
     gc.nursery.disable();
-#endif
 
 #if defined(JS_ARM_SIMULATOR) || defined(JS_MIPS_SIMULATOR)
     js::jit::DestroySimulatorRuntime(simulatorRuntime_);
 #endif
 
     DebugOnly<size_t> oldCount = liveRuntimesCount--;
     MOZ_ASSERT(oldCount > 0);
 
@@ -469,28 +463,26 @@ void
 JSRuntime::setTelemetryCallback(JSRuntime *rt, JSAccumulateTelemetryDataCallback callback)
 {
     rt->telemetryCallback = callback;
 }
 
 void
 NewObjectCache::clearNurseryObjects(JSRuntime *rt)
 {
-#ifdef JSGC_GENERATIONAL
     for (unsigned i = 0; i < mozilla::ArrayLength(entries); ++i) {
         Entry &e = entries[i];
         NativeObject *obj = reinterpret_cast<NativeObject *>(&e.templateObject);
         if (IsInsideNursery(e.key) ||
             rt->gc.nursery.isInside(obj->slots_) ||
             rt->gc.nursery.isInside(obj->elements_))
         {
             PodZero(&e);
         }
     }
-#endif
 }
 
 void
 JSRuntime::addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf, JS::RuntimeSizes *rtSizes)
 {
     // Several tables in the runtime enumerated below can be used off thread.
     AutoLockForExclusiveAccess lock(this);
 
@@ -525,22 +517,20 @@ JSRuntime::addSizeOfIncludingThis(mozill
 
     if (execAlloc_)
         execAlloc_->addSizeOfCode(&rtSizes->code);
 
     if (jitRuntime() && jitRuntime()->ionAlloc(this))
         jitRuntime()->ionAlloc(this)->addSizeOfCode(&rtSizes->code);
 
     rtSizes->gc.marker += gc.marker.sizeOfExcludingThis(mallocSizeOf);
-#ifdef JSGC_GENERATIONAL
     rtSizes->gc.nurseryCommitted += gc.nursery.sizeOfHeapCommitted();
     rtSizes->gc.nurseryDecommitted += gc.nursery.sizeOfHeapDecommitted();
     rtSizes->gc.nurseryHugeSlots += gc.nursery.sizeOfHugeSlots(mallocSizeOf);
     gc.storeBuffer.addSizeOfExcludingThis(mallocSizeOf, &rtSizes->gc);
-#endif
 }
 
 static bool
 InvokeInterruptCallback(JSContext *cx)
 {
     MOZ_ASSERT(cx->runtime()->requestDepth >= 1);
 
     cx->gcIfNeeded();
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -342,20 +342,18 @@ class NewObjectCache
         entry->kind = kind;
 
         entry->nbytes = gc::Arena::thingSize(kind);
         js_memcpy(&entry->templateObject, obj, entry->nbytes);
     }
 
     static void copyCachedToObject(JSObject *dst, JSObject *src, gc::AllocKind kind) {
         js_memcpy(dst, src, gc::Arena::thingSize(kind));
-#ifdef JSGC_GENERATIONAL
         Shape::writeBarrierPost(dst->shape_, &dst->shape_);
         types::TypeObject::writeBarrierPost(dst->type_, &dst->type_);
-#endif
     }
 };
 
 /*
  * A FreeOp can do one thing: free memory. For convenience, it has delete_
  * convenience methods that also call destructors.
  *
  * FreeOp is passed to finalizers and other sweep-phase hooks so that we do not
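
copyCachedToObject shows why raw copies need explicit follow-up: js_memcpy duplicates the cached template's shape and type pointers as plain bits, bypassing any barrier, so the post barriers are re-run on the destination's fields afterwards. A simplified sketch of that obligation; the types below are stand-ins.

#include <cstdio>
#include <cstring>
#include <vector>

struct Cell { bool inNursery; };

struct FakeObject {
    Cell *shape;
    Cell *type;
};

struct StoreBuffer {
    std::vector<Cell **> edges;
    void putCell(Cell **cellp) {
        if (*cellp && (*cellp)->inNursery)
            edges.push_back(cellp);
    }
};

void copyCachedToObject(StoreBuffer &sb, FakeObject *dst, const FakeObject *src) {
    std::memcpy(dst, src, sizeof(FakeObject));   // raw copy: no barriers fire
    sb.putCell(&dst->shape);                     // so re-run the post barriers
    sb.putCell(&dst->type);
}

int main() {
    Cell nurseryShape{true}, tenuredType{false};
    FakeObject cached{&nurseryShape, &tenuredType};
    FakeObject fresh{};

    StoreBuffer sb;
    copyCachedToObject(sb, &fresh, &cached);
    std::printf("edges needing minor-GC fixup: %zu\n", sb.edges.size());
    return 0;
}
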
--- a/js/src/vm/ScopeObject.cpp
+++ b/js/src/vm/ScopeObject.cpp
@@ -1874,23 +1874,20 @@ js_IsDebugScopeSlow(ProxyObject *proxy)
 }
 
 /*****************************************************************************/
 
 /* static */ MOZ_ALWAYS_INLINE void
 DebugScopes::proxiedScopesPostWriteBarrier(JSRuntime *rt, ObjectWeakMap *map,
                                            const PreBarrieredObject &key)
 {
-#ifdef JSGC_GENERATIONAL
     if (key && IsInsideNursery(key))
         rt->gc.storeBuffer.putGeneric(UnbarrieredRef(map, key.get()));
-#endif
 }
 
-#ifdef JSGC_GENERATIONAL
 class DebugScopes::MissingScopesRef : public gc::BufferableRef
 {
     MissingScopeMap *map;
     ScopeIterKey key;
 
   public:
     MissingScopesRef(MissingScopeMap *m, const ScopeIterKey &k) : map(m), key(k) {}
 
@@ -1899,42 +1896,37 @@ class DebugScopes::MissingScopesRef : pu
         MissingScopeMap::Ptr p = map->lookup(key);
         if (!p)
             return;
         trc->setTracingLocation(&const_cast<ScopeIterKey &>(p->key()).enclosingScope());
         Mark(trc, &key.enclosingScope(), "MissingScopesRef");
         map->rekeyIfMoved(prior, key);
     }
 };
-#endif
 
 /* static */ MOZ_ALWAYS_INLINE void
 DebugScopes::missingScopesPostWriteBarrier(JSRuntime *rt, MissingScopeMap *map,
                                            const ScopeIterKey &key)
 {
-#ifdef JSGC_GENERATIONAL
     if (key.enclosingScope() && IsInsideNursery(key.enclosingScope()))
         rt->gc.storeBuffer.putGeneric(MissingScopesRef(map, key));
-#endif
 }
 
 /* static */ MOZ_ALWAYS_INLINE void
 DebugScopes::liveScopesPostWriteBarrier(JSRuntime *rt, LiveScopeMap *map, ScopeObject *key)
 {
-#ifdef JSGC_GENERATIONAL
     // As above.  Otherwise, barriers could fire during GC when moving the
     // value.
     typedef HashMap<ScopeObject *,
                     ScopeIterKey,
                     DefaultHasher<ScopeObject *>,
                     RuntimeAllocPolicy> UnbarrieredLiveScopeMap;
     typedef gc::HashKeyRef<UnbarrieredLiveScopeMap, ScopeObject *> Ref;
     if (key && IsInsideNursery(key))
         rt->gc.storeBuffer.putGeneric(Ref(reinterpret_cast<UnbarrieredLiveScopeMap *>(map), key));
-#endif
 }
 
 DebugScopes::DebugScopes(JSContext *cx)
  : proxiedScopes(cx),
    missingScopes(cx->runtime()),
    liveScopes(cx->runtime())
 {}
 
--- a/js/src/vm/ScopeObject.h
+++ b/js/src/vm/ScopeObject.h
@@ -924,17 +924,17 @@ class DebugScopes
   private:
     bool init();
 
     static DebugScopes *ensureCompartmentData(JSContext *cx);
 
   public:
     void mark(JSTracer *trc);
     void sweep(JSRuntime *rt);
-#if defined(JSGC_GENERATIONAL) && defined(JS_GC_ZEAL)
+#ifdef JS_GC_ZEAL
     void checkHashTablesAfterMovingGC(JSRuntime *rt);
 #endif
 
     static DebugScopeObject *hasDebugScope(JSContext *cx, ScopeObject &scope);
     static bool addDebugScope(JSContext *cx, ScopeObject &scope, DebugScopeObject &debugScope);
 
     static DebugScopeObject *hasDebugScope(JSContext *cx, const ScopeIter &si);
     static bool addDebugScope(JSContext *cx, const ScopeIter &si, DebugScopeObject &debugScope);
--- a/js/src/vm/Shape.cpp
+++ b/js/src/vm/Shape.cpp
@@ -1585,18 +1585,16 @@ InitialShapeEntry::match(const InitialSh
     return lookup.clasp == shape->getObjectClass()
         && lookup.matchProto.toWord() == key.proto.toWord()
         && lookup.matchParent == shape->getObjectParent()
         && lookup.matchMetadata == shape->getObjectMetadata()
         && lookup.nfixed == shape->numFixedSlots()
         && lookup.baseFlags == shape->getObjectFlags();
 }
 
-#ifdef JSGC_GENERATIONAL
-
 /*
  * This class is used to add a post barrier on the initialShapes set, as the key
  * is calculated based on several objects which may be moved by generational GC.
  */
 class InitialShapeSetRef : public BufferableRef
 {
     InitialShapeSet *set;
     const Class *clasp;
@@ -1651,18 +1649,16 @@ class InitialShapeSetRef : public Buffer
 
         /* Rekey the entry. */
         set->rekeyAs(lookup,
                      InitialShapeEntry::Lookup(clasp, proto, parent, metadata, nfixed, objectFlags),
                      *p);
     }
 };
 
-#endif // JSGC_GENERATIONAL
-
 #ifdef JSGC_HASH_TABLE_CHECKS
 
 void
 JSCompartment::checkInitialShapesTableAfterMovingGC()
 {
     if (!initialShapes.initialized())
         return;
 
@@ -1740,28 +1736,27 @@ EmptyShape::getInitialShape(ExclusiveCon
     Shape *shape = EmptyShape::new_(cx, nbase, nfixed);
     if (!shape)
         return nullptr;
 
     Lookup lookup(clasp, protoRoot, parentRoot, metadataRoot, nfixed, objectFlags);
     if (!p.add(cx, table, lookup, InitialShapeEntry(ReadBarrieredShape(shape), protoRoot)))
         return nullptr;
 
-#ifdef JSGC_GENERATIONAL
+    // Post-barrier for the initial shape table update.
     if (cx->isJSContext()) {
         if ((protoRoot.isObject() && IsInsideNursery(protoRoot.toObject())) ||
             IsInsideNursery(parentRoot.get()) ||
             IsInsideNursery(metadataRoot.get()))
         {
             InitialShapeSetRef ref(
                 &table, clasp, protoRoot, parentRoot, metadataRoot, nfixed, objectFlags);
             cx->asJSContext()->runtime()->gc.storeBuffer.putGeneric(ref);
         }
     }
-#endif
 
     return shape;
 }
 
 /* static */ Shape *
 EmptyShape::getInitialShape(ExclusiveContext *cx, const Class *clasp, TaggedProto proto,
                             JSObject *parent, JSObject *metadata,
                             AllocKind kind, uint32_t objectFlags)
--- a/js/src/vm/Shape.h
+++ b/js/src/vm/Shape.h
@@ -258,53 +258,47 @@ class AccessorShape;
 class Shape;
 class UnownedBaseShape;
 struct StackBaseShape;
 
 namespace gc {
 void MergeCompartments(JSCompartment *source, JSCompartment *target);
 }
 
-#ifdef JSGC_GENERATIONAL
 // This class is used to add a post barrier on the AccessorShape's getter/setter
 // objects. It updates the shape's entry in the parent's KidsHash table.
 class ShapeGetterSetterRef : public gc::BufferableRef
 {
     AccessorShape *shape;
     JSObject **objp;
 
   public:
     ShapeGetterSetterRef(AccessorShape *shape, JSObject **objp)
       : shape(shape), objp(objp)
     {}
 
     void mark(JSTracer *trc);
 };
-#endif
 
 static inline void
 GetterSetterWriteBarrierPost(AccessorShape *shape, JSObject **objp)
 {
-#ifdef JSGC_GENERATIONAL
     MOZ_ASSERT(shape);
     MOZ_ASSERT(objp);
     MOZ_ASSERT(*objp);
     gc::Cell **cellp = reinterpret_cast<gc::Cell **>(objp);
     if (gc::StoreBuffer *sb = (*cellp)->storeBuffer())
         sb->putGeneric(ShapeGetterSetterRef(shape, objp));
-#endif
 }
 
 static inline void
 GetterSetterWriteBarrierPostRemove(JSRuntime *rt, JSObject **objp)
 {
-#ifdef JSGC_GENERATIONAL
     JS::shadow::Runtime *shadowRuntime = JS::shadow::Runtime::asShadowRuntime(rt);
     shadowRuntime->gcStoreBufferPtr()->removeRelocatableCellFromAnyThread(reinterpret_cast<gc::Cell **>(objp));
-#endif
 }
 
 class BaseShape : public gc::TenuredCell
 {
   public:
     friend class Shape;
     friend struct StackBaseShape;
     friend struct StackShape;
@@ -1171,33 +1165,31 @@ struct InitialShapeEntry
                uint32_t nfixed, uint32_t baseFlags)
           : clasp(clasp),
             hashProto(proto), matchProto(proto),
             hashParent(parent), matchParent(parent),
             hashMetadata(metadata), matchMetadata(metadata),
             nfixed(nfixed), baseFlags(baseFlags)
         {}
 
-#ifdef JSGC_GENERATIONAL
         /*
          * For use by generational GC post barriers. Look up an entry whose
          * parent and metadata fields may have been moved, but was hashed with
          * the original values.
          */
         Lookup(const Class *clasp, TaggedProto proto,
                JSObject *hashParent, JSObject *matchParent,
                JSObject *hashMetadata, JSObject *matchMetadata,
                uint32_t nfixed, uint32_t baseFlags)
           : clasp(clasp),
             hashProto(proto), matchProto(proto),
             hashParent(hashParent), matchParent(matchParent),
             hashMetadata(hashMetadata), matchMetadata(matchMetadata),
             nfixed(nfixed), baseFlags(baseFlags)
         {}
-#endif
     };
 
     inline InitialShapeEntry();
     inline InitialShapeEntry(const ReadBarrieredShape &shape, TaggedProto proto);
 
     inline Lookup getLookup() const;
 
     static inline HashNumber hash(const Lookup &lookup);
--- a/mobile/android/app/mobile.js
+++ b/mobile/android/app/mobile.js
@@ -376,21 +376,17 @@ pref("javascript.options.gc_on_memory_pr
 pref("javascript.options.mem.gc_high_frequency_heap_growth_max", 120);
 pref("javascript.options.mem.gc_high_frequency_heap_growth_min", 120);
 pref("javascript.options.mem.gc_high_frequency_high_limit_mb", 40);
 pref("javascript.options.mem.gc_high_frequency_low_limit_mb", 10);
 pref("javascript.options.mem.gc_low_frequency_heap_growth", 120);
 pref("javascript.options.mem.high_water_mark", 16);
 pref("javascript.options.mem.gc_allocation_threshold_mb", 3);
 pref("javascript.options.mem.gc_decommit_threshold_mb", 1);
-#ifdef JSGC_GENERATIONAL
 pref("javascript.options.mem.gc_min_empty_chunk_count", 1);
-#else
-pref("javascript.options.mem.gc_min_empty_chunk_count", 0);
-#endif
 pref("javascript.options.mem.gc_max_empty_chunk_count", 2);
 #else
 pref("javascript.options.mem.high_water_mark", 32);
 #endif
 
 pref("dom.max_chrome_script_run_time", 0); // disable slow script dialog for chrome
 pref("dom.max_script_run_time", 20);
 
--- a/mobile/android/confvars.sh
+++ b/mobile/android/confvars.sh
@@ -85,13 +85,10 @@ MOZ_ANDROID_SHARE_OVERLAY=1
 # Enable the Mozilla Location Service stumbler.
 MOZ_ANDROID_MLS_STUMBLER=1
 
 # Enable adding to the system downloads list in pre-release builds.
 if test ! "$RELEASE_BUILD"; then
   MOZ_ANDROID_DOWNLOADS_INTEGRATION=1
 fi
 
-# Enable generational GC on mobile.
-export JSGC_GENERATIONAL=1
-
 # Use the low-memory GC tuning.
 export JS_GC_SMALL_CHUNK_SIZE=1
--- a/modules/libpref/init/all.js
+++ b/modules/libpref/init/all.js
@@ -1036,21 +1036,17 @@ pref("javascript.options.mem.gc_high_fre
 pref("javascript.options.mem.gc_high_frequency_high_limit_mb", 500);
 pref("javascript.options.mem.gc_high_frequency_heap_growth_max", 300);
 pref("javascript.options.mem.gc_high_frequency_heap_growth_min", 150);
 pref("javascript.options.mem.gc_low_frequency_heap_growth", 150);
 pref("javascript.options.mem.gc_dynamic_heap_growth", true);
 pref("javascript.options.mem.gc_dynamic_mark_slice", true);
 pref("javascript.options.mem.gc_allocation_threshold_mb", 30);
 pref("javascript.options.mem.gc_decommit_threshold_mb", 32);
-#ifdef JSGC_GENERATIONAL
 pref("javascript.options.mem.gc_min_empty_chunk_count", 1);
-#else
-pref("javascript.options.mem.gc_min_empty_chunk_count", 0);
-#endif
 pref("javascript.options.mem.gc_max_empty_chunk_count", 30);
 
 pref("javascript.options.showInConsole", false);
 
 // advanced prefs
 pref("advanced.mailftp",                    false);
 pref("image.animation_mode",                "normal");
 
--- a/xpcom/glue/tests/gtest/TestGCPostBarriers.cpp
+++ b/xpcom/glue/tests/gtest/TestGCPostBarriers.cpp
@@ -50,21 +50,17 @@ RunTest(JSRuntime* rt, JSContext* cx, Ar
   /*
    * Create the array and fill it with new JS objects. With GGC these will be
    * allocated in the nursery.
    */
   RootedValue value(cx);
   const char* property = "foo";
   for (size_t i = 0; i < ElementCount; ++i) {
     RootedObject obj(cx, JS_NewObject(cx, nullptr, JS::NullPtr(), JS::NullPtr()));
-#ifdef JSGC_GENERATIONAL
     ASSERT_TRUE(js::gc::IsInsideNursery(AsCell(obj)));
-#else
-    ASSERT_FALSE(js::gc::IsInsideNursery(AsCell(obj)));
-#endif
     value = Int32Value(i);
     ASSERT_TRUE(JS_SetProperty(cx, obj, property, value));
     array->AppendElement(obj);
   }
 
   /*
   * If post barriers are not working, we will crash here when we try to mark
    * objects that have been moved to the tenured heap.