Bug 650161 - Enable compacting GC on GC_SHRINK collections r=terrence r=glandium
author: Jon Coppeard <jcoppeard@mozilla.com>
date: Fri, 16 Jan 2015 14:34:32 +0000
changeset: 224203 ae2eec4a74ea1f8d323f6e09b56f7c07a2c94112
parent: 224202 5b0f5677578a96c8a836433773e0ae6e9ca5e6ac
child: 224204 56824531bfec29b5f4e9dcb4cd11d1eee8b27cba
push id: 54151
push user: jcoppeard@mozilla.com
push date: Fri, 16 Jan 2015 14:36:07 +0000
reviewers: terrence, glandium
bugs: 650161
milestone: 38.0a1
js/src/configure.in
js/src/gc/GCInternals.h
js/src/gc/GCRuntime.h
js/src/gc/Heap.h
js/src/gc/Marking.cpp
js/src/js-config.h.in
js/src/jsapi-tests/testWeakMap.cpp
js/src/jscompartment.cpp
js/src/jscompartment.h
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsinfer.cpp
js/src/jsinfer.h
js/src/jspropertytree.cpp
js/src/jspubtd.h
js/src/vm/Shape-inl.h
js/src/vm/Shape.cpp
js/src/vm/Shape.h
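
This patch drops the JSGC_COMPACTING compile-time option: compacting GC is
now always built, and it runs whenever a GC_SHRINK collection is requested.
Embedders that cannot tolerate moving GC things opt out at runtime via the
two JSAPI functions updated in jsgc.cpp below. A minimal sketch of that
runtime control (the ConfigureGC wrapper is a hypothetical name, not part
of this patch):

    #include "jsapi.h"

    // After this patch there is no configure-time switch; disabling
    // compacting GC at runtime is roughly equivalent to building
    // without --enable-gccompacting before it.
    void ConfigureGC(JSRuntime *rt)
    {
        JS::DisableCompactingGC(rt);              // bumps an internal counter
        MOZ_ASSERT(!JS::IsCompactingGCEnabled(rt));
    }
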
--- a/js/src/configure.in
+++ b/js/src/configure.in
@@ -3050,28 +3050,16 @@ fi
 dnl ========================================================
 dnl = Location of malloc wrapper lib
 dnl ========================================================
 MOZ_ARG_WITH_STRING(wrap-malloc,
 [  --with-wrap-malloc=DIR  Location of malloc wrapper library],
     WRAP_LDFLAGS="${WRAP_LDFLAGS} $withval")
 
 dnl ========================================================
-dnl = Use compacting GC
-dnl ========================================================
-dnl Compact the heap by moving GC things when doing a shrinking colletion.
-MOZ_ARG_ENABLE_BOOL(gccompacting,
-[  --enable-gccompacting   Compact the heap by moving GC things],
-    JSGC_COMPACTING=1,
-    JSGC_COMPACTING= )
-if test -n "$JSGC_COMPACTING"; then
-    AC_DEFINE(JSGC_COMPACTING)
-fi
-
-dnl ========================================================
 dnl = Use a smaller chunk size for GC chunks
 dnl ========================================================
 dnl Use large (1MB) chunks by default.  For B2G this option is used to give
 dnl smaller (currently 256K) chunks.
 MOZ_ARG_ENABLE_BOOL(small-chunk-size,
 [  --enable-small-chunk-size  Allocate memory for JS GC things in smaller chunks],
     JS_GC_SMALL_CHUNK_SIZE=1,
     JS_GC_SMALL_CHUNK_SIZE= )
--- a/js/src/gc/GCInternals.h
+++ b/js/src/gc/GCInternals.h
@@ -138,23 +138,21 @@ struct AutoStopVerifyingBarriers
 };
 #endif /* JS_GC_ZEAL */
 
 #ifdef JSGC_HASH_TABLE_CHECKS
 void
 CheckHashTablesAfterMovingGC(JSRuntime *rt);
 #endif
 
-#ifdef JSGC_COMPACTING
 struct MovingTracer : JSTracer {
     explicit MovingTracer(JSRuntime *rt) : JSTracer(rt, Visit, TraceWeakMapKeysValues) {}
 
     static void Visit(JSTracer *jstrc, void **thingp, JSGCTraceKind kind);
     static bool IsMovingTracer(JSTracer *trc) {
         return trc->callback == Visit;
     }
 };
-#endif
 
 } /* namespace gc */
 } /* namespace js */
 
 #endif /* gc_GCInternals_h */
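
With the ifdef gone, MovingTracer is compiled unconditionally. Its
IsMovingTracer test is an idiom the Marking.cpp hunks below depend on: a
tracer's kind is recovered by comparing its callback pointer, with no
virtual dispatch or type tag. A self-contained sketch of the idiom (all
names here are hypothetical; only the pattern mirrors the patch):

    struct Tracer {
        typedef void (*Callback)(Tracer *trc, void **thingp);
        explicit Tracer(Callback cb) : callback(cb) {}
        Callback callback;
    };

    struct MovingTracerSketch : Tracer {
        MovingTracerSketch() : Tracer(Visit) {}
        static void Visit(Tracer *trc, void **thingp) {
            // Would update *thingp to the cell's post-compaction address.
        }
        // A tracer is "moving" exactly when its callback is Visit.
        static bool IsMovingTracer(Tracer *trc) {
            return trc->callback == Visit;
        }
    };
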
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -27,21 +27,18 @@ class AutoLockGC;
 namespace gc {
 
 typedef Vector<JS::Zone *, 4, SystemAllocPolicy> ZoneVector;
 
 struct FinalizePhase;
 class MarkingValidator;
 struct AutoPrepareForTracing;
 class AutoTraceSession;
-
-#ifdef JSGC_COMPACTING
 struct ArenasToUpdate;
 struct MovingTracer;
-#endif
 
 class ChunkPool
 {
     Chunk *head_;
     size_t count_;
 
   public:
     ChunkPool() : head_(nullptr), count_(0) {}
@@ -290,21 +287,17 @@ class GCRuntime
 
     void setParameter(JSGCParamKey key, uint32_t value);
     uint32_t getParameter(JSGCParamKey key, const AutoLockGC &lock);
 
     bool isHeapBusy() { return heapState != js::Idle; }
     bool isHeapMajorCollecting() { return heapState == js::MajorCollecting; }
     bool isHeapMinorCollecting() { return heapState == js::MinorCollecting; }
     bool isHeapCollecting() { return isHeapMajorCollecting() || isHeapMinorCollecting(); }
-#ifdef JSGC_COMPACTING
     bool isHeapCompacting() { return isHeapMajorCollecting() && state() == COMPACT; }
-#else
-    bool isHeapCompacting() { return false; }
-#endif
 
     bool triggerGC(JS::gcreason::Reason reason);
     void maybeAllocTriggerZoneGC(Zone *zone, const AutoLockGC &lock);
     bool triggerZoneGC(Zone *zone, JS::gcreason::Reason reason);
     bool maybeGC(Zone *zone);
     void maybePeriodicFullGC();
     void minorGC(JS::gcreason::Reason reason) {
         gcstats::AutoPhase ap(stats, gcstats::PHASE_MINOR_GC);
@@ -430,21 +423,19 @@ class GCRuntime
 
     bool isIncrementalGCEnabled() { return mode == JSGC_MODE_INCREMENTAL && incrementalAllowed; }
     bool isIncrementalGCInProgress() { return state() != gc::NO_INCREMENTAL; }
 
     bool isGenerationalGCEnabled() { return generationalDisabled == 0; }
     void disableGenerationalGC();
     void enableGenerationalGC();
 
-#ifdef JSGC_COMPACTING
     void disableCompactingGC();
     void enableCompactingGC();
     bool isCompactingGCEnabled();
-#endif
 
     void setGrayRootsTracer(JSTraceDataOp traceOp, void *data);
     bool addBlackRootsTracer(JSTraceDataOp traceOp, void *data);
     void removeBlackRootsTracer(JSTraceDataOp traceOp, void *data);
 
     void setMaxMallocBytes(size_t value);
     void resetMallocBytes();
     bool isTooMuchMalloc() const { return mallocBytes <= 0; }
@@ -595,30 +586,28 @@ class GCRuntime
     void decommitAllWithoutUnlocking(const AutoLockGC &lock);
     void decommitArenas(AutoLockGC &lock);
     void expireChunksAndArenas(bool shouldShrink, AutoLockGC &lock);
     void queueZonesForBackgroundSweep(js::gc::ZoneList& zones);
     void sweepBackgroundThings(js::gc::ZoneList &zones, ThreadType threadType);
     void assertBackgroundSweepingFinished();
     bool shouldCompact();
     bool compactPhase(bool lastGC);
-#ifdef JSGC_COMPACTING
     void sweepTypesAfterCompacting(Zone *zone);
     void sweepZoneAfterCompacting(Zone *zone);
     ArenaHeader *relocateArenas();
     void updateAllCellPointersParallel(ArenasToUpdate &source);
     void updateAllCellPointersSerial(MovingTracer *trc, ArenasToUpdate &source);
     void updatePointersToRelocatedCells();
     void releaseRelocatedArenas(ArenaHeader *relocatedList);
     void releaseRelocatedArenasWithoutUnlocking(ArenaHeader *relocatedList, const AutoLockGC& lock);
 #ifdef DEBUG
     void protectRelocatedArenas(ArenaHeader *relocatedList);
     void unprotectRelocatedArenas(ArenaHeader *relocatedList);
 #endif
-#endif
     void finishCollection();
 
     void computeNonIncrementalMarkingForValidation();
     void validateIncrementalMarking();
     void finishMarkingValidation();
 
     void markConservativeStackRoots(JSTracer *trc, bool useSavedRoots);
 
@@ -804,23 +793,21 @@ class GCRuntime
      */
     bool incrementalAllowed;
 
     /*
      * GGC can be enabled from the command line while testing.
      */
     unsigned generationalDisabled;
 
-#ifdef JSGC_COMPACTING
     /*
      * Some code cannot tolerate compacting GC so it can be disabled with this
      * counter.
      */
     unsigned compactingDisabled;
-#endif
 
     /*
      * This is true if we are in the middle of a brain transplant (e.g.,
      * JS_TransplantObject) or some other operation that can manipulate
      * dead zones.
      */
     bool manipulatingDeadZones;
 
@@ -911,19 +898,17 @@ class GCRuntime
      * understand. In those cases, we trade the static analysis for a dynamic
      * analysis. When this is non-zero, we should assert if we trigger, or
      * might trigger, a GC.
      */
     int inUnsafeRegion;
 
     size_t noGCOrAllocationCheck;
 
-#ifdef JSGC_COMPACTING
     ArenaHeader* relocatedArenasToRelease;
-#endif
 
 #endif
 
     /* Synchronize GC heap access between main thread and GCHelperState. */
     PRLock *lock;
     mozilla::DebugOnly<PRThread *> lockOwner;
 
     BackgroundAllocTask allocTask;
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -637,20 +637,18 @@ struct ArenaHeader
     inline void setNextAllocDuringSweep(ArenaHeader *aheader);
     inline void unsetAllocDuringSweep();
 
     inline void setNextArenaToUpdate(ArenaHeader *aheader);
     inline ArenaHeader *getNextArenaToUpdateAndUnlink();
 
     void unmarkAll();
 
-#ifdef JSGC_COMPACTING
     size_t countUsedCells();
     size_t countFreeCells();
-#endif
 };
 static_assert(ArenaZoneOffset == offsetof(ArenaHeader, zone),
               "The hardcoded API zone offset must match the actual offset.");
 
 struct Arena
 {
     /*
      * Layout of an arena:
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -156,46 +156,37 @@ CheckMarkedThing(JSTracer *trc, T **thin
 {
 #ifdef DEBUG
     MOZ_ASSERT(trc);
     MOZ_ASSERT(thingp);
 
     T *thing = *thingp;
     MOZ_ASSERT(*thingp);
 
-#ifdef JSGC_COMPACTING
     thing = MaybeForwarded(thing);
-#endif
 
     /* This function uses data that's not available in the nursery. */
     if (IsInsideNursery(thing))
         return;
 
-#ifdef JSGC_COMPACTING
     MOZ_ASSERT_IF(!MovingTracer::IsMovingTracer(trc) && !Nursery::IsMinorCollectionTracer(trc),
                   !IsForwarded(*thingp));
-#endif
 
     /*
      * Permanent atoms are not associated with this runtime, but will be ignored
      * during marking.
      */
     if (ThingIsPermanentAtom(thing))
         return;
 
     Zone *zone = thing->zoneFromAnyThread();
     JSRuntime *rt = trc->runtime();
 
-#ifdef JSGC_COMPACTING
     MOZ_ASSERT_IF(!MovingTracer::IsMovingTracer(trc), CurrentThreadCanAccessZone(zone));
     MOZ_ASSERT_IF(!MovingTracer::IsMovingTracer(trc), CurrentThreadCanAccessRuntime(rt));
-#else
-    MOZ_ASSERT(CurrentThreadCanAccessZone(zone));
-    MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
-#endif
 
     MOZ_ASSERT(zone->runtimeFromAnyThread() == trc->runtime());
     MOZ_ASSERT(trc->hasTracingDetails());
 
     bool isGcMarkingTracer = IS_GC_MARKING_TRACER(trc);
 
     MOZ_ASSERT_IF(zone->requireGCTracer(), isGcMarkingTracer);
 
@@ -432,20 +423,18 @@ IsMarkedFromAnyThread(T **thingp)
     if (IsInsideNursery(*thingp)) {
         Nursery &nursery = rt->gc.nursery;
         return nursery.getForwardedPointer(thingp);
     }
 
     Zone *zone = (*thingp)->asTenured().zoneFromAnyThread();
     if (!zone->isCollectingFromAnyThread() || zone->isGCFinished())
         return true;
-#ifdef JSGC_COMPACTING
     if (zone->isGCCompacting() && IsForwarded(*thingp))
         *thingp = Forwarded(*thingp);
-#endif
     return (*thingp)->asTenured().isMarked();
 }
 
 template <typename T>
 static bool
 IsAboutToBeFinalized(T **thingp)
 {
     MOZ_ASSERT_IF(!ThingIsPermanentAtom(*thingp),
@@ -476,22 +465,20 @@ IsAboutToBeFinalizedFromAnyThread(T **th
     }
 
     Zone *zone = thing->asTenured().zoneFromAnyThread();
     if (zone->isGCSweeping()) {
         if (thing->asTenured().arenaHeader()->allocatedDuringIncremental)
             return false;
         return !thing->asTenured().isMarked();
     }
-#ifdef JSGC_COMPACTING
     else if (zone->isGCCompacting() && IsForwarded(thing)) {
         *thingp = Forwarded(thing);
         return false;
     }
-#endif
 
     return false;
 }
 
 template <typename T>
 T *
 UpdateIfRelocated(JSRuntime *rt, T **thingp)
 {
@@ -499,21 +486,20 @@ UpdateIfRelocated(JSRuntime *rt, T **thi
     if (!*thingp)
         return nullptr;
 
     if (rt->isHeapMinorCollecting() && IsInsideNursery(*thingp)) {
         rt->gc.nursery.getForwardedPointer(thingp);
         return *thingp;
     }
 
-#ifdef JSGC_COMPACTING
     Zone *zone = (*thingp)->zone();
     if (zone->isGCCompacting() && IsForwarded(*thingp))
         *thingp = Forwarded(*thingp);
-#endif
+
     return *thingp;
 }
 
 #define DeclMarkerImpl(base, type)                                                                \
 void                                                                                              \
 Mark##base(JSTracer *trc, BarrieredBase<type*> *thing, const char *name)                          \
 {                                                                                                 \
     Mark<type>(trc, thing, name);                                                                 \
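
The IsForwarded/Forwarded/MaybeForwarded calls above, now compiled
unconditionally, implement the forwarding-pointer protocol: a relocated
cell leaves a pointer to its new location behind at its old address, and
readers chase that pointer on sight. A standalone sketch of the idea using
a hypothetical tag-bit layout (an illustration only; the real
representation lives elsewhere in the GC and differs in detail):

    #include <cstdint>

    struct CellSketch {
        uintptr_t word; // low bit set => this cell has been relocated

        bool isForwarded() const { return word & 1; }
        CellSketch *forwarded() const {
            return reinterpret_cast<CellSketch *>(word & ~uintptr_t(1));
        }
        void forwardTo(CellSketch *newLocation) {
            word = reinterpret_cast<uintptr_t>(newLocation) | 1;
        }
    };

    // MaybeForwarded equivalent: follow the pointer only if the cell moved.
    inline CellSketch *MaybeForwardedSketch(CellSketch *t) {
        return t->isForwarded() ? t->forwarded() : t;
    }
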
--- a/js/src/js-config.h.in
+++ b/js/src/js-config.h.in
@@ -26,19 +26,16 @@
 
 /* Define to 1 if SpiderMonkey should support the ability to perform
    entirely too much GC.  */
 #undef JS_GC_ZEAL
 
 /* Define to 1 if SpiderMonkey should use small chunks. */
 #undef JS_GC_SMALL_CHUNK_SIZE
 
-/* Define to 1 if SpiderMonkey should use Compacting GC. */
-#undef JSGC_COMPACTING
-
 /* Define to 1 if the <endian.h> header is present and
    useable.  See jscpucfg.h.  */
 #undef JS_HAVE_ENDIAN_H
 
 /* Define to 1 if the <machine/endian.h> header is present and
    useable.  See jscpucfg.h.  */
 #undef JS_HAVE_MACHINE_ENDIAN_H
 
--- a/js/src/jsapi-tests/testWeakMap.cpp
+++ b/js/src/jsapi-tests/testWeakMap.cpp
@@ -61,20 +61,16 @@ checkSize(JS::HandleObject map, uint32_t
     uint32_t length;
     CHECK(JS_GetArrayLength(cx, keys, &length));
     CHECK(length == expected);
 
     return true;
 }
 END_TEST(testWeakMap_basicOperations)
 
-// TODO: this test stores object pointers in a private slot which is not marked
-// and so doesn't work with compacting GC.
-#ifndef JSGC_COMPACTING
-
 BEGIN_TEST(testWeakMap_keyDelegates)
 {
     JS_SetGCParameter(rt, JSGC_MODE, JSGC_MODE_INCREMENTAL);
     JS_GC(rt);
 
     JS::RootedObject map(cx, JS::NewWeakMapObject(cx));
     CHECK(map);
 
@@ -248,10 +244,8 @@ checkSize(JS::HandleObject map, uint32_t
 
     uint32_t length;
     CHECK(JS_GetArrayLength(cx, keys, &length));
     CHECK(length == expected);
 
     return true;
 }
 END_TEST(testWeakMap_keyDelegates)
-
-#endif
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -644,36 +644,32 @@ JSCompartment::sweepCrossCompartmentWrap
         } else if (key.wrapped != e.front().key().wrapped ||
                    key.debugger != e.front().key().debugger)
         {
             e.rekeyFront(key);
         }
     }
 }
 
-#ifdef JSGC_COMPACTING
-
 void JSCompartment::fixupAfterMovingGC()
 {
     fixupGlobal();
     fixupNewTypeObjectTable(newTypeObjects);
     fixupNewTypeObjectTable(lazyTypeObjects);
     fixupInitialShapeTable();
 }
 
 void
 JSCompartment::fixupGlobal()
 {
     GlobalObject *global = *global_.unsafeGet();
     if (global)
         global_.set(MaybeForwarded(global));
 }
 
-#endif // JSGC_COMPACTING
-
 void
 JSCompartment::purge()
 {
     dtoaCache.purge();
 }
 
 void
 JSCompartment::clearTables()
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -393,22 +393,20 @@ struct JSCompartment
     void sweepRegExps();
     void sweepDebugScopes();
     void sweepWeakMaps();
     void sweepNativeIterators();
 
     void purge();
     void clearTables();
 
-#ifdef JSGC_COMPACTING
     void fixupInitialShapeTable();
     void fixupNewTypeObjectTable(js::types::NewTypeObjectTable &table);
     void fixupAfterMovingGC();
     void fixupGlobal();
-#endif
 
     bool hasObjectMetadataCallback() const { return objectMetadataCallback; }
     void setObjectMetadataCallback(js::ObjectMetadataCallback callback);
     void forgetObjectMetadataCallback() {
         objectMetadataCallback = nullptr;
     }
     bool callObjectMetadataCallback(JSContext *cx, JSObject **obj) const {
         return objectMetadataCallback(cx, obj);
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1109,19 +1109,17 @@ GCRuntime::GCRuntime(JSRuntime *rt) :
     arenasAllocatedDuringSweep(nullptr),
 #ifdef JS_GC_MARKING_VALIDATION
     markingValidator(nullptr),
 #endif
     interFrameGC(false),
     sliceBudget(SliceBudget::Unlimited),
     incrementalAllowed(true),
     generationalDisabled(0),
-#ifdef JSGC_COMPACTING
     compactingDisabled(0),
-#endif
     manipulatingDeadZones(false),
     objectsMarkedInDeadZones(0),
     poked(false),
     heapState(Idle),
 #ifdef JS_GC_ZEAL
     zealMode(0),
     zealFrequency(0),
     nextScheduled(0),
@@ -1131,20 +1129,18 @@ GCRuntime::GCRuntime(JSRuntime *rt) :
     validate(true),
     fullCompartmentChecks(false),
     mallocBytes(0),
     mallocGCTriggered(false),
     alwaysPreserveCode(false),
 #ifdef DEBUG
     inUnsafeRegion(0),
     noGCOrAllocationCheck(0),
-#ifdef JSGC_COMPACTING
     relocatedArenasToRelease(nullptr),
 #endif
-#endif
     lock(nullptr),
     lockOwner(nullptr),
     allocTask(rt, emptyChunks_),
     helperState(rt)
 {
     setGCMode(JSGC_MODE_GLOBAL);
 }
 
@@ -1927,24 +1923,18 @@ ArenaLists::allocateFromArenaInner(JS::Z
     return thing;
 }
 
 /* Compacting GC */
 
 bool
 GCRuntime::shouldCompact()
 {
-#ifdef JSGC_COMPACTING
     return invocationKind == GC_SHRINK && isCompactingGCEnabled();
-#else
-    return false;
-#endif
-}
-
-#ifdef JSGC_COMPACTING
+}
 
 void
 GCRuntime::disableCompactingGC()
 {
     MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
     ++rt->gc.compactingDisabled;
 }
 
@@ -2694,22 +2684,20 @@ GCRuntime::releaseRelocatedArenasWithout
                   JS_MOVED_TENURED_PATTERN, Arena::thingsSpan(thingSize));
 #endif
 
         releaseArena(aheader, lock);
         ++count;
     }
 }
 
-#endif // JSGC_COMPACTING
-
 void
 GCRuntime::releaseHeldRelocatedArenas()
 {
-#if defined(JSGC_COMPACTING) && defined(DEBUG)
+#ifdef DEBUG
     // In debug mode we don't release relocated arenas straight away.  Instead
     // we protect them and hold onto them until the next GC sweep phase to catch
     // any pointers to them that didn't get forwarded.
     unprotectRelocatedArenas(relocatedArenasToRelease);
     releaseRelocatedArenas(relocatedArenasToRelease);
     relocatedArenasToRelease = nullptr;
 #endif
 }
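
The comment above describes a debug-only safety net that is now active in
all DEBUG builds: relocated arenas are kept but made inaccessible until the
next sweep, so any stale pointer that escaped forwarding faults immediately
instead of silently reading dead memory. A POSIX-only sketch of the
mechanism (an assumption for illustration; the real code goes through
SpiderMonkey's own page-protection wrappers, not raw mprotect):

    #include <sys/mman.h>
    #include <cassert>
    #include <cstddef>

    void ProtectRange(void *addr, size_t len)   // after compacting
    {
        int rc = mprotect(addr, len, PROT_NONE);
        assert(rc == 0);
        (void)rc;
    }

    void UnprotectRange(void *addr, size_t len) // before releasing
    {
        int rc = mprotect(addr, len, PROT_READ | PROT_WRITE);
        assert(rc == 0);
        (void)rc;
    }
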
@@ -5462,19 +5450,16 @@ GCRuntime::endSweepPhase(bool lastGC)
         }
     }
 #endif
 }
 
 bool
 GCRuntime::compactPhase(bool lastGC)
 {
-#ifndef JSGC_COMPACTING
-    MOZ_CRASH();
-#else
     gcstats::AutoPhase ap(stats, gcstats::PHASE_COMPACT);
 
     if (isIncremental) {
         // Poll for end of background sweeping
         AutoLockGC lock(rt);
         if (isBackgroundSweeping())
             return false;
     } else {
@@ -5529,18 +5514,16 @@ GCRuntime::compactPhase(bool lastGC)
                     for (ArenaHeader *arena = al.arenaAfterCursor(); arena; arena = arena->next)
                         freeCells += arena->countFreeCells();
                     MOZ_ASSERT(freeCells < thingsPerArena);
                 }
             }
         }
     }
 #endif
-
-#endif // JSGC_COMPACTING
     return true;
 }
 
 void
 GCRuntime::finishCollection()
 {
     MOZ_ASSERT(marker.isDrained());
     marker.stop();
@@ -5690,33 +5673,31 @@ GCRuntime::resetIncrementalGC(const char
 
         {
             gcstats::AutoPhase ap(stats, gcstats::PHASE_WAIT_BACKGROUND_THREAD);
             rt->gc.waitBackgroundSweepOrAllocEnd();
         }
         break;
       }
 
-#ifdef JSGC_COMPACTING
       case COMPACT: {
         {
             gcstats::AutoPhase ap(stats, gcstats::PHASE_WAIT_BACKGROUND_THREAD);
             rt->gc.waitBackgroundSweepOrAllocEnd();
         }
 
         JSGCInvocationKind oldInvocationKind = invocationKind;
         invocationKind = GC_NORMAL;
 
         SliceBudget budget;
         incrementalCollectSlice(budget, JS::gcreason::RESET);
 
         invocationKind = oldInvocationKind;
         break;
       }
-#endif
 
       default:
         MOZ_CRASH("Invalid incremental GC state");
     }
 
     stats.reset(reason);
 
 #ifdef DEBUG
@@ -6394,17 +6375,17 @@ GCRuntime::onOutOfMallocMemory()
     onOutOfMallocMemory(lock);
 }
 
 void
 GCRuntime::onOutOfMallocMemory(const AutoLockGC &lock)
 {
     // Release any relocated arenas we may be holding on to, without releasing
     // the GC lock.
-#if defined(JSGC_COMPACTING) && defined(DEBUG)
+#ifdef DEBUG
     unprotectRelocatedArenas(relocatedArenasToRelease);
     releaseRelocatedArenasWithoutUnlocking(relocatedArenasToRelease, lock);
     relocatedArenasToRelease = nullptr;
 #endif
 
     // Throw away any excess chunks we have lying around.
     freeEmptyChunks(rt, lock);
 
@@ -7115,29 +7096,23 @@ JS_PUBLIC_API(bool)
 JS::IsIncrementalGCEnabled(JSRuntime *rt)
 {
     return rt->gc.isIncrementalGCEnabled();
 }
 
 JS_PUBLIC_API(void)
 JS::DisableCompactingGC(JSRuntime *rt)
 {
-#ifdef JSGC_COMPACTING
     rt->gc.disableCompactingGC();
-#endif
 }
 
 JS_PUBLIC_API(bool)
 JS::IsCompactingGCEnabled(JSRuntime *rt)
 {
-#ifdef JSGC_COMPACTING
     return rt->gc.isCompactingGCEnabled();
-#else
-    return false;
-#endif
 }
 
 JS_PUBLIC_API(bool)
 JS::IsIncrementalGCInProgress(JSRuntime *rt)
 {
     return rt->gc.isIncrementalGCInProgress() && !rt->gc.isVerifyPreBarriersEnabled();
 }
 
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -449,21 +449,19 @@ class ArenaList {
         // Insert the full arenas of |other| after those of |this|.
         *other.cursorp_ = *cursorp_;
         *cursorp_ = other.head_;
         cursorp_ = other.cursorp_;
         check();
         return *this;
     }
 
-#ifdef JSGC_COMPACTING
     ArenaHeader *removeRemainingArenas(ArenaHeader **arenap, const AutoLockGC &lock);
     ArenaHeader *pickArenasToRelocate(JSRuntime *runtime);
     ArenaHeader *relocateArenas(ArenaHeader *toRelocate, ArenaHeader *relocated);
-#endif
 };
 
 /*
  * A class that holds arenas in sorted order by appending arenas to specific
  * segments. Each segment has a head and a tail, which can be linked up to
  * other segments to create a contiguous ArenaList.
  */
 class SortedArenaList
@@ -780,19 +778,17 @@ class ArenaLists
             MOZ_ASSERT(freeLists[i].isEmpty());
 #endif
     }
 
     void checkEmptyFreeList(AllocKind kind) {
         MOZ_ASSERT(freeLists[kind].isEmpty());
     }
 
-#ifdef JSGC_COMPACTING
     ArenaHeader *relocateArenas(ArenaHeader *relocatedList);
-#endif
 
     void queueForegroundObjectsForSweep(FreeOp *fop);
     void queueForegroundThingsForSweep(FreeOp *fop);
 
     void mergeForegroundSweptObjectArenas();
 
     bool foregroundFinalize(FreeOp *fop, AllocKind thingKind, SliceBudget &sliceBudget,
                             SortedArenaList &sweepList);
@@ -1265,19 +1261,17 @@ MaybeForwarded(T t)
 
 #ifdef JSGC_HASH_TABLE_CHECKS
 
 template <typename T>
 inline void
 CheckGCThingAfterMovingGC(T *t)
 {
     MOZ_ASSERT_IF(t, !IsInsideNursery(t));
-#ifdef JSGC_COMPACTING
     MOZ_ASSERT_IF(t, !IsForwarded(t));
-#endif
 }
 
 inline void
 CheckValueAfterMovingGC(const JS::Value& value)
 {
     if (value.isObject())
         return CheckGCThingAfterMovingGC(&value.toObject());
     else if (value.isString())
@@ -1424,26 +1418,21 @@ class AutoDisableProxyCheck
 struct AutoDisableProxyCheck
 {
     explicit AutoDisableProxyCheck(JSRuntime *rt) {}
 };
 #endif
 
 struct AutoDisableCompactingGC
 {
-#ifdef JSGC_COMPACTING
     explicit AutoDisableCompactingGC(JSRuntime *rt);
     ~AutoDisableCompactingGC();
 
   private:
     gc::GCRuntime &gc;
-#else
-    explicit AutoDisableCompactingGC(JSRuntime *rt) {}
-    ~AutoDisableCompactingGC() {}
-#endif
 };
 
 void
 PurgeJITCaches(JS::Zone *zone);
 
 // This is the same as IsInsideNursery, but not inlined.
 bool
 UninlinedIsInsideNursery(const gc::Cell *cell);
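
AutoDisableCompactingGC now exists in every build configuration. It pairs
with the compactingDisabled counter that GCRuntime keeps unconditionally:
disabling is counted rather than flagged, so nested guards compose and
compaction resumes only when the last one is destroyed. A minimal sketch of
the pattern (hypothetical names; the counter logic mirrors the jsgc.cpp
hunks above):

    struct GCSketch {
        unsigned compactingDisabled = 0;
        void disableCompactingGC() { ++compactingDisabled; }
        void enableCompactingGC()  { --compactingDisabled; }
        bool isCompactingGCEnabled() const { return compactingDisabled == 0; }
    };

    class AutoDisableCompactingGCSketch {
        GCSketch &gc;
      public:
        explicit AutoDisableCompactingGCSketch(GCSketch &gcref) : gc(gcref) {
            gc.disableCompactingGC();
        }
        ~AutoDisableCompactingGCSketch() {
            gc.enableCompactingGC();
        }
    };
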
--- a/js/src/jsinfer.cpp
+++ b/js/src/jsinfer.cpp
@@ -4886,18 +4886,16 @@ JSCompartment::sweepNewTypeObjectTable(N
                 /* Any rekeying necessary is handled by fixupNewTypeObjectTable() below. */
                 MOZ_ASSERT(entry.object.unbarrieredGet() == e.front().object.unbarrieredGet());
                 MOZ_ASSERT(entry.associated == e.front().associated);
             }
         }
     }
 }
 
-#ifdef JSGC_COMPACTING
-
 void
 JSCompartment::fixupNewTypeObjectTable(NewTypeObjectTable &table)
 {
     /*
      * Each entry's hash depends on the object's prototype and we can't tell
      * whether that has been moved or not in sweepNewTypeObjectTable().
      */
     MOZ_ASSERT(zone()->isCollecting());
@@ -4961,18 +4959,16 @@ TypeObject::fixupAfterMovingGC()
                 addendum_ = Forwarded(maybeInterpretedFunction());
             break;
           default:
             MOZ_CRASH();
         }
     }
 }
 
-#endif // JSGC_COMPACTING
-
 #ifdef JSGC_HASH_TABLE_CHECKS
 
 void
 JSCompartment::checkTypeObjectTablesAfterMovingGC()
 {
     checkTypeObjectTableAfterMovingGC(newTypeObjects);
     checkTypeObjectTableAfterMovingGC(lazyTypeObjects);
 }
--- a/js/src/jsinfer.h
+++ b/js/src/jsinfer.h
@@ -920,20 +920,17 @@ class TypeNewScript
     }
 
     TypeObject *initializedType() const {
         return initializedType_;
     }
 
     void trace(JSTracer *trc);
     void sweep();
-
-#ifdef JSGC_COMPACTING
     void fixupAfterMovingGC();
-#endif
 
     void registerNewObject(PlainObject *res);
     void unregisterNewObject(PlainObject *res);
     bool maybeAnalyze(JSContext *cx, TypeObject *type, bool *regenerate, bool force = false);
 
     void rollbackPartiallyInitializedObjects(JSContext *cx, TypeObject *type);
 
     static void make(JSContext *cx, TypeObject *type, JSFunction *fun);
@@ -1236,19 +1233,17 @@ struct TypeObject : public gc::TenuredCe
 
   public:
     void setGeneration(uint32_t generation) {
         MOZ_ASSERT(generation <= (OBJECT_FLAG_GENERATION_MASK >> OBJECT_FLAG_GENERATION_SHIFT));
         flags_ &= ~OBJECT_FLAG_GENERATION_MASK;
         flags_ |= generation << OBJECT_FLAG_GENERATION_SHIFT;
     }
 
-#ifdef JSGC_COMPACTING
     void fixupAfterMovingGC();
-#endif
 
     size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
 
     inline void finalize(FreeOp *fop);
 
     static inline ThingRootKind rootKind() { return THING_ROOT_TYPE_OBJECT; }
 
     static inline uint32_t offsetOfClasp() {
--- a/js/src/jspropertytree.cpp
+++ b/js/src/jspropertytree.cpp
@@ -230,18 +230,16 @@ Shape::sweep()
 
 void
 Shape::finalize(FreeOp *fop)
 {
     if (!inDictionary() && kids.isHash())
         fop->delete_(kids.toHash());
 }
 
-#ifdef JSGC_COMPACTING
-
 void
 Shape::fixupDictionaryShapeAfterMovingGC()
 {
     if (!listp)
         return;
 
     // It's possible that this shape is unreachable and that listp points to the
     // location of a dead object in the nursery, in which case we should never
@@ -317,18 +315,16 @@ void
 Shape::fixupAfterMovingGC()
 {
     if (inDictionary())
         fixupDictionaryShapeAfterMovingGC();
     else
         fixupShapeTreeAfterMovingGC();
 }
 
-#endif // JSGC_COMPACTING
-
 void
 ShapeGetterSetterRef::mark(JSTracer *trc)
 {
     // Update the current shape's entry in the parent KidsHash table if needed.
     // This is necessary as the computed hash includes the getter/setter
     // pointers.
 
     JSObject *obj = *objp;
--- a/js/src/jspubtd.h
+++ b/js/src/jspubtd.h
@@ -15,18 +15,17 @@
 #include "mozilla/LinkedList.h"
 #include "mozilla/PodOperations.h"
 
 #include "jsprototypes.h"
 #include "jstypes.h"
 
 #include "js/TypeDecls.h"
 
-#if (defined(JS_GC_ZEAL)) || \
-    (defined(JSGC_COMPACTING) && defined(DEBUG))
+#if (defined(JS_GC_ZEAL)) || defined(DEBUG)
 # define JSGC_HASH_TABLE_CHECKS
 #endif
 
 namespace JS {
 
 class AutoIdVector;
 class CallArgs;
 
--- a/js/src/vm/Shape-inl.h
+++ b/js/src/vm/Shape-inl.h
@@ -219,20 +219,18 @@ GetShapeAttributes(JSObject *obj, Shape 
         if (IsAnyTypedArray(obj))
             return JSPROP_ENUMERATE | JSPROP_PERMANENT;
         return JSPROP_ENUMERATE;
     }
 
     return shape->attributes();
 }
 
-#ifdef JSGC_COMPACTING
 inline void
 BaseShape::fixupAfterMovingGC()
 {
     if (hasTable())
         table().fixupAfterMovingGC();
 }
-#endif
 
 } /* namespace js */
 
 #endif /* vm_Shape_inl_h */
--- a/js/src/vm/Shape.cpp
+++ b/js/src/vm/Shape.cpp
@@ -251,29 +251,27 @@ ShapeTable::search(jsid id, bool adding)
             collisionFlag &= entry->hadCollision();
 #endif
         }
     }
 
     MOZ_CRASH("Shape::search failed to find an expected entry.");
 }
 
-#ifdef JSGC_COMPACTING
 void
 ShapeTable::fixupAfterMovingGC()
 {
     uint32_t size = capacity();
     for (HashNumber i = 0; i < size; i++) {
         Entry &entry = getEntry(i);
         Shape *shape = entry.shape();
         if (shape && IsForwarded(shape))
             entry.setPreservingCollision(Forwarded(shape));
     }
 }
-#endif
 
 bool
 ShapeTable::change(int log2Delta, ExclusiveContext *cx)
 {
     MOZ_ASSERT(entries_);
     MOZ_ASSERT(-1 <= log2Delta && log2Delta <= 1);
 
     /*
@@ -1688,17 +1686,16 @@ JSCompartment::sweepInitialShapeTable()
                     InitialShapeEntry newKey(readBarrieredShape, TaggedProto(proto));
                     e.rekeyFront(newKey.getLookup(), newKey);
                 }
             }
         }
     }
 }
 
-#ifdef JSGC_COMPACTING
 void
 JSCompartment::fixupInitialShapeTable()
 {
     if (!initialShapes.initialized())
         return;
 
     for (InitialShapeSet::Enum e(initialShapes); !e.empty(); e.popFront()) {
         InitialShapeEntry entry = e.front();
@@ -1727,17 +1724,16 @@ JSCompartment::fixupInitialShapeTable()
                                                parent,
                                                metadata,
                                                entry.shape->numFixedSlots(),
                                                entry.shape->getObjectFlags());
             e.rekeyFront(relookup, entry);
         }
     }
 }
-#endif // JSGC_COMPACTING
 
 void
 AutoRooterGetterSetter::Inner::trace(JSTracer *trc)
 {
     if ((attrs & JSPROP_GETTER) && *pgetter)
         gc::MarkObjectRoot(trc, (JSObject**) pgetter, "AutoRooterGetterSetter getter");
     if ((attrs & JSPROP_SETTER) && *psetter)
         gc::MarkObjectRoot(trc, (JSObject**) psetter, "AutoRooterGetterSetter setter");
--- a/js/src/vm/Shape.h
+++ b/js/src/vm/Shape.h
@@ -222,20 +222,18 @@ class ShapeTable {
      * NB: init and change are fallible but do not report OOM, so callers can
      * cope or ignore. They do however use the context's calloc method in
      * order to update the malloc counter on success.
      */
     bool init(ExclusiveContext *cx, Shape *lastProp);
     bool change(int log2Delta, ExclusiveContext *cx);
     Entry &search(jsid id, bool adding);
 
-#ifdef JSGC_COMPACTING
     /* Update entries whose shapes have been moved */
     void fixupAfterMovingGC();
-#endif
 
   private:
     Entry &getEntry(uint32_t i) const {
         MOZ_ASSERT(i < capacity());
         return entries_[i];
     }
     void decEntryCount() {
         MOZ_ASSERT(entryCount_ > 0);
@@ -525,19 +523,17 @@ class BaseShape : public gc::TenuredCell
 
         if (parent)
             gc::MarkObject(trc, &parent, "parent");
 
         if (metadata)
             gc::MarkObject(trc, &metadata, "metadata");
     }
 
-#ifdef JSGC_COMPACTING
     void fixupAfterMovingGC();
-#endif
 
   private:
     static void staticAsserts() {
         JS_STATIC_ASSERT(offsetof(BaseShape, clasp_) == offsetof(js::shadow::BaseShape, clasp_));
     }
 };
 
 class UnownedBaseShape : public BaseShape {};
@@ -1056,30 +1052,26 @@ class Shape : public gc::TenuredCell
 
     static inline ThingRootKind rootKind() { return THING_ROOT_SHAPE; }
 
     inline void markChildren(JSTracer *trc);
 
     inline Shape *search(ExclusiveContext *cx, jsid id);
     inline Shape *searchLinear(jsid id);
 
-#ifdef JSGC_COMPACTING
     void fixupAfterMovingGC();
-#endif
 
     /* For JIT usage */
     static inline size_t offsetOfBase() { return offsetof(Shape, base_); }
     static inline size_t offsetOfSlotInfo() { return offsetof(Shape, slotInfo); }
     static inline uint32_t fixedSlotsMask() { return FIXED_SLOTS_MASK; }
 
   private:
-#ifdef JSGC_COMPACTING
     void fixupDictionaryShapeAfterMovingGC();
     void fixupShapeTreeAfterMovingGC();
-#endif
 
     static void staticAsserts() {
         JS_STATIC_ASSERT(offsetof(Shape, base_) == offsetof(js::shadow::Shape, base));
         JS_STATIC_ASSERT(offsetof(Shape, slotInfo) == offsetof(js::shadow::Shape, slotInfo));
         JS_STATIC_ASSERT(FIXED_SLOTS_SHIFT == js::shadow::Shape::FIXED_SLOTS_SHIFT);
     }
 };