Bug 763800 - Changes GCHelperThread to work on the main thread in non-threadsafe builds and removes lots of henceforth redundant #ifdef JS_THREADSAFE's (r=billm)
authorTill Schneidereit <tschneidereit@gmail.com>
Wed, 13 Jun 2012 11:27:45 +0200
changeset 96634 4530efc8e2ecc805b7688fd998c7cfbab0b8e8d3
parent 96633 2da255c1aeb9568f3fb7fe30e5661c03b103542f
child 96635 87dbb95cde7ddacd65d46ed3f39b39cfe1e1560a
push id22925
push useremorley@mozilla.com
push dateThu, 14 Jun 2012 09:21:42 +0000
treeherdermozilla-central@85e31a4bdd41 [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewersbillm
bugs763800
milestone16.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 763800 - Changes GCHelperThread to work on the main thread in non-threadsafe builds and removes lots of henceforth redundant #ifdef JS_THREADSAFE's (r=billm)
js/src/jsapi.cpp
js/src/jsarray.cpp
js/src/jscntxt.cpp
js/src/jscntxt.h
js/src/jsfun.h
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsgcinlines.h
js/src/jslock.h
js/src/jsobjinlines.h
js/src/jstypedarray.cpp
js/src/vm/ScopeObject.cpp
js/src/vm/ScopeObject.h
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -761,20 +761,18 @@ JSRuntime::JSRuntime()
     positiveInfinityValue(UndefinedValue()),
     emptyString(NULL),
     debugMode(false),
     profilingScripts(false),
     alwaysPreserveCode(false),
     hadOutOfMemory(false),
     debugScopes(NULL),
     data(NULL),
-#ifdef JS_THREADSAFE
     gcLock(NULL),
     gcHelperThread(thisFromCtor()),
-#endif
     defaultFreeOp_(thisFromCtor(), false, false),
     debuggerMutations(0),
     securityCallbacks(const_cast<JSSecurityCallbacks *>(&NullSecurityCallbacks)),
     destroyPrincipals(NULL),
     structuredCloneCallbacks(NULL),
     telemetryCallback(NULL),
     propertyRemovals(0),
     thousandsSeparator(0),
--- a/js/src/jsarray.cpp
+++ b/js/src/jsarray.cpp
@@ -3732,21 +3732,18 @@ EnsureNewArrayElements(JSContext *cx, JS
     return true;
 }
 
 template<bool allocateCapacity>
 static JS_ALWAYS_INLINE JSObject *
 NewArray(JSContext *cx, uint32_t length, JSObject *proto_)
 {
     gc::AllocKind kind = GuessArrayGCKind(length);
-
-#ifdef JS_THREADSAFE
     JS_ASSERT(CanBeFinalizedInBackground(kind, &ArrayClass));
     kind = GetBackgroundAllocKind(kind);
-#endif
 
     GlobalObject *parent_ = GetCurrentGlobal(cx);
 
     NewObjectCache &cache = cx->runtime->newObjectCache;
 
     NewObjectCache::EntryIndex entry = -1;
     if (cache.lookupGlobal(&ArrayClass, parent_, kind, &entry)) {
         JSObject *obj = cache.newObjectFromHit(cx, entry);
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -1141,22 +1141,20 @@ JSRuntime::onOutOfMemory(void *p, size_t
     if (gcRunning)
         return NULL;
 
     /*
      * Retry when we are done with the background sweeping and have stopped
      * all the allocations and released the empty GC chunks.
      */
     ShrinkGCBuffers(this);
-#ifdef JS_THREADSAFE
     {
         AutoLockGC lock(this);
         gcHelperThread.waitBackgroundSweepOrAllocEnd();
     }
-#endif
     if (!p)
         p = OffTheBooks::malloc_(nbytes);
     else if (p == reinterpret_cast<void *>(1))
         p = OffTheBooks::calloc_(nbytes);
     else
       p = OffTheBooks::realloc_(p, nbytes);
     if (p)
         return p;
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -680,22 +680,20 @@ struct JSRuntime : js::RuntimeFriendFiel
     JSCList             debuggerList;
 
     /* Bookkeeping information for debug scope objects. */
     js::DebugScopes     *debugScopes;
 
     /* Client opaque pointers */
     void                *data;
 
-#ifdef JS_THREADSAFE
     /* These combine to interlock the GC and new requests. */
     PRLock              *gcLock;
 
     js::GCHelperThread  gcHelperThread;
-#endif /* JS_THREADSAFE */
 
   private:
     js::FreeOp          defaultFreeOp_;
 
   public:
     js::FreeOp *defaultFreeOp() {
         return &defaultFreeOp_;
     }
@@ -1010,22 +1008,20 @@ VersionIsKnown(JSVersion version)
 }
 
 typedef HashSet<JSObject *,
                 DefaultHasher<JSObject *>,
                 SystemAllocPolicy> BusyArraysSet;
 
 inline void
 FreeOp::free_(void* p) {
-#ifdef JS_THREADSAFE
     if (shouldFreeLater()) {
         runtime()->gcHelperThread.freeLater(p);
         return;
     }
-#endif
     runtime()->free_(p);
 }
 
 } /* namespace js */
 
 struct JSContext : js::ContextFriendFields
 {
     explicit JSContext(JSRuntime *rt);
@@ -1391,41 +1387,37 @@ class AutoXMLRooter : private AutoGCRoot
     JS_DECL_USE_GUARD_OBJECT_NOTIFIER
 };
 #endif /* JS_HAS_XML_SUPPORT */
 
 #ifdef JS_THREADSAFE
 # define JS_LOCK_GC(rt)    PR_Lock((rt)->gcLock)
 # define JS_UNLOCK_GC(rt)  PR_Unlock((rt)->gcLock)
 #else
-# define JS_LOCK_GC(rt)
-# define JS_UNLOCK_GC(rt)
+# define JS_LOCK_GC(rt)    do { } while (0)
+# define JS_UNLOCK_GC(rt)  do { } while (0)
 #endif
 
 class AutoLockGC
 {
   public:
     explicit AutoLockGC(JSRuntime *rt = NULL
                         MOZ_GUARD_OBJECT_NOTIFIER_PARAM)
       : runtime(rt)
     {
         MOZ_GUARD_OBJECT_NOTIFIER_INIT;
         // Avoid MSVC warning C4390 for non-threadsafe builds.
-#ifdef JS_THREADSAFE
         if (rt)
             JS_LOCK_GC(rt);
-#endif
     }
 
     ~AutoLockGC()
     {
-#ifdef JS_THREADSAFE
         if (runtime)
             JS_UNLOCK_GC(runtime);
-#endif
     }
 
     bool locked() const {
         return !!runtime;
     }
 
     void lock(JSRuntime *rt) {
         JS_ASSERT(rt);
--- a/js/src/jsfun.h
+++ b/js/src/jsfun.h
@@ -138,31 +138,21 @@ struct JSFunction : public JSObject
 
     static unsigned offsetOfNativeOrScript() {
         JS_STATIC_ASSERT(offsetof(U, native) == offsetof(U, i.script_));
         JS_STATIC_ASSERT(offsetof(U, native) == offsetof(U, nativeOrScript));
         return offsetof(JSFunction, u.nativeOrScript);
     }
 
 #if JS_BITS_PER_WORD == 32
-# ifdef JS_THREADSAFE
     static const js::gc::AllocKind FinalizeKind = js::gc::FINALIZE_OBJECT2_BACKGROUND;
     static const js::gc::AllocKind ExtendedFinalizeKind = js::gc::FINALIZE_OBJECT4_BACKGROUND;
-# else
-    static const js::gc::AllocKind FinalizeKind = js::gc::FINALIZE_OBJECT2;
-    static const js::gc::AllocKind ExtendedFinalizeKind = js::gc::FINALIZE_OBJECT4;
-# endif
 #else
-# ifdef JS_THREADSAFE
     static const js::gc::AllocKind FinalizeKind = js::gc::FINALIZE_OBJECT4_BACKGROUND;
     static const js::gc::AllocKind ExtendedFinalizeKind = js::gc::FINALIZE_OBJECT8_BACKGROUND;
-# else
-    static const js::gc::AllocKind FinalizeKind = js::gc::FINALIZE_OBJECT4;
-    static const js::gc::AllocKind ExtendedFinalizeKind = js::gc::FINALIZE_OBJECT8;
-# endif
 #endif
 
     inline void trace(JSTracer *trc);
 
     /* Bound function accessors. */
 
     inline bool initBoundFunction(JSContext *cx, js::HandleValue thisArg,
                                   const js::Value *args, unsigned argslen);
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -416,30 +416,28 @@ AllocChunk() {
     return static_cast<Chunk *>(MapAlignedPages(ChunkSize, ChunkSize));
 }
 
 static inline void
 FreeChunk(Chunk *p) {
     UnmapPages(static_cast<void *>(p), ChunkSize);
 }
 
-#ifdef JS_THREADSAFE
 inline bool
 ChunkPool::wantBackgroundAllocation(JSRuntime *rt) const
 {
     /*
      * To minimize memory waste we do not want to run the background chunk
      * allocation if we have empty chunks or when the runtime needs just few
      * of them.
      */
     return rt->gcHelperThread.canBackgroundAllocate() &&
            emptyCount == 0 &&
            rt->gcChunkSet.count() >= 4;
 }
-#endif
 
 /* Must be called with the GC lock taken. */
 inline Chunk *
 ChunkPool::get(JSRuntime *rt)
 {
     JS_ASSERT(this == &rt->gcChunkPool);
 
     Chunk *chunk = emptyChunkListHead;
@@ -453,20 +451,18 @@ ChunkPool::get(JSRuntime *rt)
         if (!chunk)
             return NULL;
         JS_ASSERT(chunk->info.numArenasFreeCommitted == ArenasPerChunk);
         rt->gcNumArenasFreeCommitted += ArenasPerChunk;
     }
     JS_ASSERT(chunk->unused());
     JS_ASSERT(!rt->gcChunkSet.has(chunk));
 
-#ifdef JS_THREADSAFE
     if (wantBackgroundAllocation(rt))
         rt->gcHelperThread.startBackgroundAllocationIfIdle();
-#endif
 
     return chunk;
 }
 
 /* Must be called either during the GC or with the GC lock taken. */
 inline void
 ChunkPool::put(Chunk *chunk)
 {
@@ -748,29 +744,25 @@ Chunk::addArenaToFreeList(JSRuntime *rt,
 
 void
 Chunk::releaseArena(ArenaHeader *aheader)
 {
     JS_ASSERT(aheader->allocated());
     JS_ASSERT(!aheader->hasDelayedMarking);
     JSCompartment *comp = aheader->compartment;
     JSRuntime *rt = comp->rt;
-#ifdef JS_THREADSAFE
     AutoLockGC maybeLock;
     if (rt->gcHelperThread.sweeping())
         maybeLock.lock(rt);
-#endif
 
     Probes::resizeHeap(comp, rt->gcBytes, rt->gcBytes - ArenaSize);
     JS_ASSERT(rt->gcBytes >= ArenaSize);
     JS_ASSERT(comp->gcBytes >= ArenaSize);
-#ifdef JS_THREADSAFE
     if (rt->gcHelperThread.sweeping())
         comp->reduceGCTriggerBytes(GC_HEAP_GROWTH_FACTOR * ArenaSize);
-#endif
     rt->gcBytes -= ArenaSize;
     comp->gcBytes -= ArenaSize;
 
     aheader->setAsNotAllocated();
     addArenaToFreeList(rt, aheader);
 
     if (info.numArenasFree == 1) {
         JS_ASSERT(!info.prevp);
@@ -836,19 +828,19 @@ js_InitGC(JSRuntime *rt, uint32_t maxbyt
 
     if (!rt->gcLocksHash.init(256))
         return false;
 
 #ifdef JS_THREADSAFE
     rt->gcLock = PR_NewLock();
     if (!rt->gcLock)
         return false;
+#endif
     if (!rt->gcHelperThread.init())
         return false;
-#endif
 
     /*
      * Separate gcMaxMallocBytes from gcMaxBytes but initialize to maxbytes
      * for default backward API compatibility.
      */
     rt->gcMaxBytes = maxbytes;
     rt->setGCMaxMallocBytes(maxbytes);
 
@@ -1160,19 +1152,17 @@ CheckLeakedRoots(JSRuntime *rt);
 
 void
 js_FinishGC(JSRuntime *rt)
 {
     /*
      * Wait until the background finalization stops and the helper thread
      * shuts down before we forcefully release any remaining GC memory.
      */
-#ifdef JS_THREADSAFE
     rt->gcHelperThread.finish();
-#endif
 
 #ifdef JS_GC_ZEAL
     /* Free memory associated with GC verification. */
     FinishVerifier(rt);
 #endif
 
     /* Delete all remaining Compartments. */
     for (CompartmentsIter c(rt); !c.done(); c.next())
@@ -1446,19 +1436,17 @@ ArenaLists::allocateFromArena(JSCompartm
                                                      Arena::firstThingOffset(thingKind),
                                                      Arena::thingSize(thingKind));
 }
 
 void
 ArenaLists::finalizeNow(FreeOp *fop, AllocKind thingKind)
 {
     JS_ASSERT(!fop->onBackgroundThread());
-#ifdef JS_THREADSAFE
     JS_ASSERT(backgroundFinalizeState[thingKind] == BFS_DONE);
-#endif
     FinalizeArenas(fop, &arenaLists[thingKind], thingKind);
 }
 
 inline void
 ArenaLists::finalizeLater(FreeOp *fop, AllocKind thingKind)
 {
     JS_ASSERT(thingKind == FINALIZE_OBJECT0_BACKGROUND  ||
               thingKind == FINALIZE_OBJECT2_BACKGROUND  ||
@@ -1503,21 +1491,22 @@ ArenaLists::finalizeLater(FreeOp *fop, A
 
 #else /* !JS_THREADSAFE */
 
     finalizeNow(fop, thingKind);
 
 #endif
 }
 
-#ifdef JS_THREADSAFE
 /*static*/ void
 ArenaLists::backgroundFinalize(FreeOp *fop, ArenaHeader *listHead)
 {
+#ifdef JS_THREADSAFE
     JS_ASSERT(fop->onBackgroundThread());
+#endif /* JS_THREADSAFE */
     JS_ASSERT(listHead);
     AllocKind thingKind = listHead->getAllocKind();
     JSCompartment *comp = listHead->compartment;
     ArenaList finalized;
     finalized.head = listHead;
     FinalizeArenas(fop, &finalized, thingKind);
 
     /*
@@ -1545,36 +1534,33 @@ ArenaLists::backgroundFinalize(FreeOp *f
         *al->cursor = finalized.head;
         if (finalized.cursor != &finalized.head)
             al->cursor = finalized.cursor;
         lists->backgroundFinalizeState[thingKind] = BFS_JUST_FINISHED;
     } else {
         lists->backgroundFinalizeState[thingKind] = BFS_DONE;
     }
 }
-#endif /* JS_THREADSAFE */
 
 void
 ArenaLists::finalizeObjects(FreeOp *fop)
 {
     finalizeNow(fop, FINALIZE_OBJECT0);
     finalizeNow(fop, FINALIZE_OBJECT2);
     finalizeNow(fop, FINALIZE_OBJECT4);
     finalizeNow(fop, FINALIZE_OBJECT8);
     finalizeNow(fop, FINALIZE_OBJECT12);
     finalizeNow(fop, FINALIZE_OBJECT16);
 
-#ifdef JS_THREADSAFE
     finalizeLater(fop, FINALIZE_OBJECT0_BACKGROUND);
     finalizeLater(fop, FINALIZE_OBJECT2_BACKGROUND);
     finalizeLater(fop, FINALIZE_OBJECT4_BACKGROUND);
     finalizeLater(fop, FINALIZE_OBJECT8_BACKGROUND);
     finalizeLater(fop, FINALIZE_OBJECT12_BACKGROUND);
     finalizeLater(fop, FINALIZE_OBJECT16_BACKGROUND);
-#endif
 
 #if JS_HAS_XML_SUPPORT
     finalizeNow(fop, FINALIZE_XML);
 #endif
 }
 
 void
 ArenaLists::finalizeStrings(FreeOp *fop)
@@ -1645,19 +1631,17 @@ ArenaLists::refillFreeList(JSContext *cx
         for (bool secondAttempt = false; ; secondAttempt = true) {
             void *thing = comp->arenas.allocateFromArena(comp, thingKind);
             if (JS_LIKELY(!!thing))
                 return thing;
             if (secondAttempt)
                 break;
 
             AutoLockGC lock(rt);
-#ifdef JS_THREADSAFE
             rt->gcHelperThread.waitBackgroundSweepEnd();
-#endif
         }
 
         /*
          * We failed to allocate. Run the GC if we haven't done it already.
          * Otherwise report OOM.
          */
         if (runGC)
             break;
@@ -2619,54 +2603,59 @@ ExpireChunksAndArenas(JSRuntime *rt, boo
         FreeChunkList(toFree);
     }
 
     if (shouldShrink)
         DecommitArenas(rt);
 }
 
 #ifdef JS_THREADSAFE
-
 static unsigned
 GetCPUCount()
 {
     static unsigned ncpus = 0;
     if (ncpus == 0) {
 # ifdef XP_WIN
         SYSTEM_INFO sysinfo;
         GetSystemInfo(&sysinfo);
         ncpus = unsigned(sysinfo.dwNumberOfProcessors);
 # else
         long n = sysconf(_SC_NPROCESSORS_ONLN);
         ncpus = (n > 0) ? unsigned(n) : 1;
 # endif
     }
     return ncpus;
 }
+#endif /* JS_THREADSAFE */
 
 bool
 GCHelperThread::init()
 {
+#ifdef JS_THREADSAFE
     if (!(wakeup = PR_NewCondVar(rt->gcLock)))
         return false;
     if (!(done = PR_NewCondVar(rt->gcLock)))
         return false;
 
     thread = PR_CreateThread(PR_USER_THREAD, threadMain, this, PR_PRIORITY_NORMAL,
                              PR_LOCAL_THREAD, PR_JOINABLE_THREAD, 0);
     if (!thread)
         return false;
 
     backgroundAllocation = (GetCPUCount() >= 2);
+#else
+    backgroundAllocation = false;
+#endif /* JS_THREADSAFE */
     return true;
 }
 
 void
 GCHelperThread::finish()
 {
+#ifdef JS_THREADSAFE
     PRThread *join = NULL;
     {
         AutoLockGC lock(rt);
         if (thread && state != SHUTDOWN) {
             /*
              * We cannot be in the ALLOCATING or CANCEL_ALLOCATION states as
              * the allocations should have been stopped during the last GC.
              */
@@ -2680,18 +2669,25 @@ GCHelperThread::finish()
     if (join) {
         /* PR_DestroyThread is not necessary. */
         PR_JoinThread(join);
     }
     if (wakeup)
         PR_DestroyCondVar(wakeup);
     if (done)
         PR_DestroyCondVar(done);
+#else
+    /*
+     * In non-threadsafe configurations, we do all work synchronously, so the state must be IDLE.
+     */
+    JS_ASSERT(state == IDLE);
+#endif /* JS_THREADSAFE */
 }
 
+#ifdef JS_THREADSAFE
 /* static */
 void
 GCHelperThread::threadMain(void *arg)
 {
     PR_SetCurrentThreadName("JS GC Helper");
     static_cast<GCHelperThread *>(arg)->threadLoop();
 }
 
@@ -2738,38 +2734,48 @@ GCHelperThread::threadLoop()
             break;
           case CANCEL_ALLOCATION:
             state = IDLE;
             PR_NotifyAllCondVar(done);
             break;
         }
     }
 }
+#endif /* JS_THREADSAFE */
 
 bool
 GCHelperThread::prepareForBackgroundSweep()
 {
     JS_ASSERT(state == IDLE);
+#ifdef JS_THREADSAFE
     size_t maxArenaLists = MAX_BACKGROUND_FINALIZE_KINDS * rt->compartments.length();
     return finalizeVector.reserve(maxArenaLists);
+#else
+    return false;
+#endif /* JS_THREADSAFE */
 }
 
 /* Must be called with the GC lock taken. */
 void
 GCHelperThread::startBackgroundSweep(bool shouldShrink)
 {
+#ifdef JS_THREADSAFE
     /* The caller takes the GC lock. */
     JS_ASSERT(state == IDLE);
     JS_ASSERT(!sweepFlag);
     sweepFlag = true;
     shrinkFlag = shouldShrink;
     state = SWEEPING;
     PR_NotifyCondVar(wakeup);
+#else
+    JS_NOT_REACHED("No background sweep if !JS_THREADSAFE");
+#endif /* JS_THREADSAFE */
 }
 
+#ifdef JS_THREADSAFE
 /* Must be called with the GC lock taken. */
 void
 GCHelperThread::startBackgroundShrink()
 {
     switch (state) {
       case IDLE:
         JS_ASSERT(!sweepFlag);
         shrinkFlag = true;
@@ -2785,43 +2791,56 @@ GCHelperThread::startBackgroundShrink()
          * If we have started background allocation there is nothing to
          * shrink.
          */
         break;
       case SHUTDOWN:
         JS_NOT_REACHED("No shrink on shutdown");
     }
 }
+#endif /* JS_THREADSAFE */
 
 /* Must be called with the GC lock taken. */
 void
 GCHelperThread::waitBackgroundSweepEnd()
 {
+#ifdef JS_THREADSAFE
     while (state == SWEEPING)
         PR_WaitCondVar(done, PR_INTERVAL_NO_TIMEOUT);
+#else
+    JS_ASSERT(state == IDLE);
+#endif /* JS_THREADSAFE */
 }
 
 /* Must be called with the GC lock taken. */
 void
 GCHelperThread::waitBackgroundSweepOrAllocEnd()
 {
+#ifdef JS_THREADSAFE
     if (state == ALLOCATING)
         state = CANCEL_ALLOCATION;
     while (state == SWEEPING || state == CANCEL_ALLOCATION)
         PR_WaitCondVar(done, PR_INTERVAL_NO_TIMEOUT);
+#else
+    JS_ASSERT(state == IDLE);
+#endif /* JS_THREADSAFE */
 }
 
 /* Must be called with the GC lock taken. */
 inline void
 GCHelperThread::startBackgroundAllocationIfIdle()
 {
+#ifdef JS_THREADSAFE
     if (state == IDLE) {
         state = ALLOCATING;
         PR_NotifyCondVar(wakeup);
     }
+#else
+    JS_ASSERT(state == IDLE);
+#endif /* JS_THREADSAFE */
 }
 
 JS_FRIEND_API(void)
 GCHelperThread::replenishAndFreeLater(void *ptr)
 {
     JS_ASSERT(freeCursor == freeCursorEnd);
     do {
         if (freeCursor && !freeVector.append(freeCursorEnd - FREE_ARRAY_LENGTH))
@@ -2833,16 +2852,17 @@ GCHelperThread::replenishAndFreeLater(vo
         }
         freeCursorEnd = freeCursor + FREE_ARRAY_LENGTH;
         *freeCursor++ = ptr;
         return;
     } while (false);
     Foreground::free_(ptr);
 }
 
+#ifdef JS_THREADSAFE
 /* Must be called with the GC lock taken. */
 void
 GCHelperThread::doSweep()
 {
     if (sweepFlag) {
         sweepFlag = false;
         AutoUnlockGC unlock(rt);
 
@@ -2877,17 +2897,16 @@ GCHelperThread::doSweep()
      * ExpireChunksAndArenas(rt, false) was running, so we recheck the flag
      * afterwards.
      */
     if (!shrinking && shrinkFlag) {
         shrinkFlag = false;
         ExpireChunksAndArenas(rt, true);
     }
 }
-
 #endif /* JS_THREADSAFE */
 
 } /* namespace js */
 
 static bool
 ReleaseObservedTypes(JSRuntime *rt)
 {
     bool releaseTypes = false;
@@ -3244,21 +3263,17 @@ SweepPhase(JSRuntime *rt, JSGCInvocation
      * BeginMarkPhase. More compartments may have been created since then.
      */
     bool isFull = true;
     for (CompartmentsIter c(rt); !c.done(); c.next()) {
         if (!c->isCollecting())
             isFull = false;
     }
 
-#ifdef JS_THREADSAFE
     *startBackgroundSweep = (rt->hasContexts() && rt->gcHelperThread.prepareForBackgroundSweep());
-#else
-    *startBackgroundSweep = false;
-#endif
 
     /* Purge the ArenaLists before sweeping. */
     for (GCCompartmentsIter c(rt); !c.done(); c.next())
         c->arenas.purge();
 
     FreeOp fop(rt, *startBackgroundSweep, false);
     {
         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_FINALIZE_START);
@@ -3683,28 +3698,26 @@ GCCycle(JSRuntime *rt, bool incremental,
 
     /* Don't GC if we are reporting an OOM. */
     if (rt->inOOMReport)
         return;
 
     AutoLockGC lock(rt);
     AutoGCSession gcsession(rt);
 
-#ifdef JS_THREADSAFE
     /*
      * As we about to purge caches and clear the mark bits we must wait for
      * any background finalization to finish. We must also wait for the
      * background allocation to finish so we can avoid taking the GC lock
      * when manipulating the chunks during the GC.
      */
     {
         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_WAIT_BACKGROUND_THREAD);
         rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
     }
-#endif
 
     bool startBackgroundSweep = false;
     {
         AutoUnlockGC unlock(rt);
 
         if (!incremental) {
             /* If non-incremental GC was requested, reset incremental GC. */
             ResetIncrementalGC(rt, "requested");
@@ -3728,20 +3741,18 @@ GCCycle(JSRuntime *rt, bool incremental,
             for (CompartmentsIter c(rt); !c.done(); c.next())
                 JS_ASSERT(!c->needsBarrier());
         }
 #endif
         if (shouldSweep)
             SweepPhase(rt, gckind, &startBackgroundSweep);
     }
 
-#ifdef JS_THREADSAFE
     if (startBackgroundSweep)
         rt->gcHelperThread.startBackgroundSweep(gckind == GC_SHRINK);
-#endif
 }
 
 #ifdef JS_GC_ZEAL
 static bool
 IsDeterministicGCReason(gcreason::Reason reason)
 {
     if (reason > gcreason::DEBUG_GC && reason != gcreason::CC_FORCED)
         return false;
@@ -3957,19 +3968,17 @@ IterateCompartmentsArenasCells(JSRuntime
                                JSIterateCompartmentCallback compartmentCallback,
                                IterateArenaCallback arenaCallback,
                                IterateCellCallback cellCallback)
 {
     JS_ASSERT(!rt->gcRunning);
 
     AutoLockGC lock(rt);
     AutoHeapSession session(rt);
-#ifdef JS_THREADSAFE
     rt->gcHelperThread.waitBackgroundSweepEnd();
-#endif
     AutoUnlockGC unlock(rt);
 
     AutoCopyFreeListToArenas copy(rt);
     for (CompartmentsIter c(rt); !c.done(); c.next()) {
         (*compartmentCallback)(rt, data, c);
 
         for (size_t thingKind = 0; thingKind != FINALIZE_LIMIT; thingKind++) {
             JSGCTraceKind traceKind = MapAllocToTraceKind(AllocKind(thingKind));
@@ -3984,37 +3993,33 @@ IterateCompartmentsArenasCells(JSRuntime
 void
 IterateChunks(JSRuntime *rt, void *data, IterateChunkCallback chunkCallback)
 {
     /* :XXX: Any way to common this preamble with IterateCompartmentsArenasCells? */
     JS_ASSERT(!rt->gcRunning);
 
     AutoLockGC lock(rt);
     AutoHeapSession session(rt);
-#ifdef JS_THREADSAFE
     rt->gcHelperThread.waitBackgroundSweepEnd();
-#endif
     AutoUnlockGC unlock(rt);
 
     for (js::GCChunkSet::Range r = rt->gcChunkSet.all(); !r.empty(); r.popFront())
         chunkCallback(rt, data, r.front());
 }
 
 void
 IterateCells(JSRuntime *rt, JSCompartment *compartment, AllocKind thingKind,
              void *data, IterateCellCallback cellCallback)
 {
     /* :XXX: Any way to common this preamble with IterateCompartmentsArenasCells? */
     JS_ASSERT(!rt->gcRunning);
 
     AutoLockGC lock(rt);
     AutoHeapSession session(rt);
-#ifdef JS_THREADSAFE
     rt->gcHelperThread.waitBackgroundSweepEnd();
-#endif
     AutoUnlockGC unlock(rt);
 
     AutoCopyFreeListToArenas copy(rt);
 
     JSGCTraceKind traceKind = MapAllocToTraceKind(thingKind);
     size_t thingSize = Arena::thingSize(thingKind);
 
     if (compartment) {
@@ -4342,19 +4347,17 @@ StartVerifyBarriers(JSRuntime *rt)
         return;
 
     AutoLockGC lock(rt);
     AutoHeapSession session(rt);
 
     if (!IsIncrementalGCSafe(rt))
         return;
 
-#ifdef JS_THREADSAFE
     rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
-#endif
 
     AutoUnlockGC unlock(rt);
 
     AutoCopyFreeListToArenas copy(rt);
     RecordNativeStackTopForGC(rt);
 
     for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
         r.front()->bitmap.clear();
@@ -4471,19 +4474,17 @@ AssertMarkedOrAllocated(const EdgeValue 
 }
 
 static void
 EndVerifyBarriers(JSRuntime *rt)
 {
     AutoLockGC lock(rt);
     AutoHeapSession session(rt);
 
-#ifdef JS_THREADSAFE
     rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
-#endif
 
     AutoUnlockGC unlock(rt);
 
     AutoCopyFreeListToArenas copy(rt);
     RecordNativeStackTopForGC(rt);
 
     VerifyTracer *trc = (VerifyTracer *)rt->gcVerifyData;
 
@@ -4696,19 +4697,17 @@ PurgePCCounts(JSContext *cx)
 JS_PUBLIC_API(void)
 JS_IterateCompartments(JSRuntime *rt, void *data,
                        JSIterateCompartmentCallback compartmentCallback)
 {
     JS_ASSERT(!rt->gcRunning);
 
     AutoLockGC lock(rt);
     AutoHeapSession session(rt);
-#ifdef JS_THREADSAFE
     rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
-#endif
     AutoUnlockGC unlock(rt);
 
     for (CompartmentsIter c(rt); !c.done(); c.next())
         (*compartmentCallback)(rt, data, c);
 }
 
 #if JS_HAS_XML_SUPPORT
 extern size_t sE4XObjectsCreated;
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -154,17 +154,16 @@ struct ArenaLists {
      * update the arena header after the initial allocation. When starting the
      * GC we only move the head of the of the list of spans back to the arena
      * only for the arena that was not fully allocated.
      */
     FreeSpan       freeLists[FINALIZE_LIMIT];
 
     ArenaList      arenaLists[FINALIZE_LIMIT];
 
-#ifdef JS_THREADSAFE
     /*
      * The background finalization adds the finalized arenas to the list at
      * the *cursor position. backgroundFinalizeState controls the interaction
      * between the GC lock and the access to the list from the allocation
      * thread.
      *
      * BFS_DONE indicates that the finalizations is not running or cannot
      * affect this arena list. The allocation thread can access the list
@@ -178,37 +177,32 @@ struct ArenaLists {
      */
     enum BackgroundFinalizeState {
         BFS_DONE,
         BFS_RUN,
         BFS_JUST_FINISHED
     };
 
     volatile uintptr_t backgroundFinalizeState[FINALIZE_LIMIT];
-#endif
 
   public:
     ArenaLists() {
         for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
             freeLists[i].initAsEmpty();
-#ifdef JS_THREADSAFE
         for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
             backgroundFinalizeState[i] = BFS_DONE;
-#endif
     }
 
     ~ArenaLists() {
         for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
-#ifdef JS_THREADSAFE
             /*
              * We can only call this during the shutdown after the last GC when
              * the background finalization is disabled.
              */
             JS_ASSERT(backgroundFinalizeState[i] == BFS_DONE);
-#endif
             ArenaHeader **headp = &arenaLists[i].head;
             while (ArenaHeader *aheader = *headp) {
                 *headp = aheader->next;
                 aheader->chunk()->releaseArena(aheader);
             }
         }
     }
 
@@ -217,49 +211,43 @@ struct ArenaLists {
     }
 
     ArenaHeader *getFirstArena(AllocKind thingKind) const {
         return arenaLists[thingKind].head;
     }
 
     bool arenaListsAreEmpty() const {
         for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
-#ifdef JS_THREADSAFE
             /*
              * The arena cannot be empty if the background finalization is not yet
              * done.
              */
             if (backgroundFinalizeState[i] != BFS_DONE)
                 return false;
-#endif
             if (arenaLists[i].head)
                 return false;
         }
         return true;
     }
 
     void unmarkAll() {
         for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
-# ifdef JS_THREADSAFE
             /* The background finalization must have stopped at this point. */
             JS_ASSERT(backgroundFinalizeState[i] == BFS_DONE ||
                       backgroundFinalizeState[i] == BFS_JUST_FINISHED);
-# endif
             for (ArenaHeader *aheader = arenaLists[i].head; aheader; aheader = aheader->next) {
                 uintptr_t *word = aheader->chunk()->bitmap.arenaBits(aheader);
                 memset(word, 0, ArenaBitmapWords * sizeof(uintptr_t));
             }
         }
     }
 
-#ifdef JS_THREADSAFE
     bool doneBackgroundFinalize(AllocKind kind) const {
         return backgroundFinalizeState[kind] == BFS_DONE;
     }
-#endif
 
     /*
      * Return the free list back to the arena so the GC finalization will not
      * run the finalizers over unitialized bytes from free things.
      */
     void purge() {
         for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
             FreeSpan *headSpan = &freeLists[i];
@@ -348,19 +336,17 @@ struct ArenaLists {
         JS_ASSERT(freeLists[kind].isEmpty());
     }
 
     void finalizeObjects(FreeOp *fop);
     void finalizeStrings(FreeOp *fop);
     void finalizeShapes(FreeOp *fop);
     void finalizeScripts(FreeOp *fop);
 
-#ifdef JS_THREADSAFE
     static void backgroundFinalize(FreeOp *fop, ArenaHeader *listHead);
-#endif
 
   private:
     inline void finalizeNow(FreeOp *fop, AllocKind thingKind);
     inline void finalizeLater(FreeOp *fop, AllocKind thingKind);
 
     inline void *allocateFromArena(JSCompartment *comp, AllocKind thingKind);
 };
 
@@ -502,18 +488,24 @@ PrepareForDebugGC(JSRuntime *rt);
 
 } /* namespace js */
 
 namespace js {
 
 void
 InitTracer(JSTracer *trc, JSRuntime *rt, JSTraceCallback callback);
 
-#ifdef JS_THREADSAFE
-
+/*
+ * Helper that implements sweeping and allocation for kinds that can be swept
+ * and allocated off the main thread.
+ *
+ * In non-threadsafe builds, all actual sweeping and allocation is performed
+ * on the main thread, but GCHelperThread encapsulates this from clients as
+ * much as possible.
+ */
 class GCHelperThread {
     enum State {
         IDLE,
         SWEEPING,
         ALLOCATING,
         CANCEL_ALLOCATION,
         SHUTDOWN
     };
@@ -630,17 +622,16 @@ class GCHelperThread {
         else
             replenishAndFreeLater(ptr);
     }
 
     /* Must be called with the GC lock taken. */
     bool prepareForBackgroundSweep();
 };
 
-#endif /* JS_THREADSAFE */
 
 struct GCChunkHasher {
     typedef gc::Chunk *Lookup;
 
     /*
      * Strip zeros for better distribution after multiplying by the golden
      * ratio.
      */
--- a/js/src/jsgcinlines.h
+++ b/js/src/jsgcinlines.h
@@ -393,20 +393,18 @@ class GCCompartmentsIter {
  * in the partially initialized thing.
  */
 
 template <typename T>
 inline T *
 NewGCThing(JSContext *cx, js::gc::AllocKind kind, size_t thingSize)
 {
     JS_ASSERT(thingSize == js::gc::Arena::thingSize(kind));
-#ifdef JS_THREADSAFE
-    JS_ASSERT_IF((cx->compartment == cx->runtime->atomsCompartment),
+    JS_ASSERT_IF(cx->compartment == cx->runtime->atomsCompartment,
                  kind == js::gc::FINALIZE_STRING || kind == js::gc::FINALIZE_SHORT_STRING);
-#endif
     JS_ASSERT(!cx->runtime->gcRunning);
     JS_ASSERT(!cx->runtime->noGCOrAllocationCheck);
 
     /* For testing out of memory conditions */
     JS_OOM_POSSIBLY_FAIL_REPORT(cx);
 
 #ifdef JS_GC_ZEAL
     if (cx->runtime->needZealousGC())
@@ -426,20 +424,18 @@ NewGCThing(JSContext *cx, js::gc::AllocK
 }
 
 /* Alternate form which allocates a GC thing if doing so cannot trigger a GC. */
 template <typename T>
 inline T *
 TryNewGCThing(JSContext *cx, js::gc::AllocKind kind, size_t thingSize)
 {
     JS_ASSERT(thingSize == js::gc::Arena::thingSize(kind));
-#ifdef JS_THREADSAFE
-    JS_ASSERT_IF((cx->compartment == cx->runtime->atomsCompartment),
+    JS_ASSERT_IF(cx->compartment == cx->runtime->atomsCompartment,
                  kind == js::gc::FINALIZE_STRING || kind == js::gc::FINALIZE_SHORT_STRING);
-#endif
     JS_ASSERT(!cx->runtime->gcRunning);
     JS_ASSERT(!cx->runtime->noGCOrAllocationCheck);
 
 #ifdef JS_GC_ZEAL
     if (cx->runtime->needZealousGC())
         return NULL;
 #endif
 
--- a/js/src/jslock.h
+++ b/js/src/jslock.h
@@ -18,16 +18,20 @@
 
 # define JS_ATOMIC_INCREMENT(p)      PR_ATOMIC_INCREMENT((PRInt32 *)(p))
 # define JS_ATOMIC_DECREMENT(p)      PR_ATOMIC_DECREMENT((PRInt32 *)(p))
 # define JS_ATOMIC_ADD(p,v)          PR_ATOMIC_ADD((PRInt32 *)(p), (PRInt32)(v))
 # define JS_ATOMIC_SET(p,v)          PR_ATOMIC_SET((PRInt32 *)(p), (PRInt32)(v))
 
 #else  /* JS_THREADSAFE */
 
+typedef struct PRThread PRThread;
+typedef struct PRCondVar PRCondVar;
+typedef struct PRLock PRLock;
+
 # define JS_ATOMIC_INCREMENT(p)      (++*(p))
 # define JS_ATOMIC_DECREMENT(p)      (--*(p))
 # define JS_ATOMIC_ADD(p,v)          (*(p) += (v))
 # define JS_ATOMIC_SET(p,v)          (*(p) = (v))
 
 #endif /* JS_THREADSAFE */
 
 namespace js {
--- a/js/src/jsobjinlines.h
+++ b/js/src/jsobjinlines.h
@@ -1350,29 +1350,25 @@ NewObjectCache::copyCachedToObject(JSObj
     Shape::writeBarrierPost(dst->shape_, &dst->shape_);
     types::TypeObject::writeBarrierPost(dst->type_, &dst->type_);
 #endif
 }
 
 static inline bool
 CanBeFinalizedInBackground(gc::AllocKind kind, Class *clasp)
 {
-#ifdef JS_THREADSAFE
     JS_ASSERT(kind <= gc::FINALIZE_OBJECT_LAST);
     /* If the class has no finalizer or a finalizer that is safe to call on
      * a different thread, we change the finalize kind. For example,
      * FINALIZE_OBJECT0 calls the finalizer on the main thread,
      * FINALIZE_OBJECT0_BACKGROUND calls the finalizer on the gcHelperThread.
      * IsBackgroundAllocKind is called to prevent recursively incrementing
      * the finalize kind; kind may already be a background finalize kind.
      */
-    if (!gc::IsBackgroundAllocKind(kind) && !clasp->finalize)
-        return true;
-#endif
-    return false;
+    return (!gc::IsBackgroundAllocKind(kind) && !clasp->finalize);
 }
 
 /*
  * Make an object with the specified prototype. If parent is null, it will
  * default to the prototype's global if the prototype is non-null.
  */
 JSObject *
 NewObjectWithGivenProto(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent,
@@ -1473,19 +1469,17 @@ NewObjectWithType(JSContext *cx, HandleT
 /* Make an object with pregenerated shape from a NEWOBJECT bytecode. */
 static inline JSObject *
 CopyInitializerObject(JSContext *cx, HandleObject baseobj)
 {
     JS_ASSERT(baseobj->getClass() == &ObjectClass);
     JS_ASSERT(!baseobj->inDictionaryMode());
 
     gc::AllocKind kind = gc::GetGCObjectFixedSlotsKind(baseobj->numFixedSlots());
-#ifdef JS_THREADSAFE
     kind = gc::GetBackgroundAllocKind(kind);
-#endif
     JS_ASSERT(kind == baseobj->getAllocKind());
     JSObject *obj = NewBuiltinClassInstance(cx, &ObjectClass, kind);
 
     if (!obj)
         return NULL;
 
     if (!obj->setLastProperty(cx, baseobj->lastProperty()))
         return NULL;
--- a/js/src/jstypedarray.cpp
+++ b/js/src/jstypedarray.cpp
@@ -249,22 +249,17 @@ DelegateObject(JSContext *cx, HandleObje
 JSObject *
 ArrayBufferObject::create(JSContext *cx, uint32_t nbytes, uint8_t *contents)
 {
     SkipRoot skip(cx, &contents);
 
     RootedObject obj(cx, NewBuiltinClassInstance(cx, &ArrayBufferObject::protoClass));
     if (!obj)
         return NULL;
-#ifdef JS_THREADSAFE
     JS_ASSERT(obj->getAllocKind() == gc::FINALIZE_OBJECT16_BACKGROUND);
-#else
-    JS_ASSERT(obj->getAllocKind() == gc::FINALIZE_OBJECT16);
-#endif
-
     JS_ASSERT(obj->getClass() == &ArrayBufferObject::protoClass);
 
     js::Shape *empty = EmptyShape::getInitialShape(cx, &ArrayBufferClass,
                                                    obj->getProto(), obj->getParent(),
                                                    gc::FINALIZE_OBJECT16);
     if (!empty)
         return NULL;
     obj->setLastPropertyInfallible(empty);
@@ -1392,21 +1387,17 @@ class TypedArrayTemplate
 
     static JSObject *
     makeInstance(JSContext *cx, HandleObject bufobj, uint32_t byteOffset, uint32_t len,
                  HandleObject proto)
     {
         RootedObject obj(cx, NewBuiltinClassInstance(cx, protoClass()));
         if (!obj)
             return NULL;
-#ifdef JS_THREADSAFE
         JS_ASSERT(obj->getAllocKind() == gc::FINALIZE_OBJECT8_BACKGROUND);
-#else
-        JS_ASSERT(obj->getAllocKind() == gc::FINALIZE_OBJECT8);
-#endif
 
         types::TypeObject *type;
         if (proto) {
             type = proto->getNewType(cx);
         } else {
             /*
              * Specialize the type of the object on the current scripted location,
              * and mark the type as definitely a typed array.
--- a/js/src/vm/ScopeObject.cpp
+++ b/js/src/vm/ScopeObject.cpp
@@ -91,20 +91,18 @@ CallObject *
 CallObject::create(JSContext *cx, JSScript *script, HandleObject enclosing, HandleFunction callee)
 {
     RootedShape shape(cx);
     shape = script->bindings.callObjectShape(cx);
     if (shape == NULL)
         return NULL;
 
     gc::AllocKind kind = gc::GetGCObjectKind(shape->numFixedSlots());
-#ifdef JS_THREADSAFE
     JS_ASSERT(CanBeFinalizedInBackground(kind, &CallClass));
     kind = gc::GetBackgroundAllocKind(kind);
-#endif
 
     RootedTypeObject type(cx);
     type = cx->compartment->getEmptyType(cx);
     if (!type)
         return NULL;
 
     HeapSlot *slots;
     if (!PreallocateObjectDynamicSlots(cx, shape, &slots))
--- a/js/src/vm/ScopeObject.h
+++ b/js/src/vm/ScopeObject.h
@@ -196,41 +196,33 @@ class WithObject : public NestedScopeObj
 {
     static const unsigned THIS_SLOT = 2;
 
     /* Use WithObject::object() instead. */
     JSObject *getProto() const;
 
   public:
     static const unsigned RESERVED_SLOTS = 3;
-#ifdef JS_THREADSAFE
     static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4_BACKGROUND;
-#else
-    static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4;
-#endif
 
     static WithObject *
     create(JSContext *cx, HandleObject proto, HandleObject enclosing, uint32_t depth);
 
     /* Return object for the 'this' class hook. */
     JSObject &withThis() const;
 
     /* Return the 'o' in 'with (o)'. */
     JSObject &object() const;
 };
 
 class BlockObject : public NestedScopeObject
 {
   public:
     static const unsigned RESERVED_SLOTS = CALL_BLOCK_RESERVED_SLOTS;
-#ifdef JS_THREADSAFE
     static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4_BACKGROUND;
-#else
-    static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4;
-#endif
 
     /* Return the number of variables associated with this block. */
     inline uint32_t slotCount() const;
 
     /*
      * Return the local corresponding to the ith binding where i is in the
      * range [0, slotCount()) and the return local index is in the range
      * [script->nfixed, script->nfixed + script->nslots).