Bug 641025 - Incremental GC (r=igor,smaug,roc,cdleary,gregor)
author      Bill McCloskey <wmccloskey@mozilla.com>
Fri, 17 Feb 2012 14:35:20 -0800
changeset 90019 2a8ceeb27f7c8a6a43ef01d55d827ddccb887e33
parent 90018 2e89173cb52e89af2d6661d6d929d9359302a772
child 90020 50aae34555bb0032ccfffe4a2b362a1ee6f703e9
push id     unknown
push user   unknown
push date   unknown
reviewers   igor, smaug, roc, cdleary, gregor
bugs        641025
milestone   13.0a1
Bug 641025 - Incremental GC (r=igor,smaug,roc,cdleary,gregor)
content/media/test/Makefile.in
dom/base/nsDOMWindowUtils.cpp
dom/base/nsJSEnvironment.cpp
dom/base/nsJSEnvironment.h
dom/interfaces/base/nsIDOMWindowUtils.idl
dom/plugins/base/nsJSNPRuntime.cpp
dom/src/events/nsJSEventListener.cpp
dom/workers/ListenerManager.cpp
dom/workers/Worker.cpp
dom/workers/WorkerScope.cpp
dom/workers/XMLHttpRequest.cpp
js/jsd/jsd_xpc.cpp
js/src/Makefile.in
js/src/builtin/MapObject.cpp
js/src/ctypes/CTypes.cpp
js/src/gc/Barrier-inl.h
js/src/gc/Barrier.h
js/src/gc/Statistics.cpp
js/src/gc/Statistics.h
js/src/jsapi.cpp
js/src/jsapi.h
js/src/jscntxt.cpp
js/src/jscntxt.h
js/src/jscompartment.cpp
js/src/jscompartment.h
js/src/jsexn.cpp
js/src/jsfriendapi.cpp
js/src/jsfriendapi.h
js/src/jsfun.cpp
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsgcinlines.h
js/src/jsgcmark.cpp
js/src/jsgcmark.h
js/src/jsinfer.cpp
js/src/jsinterp.cpp
js/src/jsiter.cpp
js/src/jsobj.cpp
js/src/jsobj.h
js/src/jsobjinlines.h
js/src/jspropertycache.cpp
js/src/jsproxy.cpp
js/src/jstypedarray.cpp
js/src/jsweakmap.cpp
js/src/jsweakmap.h
js/src/jsxml.cpp
js/src/methodjit/Compiler.cpp
js/src/methodjit/Compiler.h
js/src/methodjit/MethodJIT.h
js/src/methodjit/PolyIC.cpp
js/src/methodjit/StubCalls.cpp
js/src/shell/js.cpp
js/src/vm/Debugger.cpp
js/src/vm/RegExpObject-inl.h
js/src/vm/RegExpObject.cpp
js/src/vm/RegExpObject.h
js/src/vm/RegExpStatics.cpp
js/src/vm/Stack.cpp
js/src/vm/Stack.h
js/xpconnect/idl/nsIXPConnect.idl
js/xpconnect/src/XPCInlines.h
js/xpconnect/src/XPCJSRuntime.cpp
js/xpconnect/src/nsXPConnect.cpp
js/xpconnect/src/xpcprivate.h
js/xpconnect/src/xpcpublic.h
layout/base/nsPresShell.cpp
modules/libpref/src/init/all.js
toolkit/components/telemetry/TelemetryHistograms.h
toolkit/content/aboutSupport.js
toolkit/content/aboutSupport.xhtml
toolkit/locales/en-US/chrome/global/aboutSupport.dtd
--- a/content/media/test/Makefile.in
+++ b/content/media/test/Makefile.in
@@ -104,17 +104,16 @@ include $(topsrcdir)/config/rules.mk
 		test_bug448534.html \
 		test_bug463162.xhtml \
 		test_bug465498.html \
 		test_bug493187.html \
 		test_bug495145.html \
 		test_bug495300.html \
 		test_bug686942.html \
 		test_can_play_type.html \
-		test_closing_connections.html \
 		test_constants.html \
 		test_controls.html \
 		test_currentTime.html \
 		test_decode_error.html \
 		test_decoder_disable.html \
 		test_defaultMuted.html \
 		test_delay_load.html \
 		test_error_on_404.html \
@@ -171,16 +170,18 @@ endif
 # Bug 492821:
 #   test_videoDocumentTitle.html
 # Bug 493692:
 #   test_preload_suspend.html
 # Bug 567954 and Bug 574586:
 #   test_mixed_principals.html
 # Disabled since we don't play Wave files standalone, for now
 #		test_audioDocumentTitle.html
+# Bug 634564:
+#		test_closing_connections.html \
 
 # sample files
 _TEST_FILES += \
 		320x240.ogv \
 		448636.ogv \
 		audio-overhang.ogg \
 		audio-gaps.ogg \
 		beta-phrasebook.ogg \
--- a/dom/base/nsDOMWindowUtils.cpp
+++ b/dom/base/nsDOMWindowUtils.cpp
@@ -2087,16 +2087,23 @@ nsDOMWindowUtils::GetFileReferences(cons
   }
 
   *aRefCnt = *aDBRefCnt = *aSliceRefCnt = -1;
   *aResult = false;
   return NS_OK;
 }
 
 NS_IMETHODIMP
+nsDOMWindowUtils::IsIncrementalGCEnabled(JSContext* cx, bool* aResult)
+{
+  *aResult = js::IsIncrementalGCEnabled(JS_GetRuntime(cx));
+  return NS_OK;
+}
+
+NS_IMETHODIMP
 nsDOMWindowUtils::StartPCCountProfiling(JSContext* cx)
 {
   js::StartPCCountProfiling(cx);
   return NS_OK;
 }
 
 NS_IMETHODIMP
 nsDOMWindowUtils::StopPCCountProfiling(JSContext* cx)
--- a/dom/base/nsJSEnvironment.cpp
+++ b/dom/base/nsJSEnvironment.cpp
@@ -130,16 +130,19 @@ static PRLogModuleInfo* gJSDiagnostics;
 #define NS_GC_DELAY                 4000 // ms
 
 #define NS_SHRINK_GC_BUFFERS_DELAY  4000 // ms
 
 // The amount of time we wait from the first request to GC to actually
 // doing the first GC.
 #define NS_FIRST_GC_DELAY           10000 // ms
 
+// Maximum amount of time that should elapse between incremental GC slices
+#define NS_INTERSLICE_GC_DELAY      100 // ms
+
 // The amount of time we wait between a request to CC (after GC ran)
 // and doing the actual CC.
 #define NS_CC_DELAY                 5000 // ms
 
 #define NS_CC_SKIPPABLE_DELAY       250 // ms
 
 #define NS_CC_FORCED                (5 * 60 * PR_USEC_PER_SEC) // 5 min
 
@@ -149,16 +152,19 @@ static PRLogModuleInfo* gJSDiagnostics;
 
 static nsITimer *sGCTimer;
 static nsITimer *sShrinkGCBuffersTimer;
 static nsITimer *sCCTimer;
 
 static PRTime sLastCCEndTime;
 
 static bool sGCHasRun;
+static bool sCCLockedOut;
+
+static js::GCSliceCallback sPrevGCSliceCallback;
 
 // The number of currently pending document loads. This count isn't
 // guaranteed to always reflect reality and can't easily as we don't
 // have an easy place to know when a load ends or is interrupted in
 // all cases. This counter also gets reset if we end up GC'ing while
 // we're waiting for a slow page to load. IOW, this count may be 0
 // even when there are pending loads.
 static PRUint32 sPendingLoadCount;
@@ -3269,16 +3275,21 @@ nsJSContext::ShrinkGCBuffersNow()
 void
 nsJSContext::CycleCollectNow(nsICycleCollectorListener *aListener,
                              PRInt32 aExtraForgetSkippableCalls)
 {
   if (!NS_IsMainThread()) {
     return;
   }
 
+  if (sCCLockedOut) {
+    // We're in the middle of an incremental GC; finish it first
+    nsJSContext::GarbageCollectNow(js::gcreason::CC_FORCED, nsGCNormal);
+  }
+
   SAMPLE_LABEL("GC", "CycleCollectNow");
   NS_TIME_FUNCTION_MIN(1.0);
 
   KillCCTimer();
 
   PRTime start = PR_Now();
 
   PRUint32 suspected = nsCycleCollector_suspectedCount();
@@ -3352,17 +3363,17 @@ nsJSContext::CycleCollectNow(nsICycleCol
 
 // static
 void
 GCTimerFired(nsITimer *aTimer, void *aClosure)
 {
   NS_RELEASE(sGCTimer);
 
   uintptr_t reason = reinterpret_cast<uintptr_t>(aClosure);
-  nsJSContext::GarbageCollectNow(static_cast<js::gcreason::Reason>(reason), nsGCNormal);
+  nsJSContext::GarbageCollectNow(static_cast<js::gcreason::Reason>(reason), nsGCIncremental);
 }
 
 void
 ShrinkGCBuffersTimerFired(nsITimer *aTimer, void *aClosure)
 {
   NS_RELEASE(sShrinkGCBuffersTimer);
 
   nsJSContext::ShrinkGCBuffersNow();
@@ -3370,16 +3381,19 @@ ShrinkGCBuffersTimerFired(nsITimer *aTim
 
 // static
 void
 CCTimerFired(nsITimer *aTimer, void *aClosure)
 {
   if (sDidShutdown) {
     return;
   }
+  if (sCCLockedOut) {
+    return;
+  }
   ++sCCTimerFireCount;
   if (sCCTimerFireCount < (NS_CC_DELAY / NS_CC_SKIPPABLE_DELAY)) {
     PRUint32 suspected = nsCycleCollector_suspectedCount();
     if ((sPreviousSuspectedCount + 100) > suspected) {
       // Just few new suspected objects, return early.
       return;
     }
     
@@ -3438,36 +3452,38 @@ nsJSContext::LoadEnd()
 
  // It's probably a good idea to GC soon since we have finished loading.
   sLoadingInProgress = false;
   PokeGC(js::gcreason::LOAD_END);
 }
 
 // static
 void
-nsJSContext::PokeGC(js::gcreason::Reason aReason)
+nsJSContext::PokeGC(js::gcreason::Reason aReason, int aDelay)
 {
   if (sGCTimer) {
     // There's already a timer for GC'ing, just return
     return;
   }
 
   CallCreateInstance("@mozilla.org/timer;1", &sGCTimer);
 
   if (!sGCTimer) {
     // Failed to create timer (probably because we're in XPCOM shutdown)
     return;
   }
 
   static bool first = true;
 
   sGCTimer->InitWithFuncCallback(GCTimerFired, reinterpret_cast<void *>(aReason),
-                                 first
-                                 ? NS_FIRST_GC_DELAY
-                                 : NS_GC_DELAY,
+                                 aDelay
+                                 ? aDelay
+                                 : (first
+                                    ? NS_FIRST_GC_DELAY
+                                    : NS_GC_DELAY),
                                  nsITimer::TYPE_ONE_SHOT);
 
   first = false;
 }
 
 // static
 void
 nsJSContext::PokeShrinkGCBuffers()
@@ -3544,71 +3560,92 @@ nsJSContext::KillCCTimer()
 
 void
 nsJSContext::GC(js::gcreason::Reason aReason)
 {
   PokeGC(aReason);
 }
 
 static void
-DOMGCFinishedCallback(JSRuntime *rt, JSCompartment *comp, const char *status)
+DOMGCSliceCallback(JSRuntime *aRt, js::GCProgress aProgress, const js::GCDescription &aDesc)
 {
   NS_ASSERTION(NS_IsMainThread(), "GCs must run on the main thread");
 
-  if (sPostGCEventsToConsole) {
+  if (aDesc.logMessage && sPostGCEventsToConsole) {
     PRTime now = PR_Now();
     PRTime delta = 0;
     if (sFirstCollectionTime) {
       delta = now - sFirstCollectionTime;
     } else {
       sFirstCollectionTime = now;
     }
 
     NS_NAMED_LITERAL_STRING(kFmt, "GC(T+%.1f) %s");
     nsString msg;
     msg.Adopt(nsTextFormatter::smprintf(kFmt.get(),
-                                        double(delta) / PR_USEC_PER_SEC, status));
+                                        double(delta) / PR_USEC_PER_SEC,
+                                        aDesc.logMessage));
     nsCOMPtr<nsIConsoleService> cs = do_GetService(NS_CONSOLESERVICE_CONTRACTID);
     if (cs) {
       cs->LogStringMessage(msg.get());
     }
   }
 
-  sCCollectedWaitingForGC = 0;
-  sCleanupSinceLastGC = false;
-
-  if (sGCTimer) {
-    // If we were waiting for a GC to happen, kill the timer.
+  // Prevent cycle collections during incremental GC.
+  if (aProgress == js::GC_CYCLE_BEGIN) {
+    sCCLockedOut = true;
+  } else if (aProgress == js::GC_CYCLE_END) {
+    sCCLockedOut = false;
+  }
+
+  // The GC has more work to do, so schedule another GC slice.
+  if (aProgress == js::GC_SLICE_END) {
     nsJSContext::KillGCTimer();
-
-    // If this is a compartment GC, restart it. We still want
-    // a full GC to happen. Compartment GCs usually happen as a
-    // result of last-ditch or MaybeGC. In both cases its
-    // probably a time of heavy activity and we want to delay
-    // the full GC, but we do want it to happen eventually.
-    if (comp) {
-      nsJSContext::PokeGC(js::gcreason::POST_COMPARTMENT);
-
-      // We poked the GC, so we can kill any pending CC here.
-      nsJSContext::KillCCTimer();
+    nsJSContext::KillCCTimer();
+
+    nsJSContext::PokeGC(js::gcreason::INTER_SLICE_GC, NS_INTERSLICE_GC_DELAY);
+  }
+
+  if (aProgress == js::GC_CYCLE_END) {
+    sCCollectedWaitingForGC = 0;
+    sCleanupSinceLastGC = false;
+
+    if (sGCTimer) {
+      // If we were waiting for a GC to happen, kill the timer.
+      nsJSContext::KillGCTimer();
+
+      // If this is a compartment GC, restart it. We still want
+      // a full GC to happen. Compartment GCs usually happen as a
+      // result of last-ditch or MaybeGC. In both cases it's
+      // probably a time of heavy activity and we want to delay
+      // the full GC, but we do want it to happen eventually.
+      if (aDesc.isCompartment) {
+        nsJSContext::PokeGC(js::gcreason::POST_COMPARTMENT);
+
+        // We poked the GC, so we can kill any pending CC here.
+        nsJSContext::KillCCTimer();
+      }
+    } else {
+      // If this was a full GC, poke the CC to run soon.
+      if (!aDesc.isCompartment) {
+        sGCHasRun = true;
+        nsJSContext::MaybePokeCC();
+      }
     }
-  } else {
-    // If this was a full GC, poke the CC to run soon.
-    if (!comp) {
-      sGCHasRun = true;
-      nsJSContext::MaybePokeCC();
+
+    // If we didn't end up scheduling a GC, make sure that we release GC buffers
+    // soon after canceling previous shrinking attempt.
+    nsJSContext::KillShrinkGCBuffersTimer();
+    if (!sGCTimer) {
+      nsJSContext::PokeShrinkGCBuffers();
     }
   }
 
-  // If we didn't end up scheduling a GC, make sure that we release GC buffers
-  // soon after canceling previous shrinking attempt 
-  nsJSContext::KillShrinkGCBuffersTimer();
-  if (!sGCTimer) {
-    nsJSContext::PokeShrinkGCBuffers();
-  }
+  if (sPrevGCSliceCallback)
+    (*sPrevGCSliceCallback)(aRt, aProgress, aDesc);
 }
 
 // Script object management - note duplicate implementation
 // in nsJSRuntime below...
 nsresult
 nsJSContext::HoldScriptObject(void* aScriptObject)
 {
     NS_ASSERTION(sIsInitialized, "runtime not initialized");
@@ -3692,16 +3729,17 @@ nsJSRuntime::ParseVersion(const nsString
 
 //static
 void
 nsJSRuntime::Startup()
 {
   // initialize all our statics, so that we can restart XPCOM
   sGCTimer = sCCTimer = nsnull;
   sGCHasRun = false;
+  sCCLockedOut = false;
   sLastCCEndTime = 0;
   sPendingLoadCount = 0;
   sLoadingInProgress = false;
   sCCollectedWaitingForGC = 0;
   sPostGCEventsToConsole = false;
   gNameSpaceManager = nsnull;
   sRuntimeService = nsnull;
   sRuntime = nsnull;
@@ -3763,20 +3801,37 @@ SetMemoryMaxPrefChangedCallback(const ch
   PRUint32 max = (pref <= 0 || pref >= 0x1000) ? -1 : (PRUint32)pref * 1024 * 1024;
   JS_SetGCParameter(nsJSRuntime::sRuntime, JSGC_MAX_BYTES, max);
   return 0;
 }
 
 static int
 SetMemoryGCModePrefChangedCallback(const char* aPrefName, void* aClosure)
 {
-  bool enableCompartmentGC = Preferences::GetBool(aPrefName);
-  JS_SetGCParameter(nsJSRuntime::sRuntime, JSGC_MODE, enableCompartmentGC
-                                                      ? JSGC_MODE_COMPARTMENT
-                                                      : JSGC_MODE_GLOBAL);
+  PRBool enableCompartmentGC = Preferences::GetBool("javascript.options.mem.gc_per_compartment");
+  PRBool enableIncrementalGC = Preferences::GetBool("javascript.options.mem.gc_incremental");
+  JSGCMode mode;
+  if (enableIncrementalGC) {
+    mode = JSGC_MODE_INCREMENTAL;
+  } else if (enableCompartmentGC) {
+    mode = JSGC_MODE_COMPARTMENT;
+  } else {
+    mode = JSGC_MODE_GLOBAL;
+  }
+  JS_SetGCParameter(nsJSRuntime::sRuntime, JSGC_MODE, mode);
+  return 0;
+}
+
+static int
+SetMemoryGCSliceTimePrefChangedCallback(const char* aPrefName, void* aClosure)
+{
+  PRInt32 pref = Preferences::GetInt(aPrefName, -1);
+  // handle overflow and negative pref values
+  if (pref > 0 && pref < 100000)
+    JS_SetGCParameter(nsJSRuntime::sRuntime, JSGC_SLICE_TIME_BUDGET, pref);
   return 0;
 }
 
 static JSPrincipals *
 ObjectPrincipalFinder(JSContext *cx, JSObject *obj)
 {
   if (!sSecurityManager)
     return nsnull;
@@ -3853,17 +3908,17 @@ nsJSRuntime::Init()
   NS_ENSURE_SUCCESS(rv, rv);
 
   rv = sRuntimeService->GetRuntime(&sRuntime);
   NS_ENSURE_SUCCESS(rv, rv);
 
   // Let's make sure that our main thread is the same as the xpcom main thread.
   NS_ASSERTION(NS_IsMainThread(), "bad");
 
-  ::JS_SetGCFinishedCallback(sRuntime, DOMGCFinishedCallback);
+  sPrevGCSliceCallback = js::SetGCSliceCallback(sRuntime, DOMGCSliceCallback);
 
   JSSecurityCallbacks *callbacks = JS_GetRuntimeSecurityCallbacks(sRuntime);
   NS_ASSERTION(callbacks, "SecMan should have set security callbacks!");
 
   callbacks->findObjectPrincipals = ObjectPrincipalFinder;
 
   // Set up the structured clone callbacks.
   static JSStructuredCloneCallbacks cloneCallbacks = {
@@ -3898,16 +3953,26 @@ nsJSRuntime::Init()
   SetMemoryMaxPrefChangedCallback("javascript.options.mem.max",
                                   nsnull);
 
   Preferences::RegisterCallback(SetMemoryGCModePrefChangedCallback,
                                 "javascript.options.mem.gc_per_compartment");
   SetMemoryGCModePrefChangedCallback("javascript.options.mem.gc_per_compartment",
                                      nsnull);
 
+  Preferences::RegisterCallback(SetMemoryGCModePrefChangedCallback,
+                                "javascript.options.mem.gc_incremental");
+  SetMemoryGCModePrefChangedCallback("javascript.options.mem.gc_incremental",
+                                     nsnull);
+
+  Preferences::RegisterCallback(SetMemoryGCSliceTimePrefChangedCallback,
+                                "javascript.options.mem.gc_incremental_slice_ms");
+  SetMemoryGCSliceTimePrefChangedCallback("javascript.options.mem.gc_incremental_slice_ms",
+                                          nsnull);
+
   nsCOMPtr<nsIObserverService> obs = mozilla::services::GetObserverService();
   if (!obs)
     return NS_ERROR_FAILURE;
 
   Preferences::AddBoolVarCache(&sGCOnMemoryPressure,
                                "javascript.options.gc_on_memory_pressure",
                                true);
 
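DOMGCSliceCallback above is the scheduling heart of the DOM integration: GC_CYCLE_BEGIN locks the cycle collector out, each GC_SLICE_END re-arms the GC timer with NS_INTERSLICE_GC_DELAY so the next slice runs within ~100ms, and GC_CYCLE_END lifts the lockout and lets the CC run. A minimal sketch of the same pattern for a generic embedder, using only the APIs this patch introduces; ScheduleGCInMs and CancelGCTimer are hypothetical stand-ins for the nsITimer machinery:

    static js::GCSliceCallback gPrevCallback;

    static void
    MySliceCallback(JSRuntime *rt, js::GCProgress progress, const js::GCDescription &desc)
    {
        if (progress == js::GC_SLICE_END) {
            // More slices remain: schedule the next one soon, leaving the
            // mutator a window to run in between.
            ScheduleGCInMs(100);            // hypothetical timer helper
        } else if (progress == js::GC_CYCLE_END) {
            CancelGCTimer();                // hypothetical timer helper
        }

        // Always chain to any previously installed callback.
        if (gPrevCallback)
            (*gPrevCallback)(rt, progress, desc);
    }

    void
    EnableIncrementalGC(JSRuntime *rt)
    {
        gPrevCallback = js::SetGCSliceCallback(rt, MySliceCallback);
        JS_SetGCParameter(rt, JSGC_SLICE_TIME_BUDGET, 10);  // ~10ms per slice
        JS_SetGCParameter(rt, JSGC_MODE, JSGC_MODE_INCREMENTAL);
    }
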
--- a/dom/base/nsJSEnvironment.h
+++ b/dom/base/nsJSEnvironment.h
@@ -183,17 +183,17 @@ public:
 
   static void GarbageCollectNow(js::gcreason::Reason reason, PRUint32 gckind = nsGCNormal);
   static void ShrinkGCBuffersNow();
   // If aExtraForgetSkippableCalls is -1, forgetSkippable won't be
   // called even if the previous collection was GC.
   static void CycleCollectNow(nsICycleCollectorListener *aListener = nsnull,
                               PRInt32 aExtraForgetSkippableCalls = 0);
 
-  static void PokeGC(js::gcreason::Reason aReason);
+  static void PokeGC(js::gcreason::Reason aReason, int aDelay = 0);
   static void KillGCTimer();
 
   static void PokeShrinkGCBuffers();
   static void KillShrinkGCBuffersTimer();
 
   static void MaybePokeCC();
   static void KillCCTimer();
 
--- a/dom/interfaces/base/nsIDOMWindowUtils.idl
+++ b/dom/interfaces/base/nsIDOMWindowUtils.idl
@@ -65,17 +65,17 @@ interface nsIDOMEvent;
 interface nsITransferable;
 interface nsIQueryContentEventResult;
 interface nsIDOMWindow;
 interface nsIDOMBlob;
 interface nsIDOMFile;
 interface nsIFile;
 interface nsIDOMTouch;
 
-[scriptable, uuid(ab6e9c71-8aa1-40bb-8bf9-65e16429055f)]
+[scriptable, uuid(73b48170-55d5-11e1-b86c-0800200c9a66)]
 interface nsIDOMWindowUtils : nsISupports {
 
   /**
    * Image animation mode of the window. When this attribute's value
    * is changed, the implementation should set all images in the window
    * to the given value. That is, when set to kDontAnimMode, all images
    * will stop animating. The attribute's value must be one of the
    * animationMode values from imgIContainer.
@@ -988,16 +988,22 @@ interface nsIDOMWindowUtils : nsISupport
    *
    */
   boolean getFileReferences(in AString aDatabaseName, in long long aId,
                             [optional] out long aRefCnt,
                             [optional] out long aDBRefCnt,
                             [optional] out long aSliceRefCnt);
 
   /**
+   * Return true unless incremental GC has been disabled (for example,
+   * due to a binary add-on).
+   */
+  [implicit_jscontext]
+  boolean isIncrementalGCEnabled();
+
+  /**
    * Begin opcode-level profiling of all JavaScript execution in the window's
    * runtime.
    */
   [implicit_jscontext]
   void startPCCountProfiling();
 
   /**
    * Stop opcode-level profiling of JavaScript execution in the runtime, and
--- a/dom/plugins/base/nsJSNPRuntime.cpp
+++ b/dom/plugins/base/nsJSNPRuntime.cpp
@@ -174,17 +174,17 @@ NPObjWrapper_Construct(JSContext *cx, ui
 
 static JSBool
 CreateNPObjectMember(NPP npp, JSContext *cx, JSObject *obj, NPObject *npobj,
                      jsid id, NPVariant* getPropertyResult, jsval *vp);
 
 static JSClass sNPObjectJSWrapperClass =
   {
     NPRUNTIME_JSCLASS_NAME,
-    JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE | JSCLASS_NEW_ENUMERATE,
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_NEW_RESOLVE | JSCLASS_NEW_ENUMERATE,
     NPObjWrapper_AddProperty, NPObjWrapper_DelProperty,
     NPObjWrapper_GetProperty, NPObjWrapper_SetProperty,
     (JSEnumerateOp)NPObjWrapper_newEnumerate,
     (JSResolveOp)NPObjWrapper_NewResolve, NPObjWrapper_Convert,
     NPObjWrapper_Finalize, nsnull, nsnull, NPObjWrapper_Call,
     NPObjWrapper_Construct, nsnull, nsnull
   };
 
--- a/dom/src/events/nsJSEventListener.cpp
+++ b/dom/src/events/nsJSEventListener.cpp
@@ -228,16 +228,18 @@ nsJSEventListener::HandleEvent(nsIDOMEve
   JSContext* cx = nsnull;
   nsCOMPtr<nsIJSContextStack> stack =
     do_GetService("@mozilla.org/js/xpc/ContextStack;1");
   NS_ASSERTION(stack && NS_SUCCEEDED(stack->Peek(&cx)) && cx &&
                GetScriptContextFromJSContext(cx) == mContext,
                "JSEventListener has wrong script context?");
 #endif
   nsCOMPtr<nsIVariant> vrv;
+  xpc_UnmarkGrayObject(mScopeObject);
+  xpc_UnmarkGrayObject(mHandler);
   rv = mContext->CallEventHandler(mTarget, mScopeObject, mHandler, iargv,
                                   getter_AddRefs(vrv));
 
   if (NS_SUCCEEDED(rv)) {
     PRUint16 dataType = nsIDataType::VTYPE_VOID;
     if (vrv)
       vrv->GetDataType(&dataType);
 
--- a/dom/workers/ListenerManager.cpp
+++ b/dom/workers/ListenerManager.cpp
@@ -102,16 +102,19 @@ struct Listener : PRCList
     listener->mPhase = aPhase;
     listener->mWantsUntrusted = aWantsUntrusted;
     return listener;
   }
 
   static void
   Remove(JSContext* aCx, Listener* aListener)
   {
+    if (js::IsIncrementalBarrierNeeded(aCx))
+      js::IncrementalValueBarrier(aListener->mListenerVal);
+
     PR_REMOVE_LINK(aListener);
     JS_free(aCx, aListener);
   }
 
   jsval mListenerVal;
   ListenerManager::Phase mPhase;
   bool mWantsUntrusted;
 };
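Listener::Remove above shows the pre-barrier idiom this patch requires of manually managed memory: before JS_free releases a structure holding a GC thing the engine cannot see through a trace hook, the stored value is pushed through the incremental barrier, so a snapshot-at-the-beginning mark that has already passed this location still marks the referent. A sketch of the same idiom for a hypothetical cache entry:

    struct CacheEntry {
        jsval value;   // GC thing held outside any engine-visible root
    };

    static void
    DestroyCacheEntry(JSContext *cx, CacheEntry *entry)
    {
        // If an incremental GC is mid-mark, report the value before it
        // disappears; otherwise the collector may never mark the referent.
        if (js::IsIncrementalBarrierNeeded(cx))
            js::IncrementalValueBarrier(entry->value);

        JS_free(cx, entry);
    }
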
--- a/dom/workers/Worker.cpp
+++ b/dom/workers/Worker.cpp
@@ -295,17 +295,17 @@ private:
     }
 
     return worker->PostMessage(aCx, message);
   }
 };
 
 JSClass Worker::sClass = {
   "Worker",
-  JSCLASS_HAS_PRIVATE,
+  JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS,
   JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
   JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Finalize, NULL, NULL, NULL,
   NULL, NULL, NULL, Trace, NULL
 };
 
 JSPropertySpec Worker::sProperties[] = {
   { sEventStrings[STRING_onerror], STRING_onerror, PROPERTY_FLAGS,
     GetEventListener, SetEventListener },
@@ -410,17 +410,17 @@ private:
     if (worker) {
       worker->TraceInstance(aTrc);
     }
   }
 };
 
 JSClass ChromeWorker::sClass = {
   "ChromeWorker",
-  JSCLASS_HAS_PRIVATE,
+  JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS,
   JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
   JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Finalize, NULL, NULL, NULL,
   NULL, NULL, NULL, Trace, NULL
 };
 
 WorkerPrivate*
 Worker::GetInstancePrivate(JSContext* aCx, JSObject* aObj,
                            const char* aFunctionName)
--- a/dom/workers/WorkerScope.cpp
+++ b/dom/workers/WorkerScope.cpp
@@ -794,17 +794,17 @@ private:
     }
 
     return scope->mWorker->PostMessageToParent(aCx, message);
   }
 };
 
 JSClass DedicatedWorkerGlobalScope::sClass = {
   "DedicatedWorkerGlobalScope",
-  JSCLASS_GLOBAL_FLAGS | JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE,
+  JSCLASS_GLOBAL_FLAGS | JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_NEW_RESOLVE,
   JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
   JS_EnumerateStub, reinterpret_cast<JSResolveOp>(Resolve), JS_ConvertStub,
   Finalize, NULL, NULL, NULL, NULL, NULL, NULL, Trace, NULL
 };
 
 JSPropertySpec DedicatedWorkerGlobalScope::sProperties[] = {
   { sEventStrings[STRING_onmessage], STRING_onmessage, PROPERTY_FLAGS,
     GetEventListener, SetEventListener },
--- a/dom/workers/XMLHttpRequest.cpp
+++ b/dom/workers/XMLHttpRequest.cpp
@@ -215,17 +215,17 @@ private:
     }
 
     return priv->SetEventListenerOnEventTarget(aCx, name + 2, aVp);
   }
 };
 
 JSClass XMLHttpRequestUpload::sClass = {
   "XMLHttpRequestUpload",
-  JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(SLOT_COUNT),
+  JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(SLOT_COUNT),
   JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
   JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Finalize,
   NULL, NULL, NULL, NULL, NULL, NULL, Trace, NULL
 };
 
 JSPropertySpec XMLHttpRequestUpload::sProperties[] = {
   { sEventStrings[STRING_onabort], STRING_onabort, PROPERTY_FLAGS,
     GetEventListener, SetEventListener },
@@ -764,17 +764,17 @@ private:
     }
 
     return priv->OverrideMimeType(aCx, mimeType);
   }
 };
 
 JSClass XMLHttpRequest::sClass = {
   "XMLHttpRequest",
-  JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(SLOT_COUNT),
+  JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(SLOT_COUNT),
   JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
   JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Finalize,
   NULL, NULL, NULL, NULL, NULL, NULL, Trace, NULL
 };
 
 JSPropertySpec XMLHttpRequest::sProperties[] = {
 
 #define GENERIC_READONLY_PROPERTY(_name) \
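Each JSClass touched above gains JSCLASS_IMPLEMENTS_BARRIERS, the flag this patch uses to declare that a class with a Trace hook keeps its stores properly barriered and is therefore safe to collect incrementally; trace-hook classes without the flag (e.g. from a binary add-on) cause incremental GC to be disabled, which is what isIncrementalGCEnabled and JS_TELEMETRY_GC_INCREMENTAL_DISABLED report. The shape is always the same, abbreviated here for a hypothetical wrapper class:

    static JSClass sMyWrapperClass = {
        "MyWrapper",
        JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS,
        JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
        JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Finalize,
        // Trace reports the GC things reachable from the private data, so
        // stores into that data must be paired with incremental barriers.
        NULL, NULL, NULL, NULL, NULL, NULL, Trace, NULL
    };
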
--- a/js/jsd/jsd_xpc.cpp
+++ b/js/jsd/jsd_xpc.cpp
@@ -102,18 +102,18 @@
 #define NS_CATMAN_CTRID   "@mozilla.org/categorymanager;1"
 #define NS_JSRT_CTRID     "@mozilla.org/js/xpc/RuntimeService;1"
 
 #define AUTOREG_CATEGORY  "xpcom-autoregistration"
 #define APPSTART_CATEGORY "app-startup"
 #define JSD_AUTOREG_ENTRY "JSDebugger Startup Observer"
 #define JSD_STARTUP_ENTRY "JSDebugger Startup Observer"
 
-static JSBool
-jsds_GCCallbackProc (JSContext *cx, JSGCStatus status);
+static void
+jsds_GCSliceCallbackProc (JSRuntime *rt, js::GCProgress progress, const js::GCDescription &desc);
 
 /*******************************************************************************
  * global vars
  ******************************************************************************/
 
 const char implementationString[] = "Mozilla JavaScript Debugger Service";
 
 const char jsdServiceCtrID[] = "@mozilla.org/js/jsd/debugger-service;1";
@@ -123,19 +123,19 @@ const char jsdASObserverCtrID[] = "servi
 #ifdef DEBUG_verbose
 PRUint32 gScriptCount   = 0;
 PRUint32 gValueCount    = 0;
 PRUint32 gPropertyCount = 0;
 PRUint32 gContextCount  = 0;
 PRUint32 gFrameCount  = 0;
 #endif
 
-static jsdService   *gJsds       = 0;
-static JSGCCallback  gLastGCProc = jsds_GCCallbackProc;
-static JSGCStatus    gGCStatus   = JSGC_END;
+static jsdService          *gJsds               = 0;
+static js::GCSliceCallback gPrevGCSliceCallback = jsds_GCSliceCallbackProc;
+static bool                gGCRunning           = false;
 
 static struct DeadScript {
     PRCList     links;
     JSDContext *jsdc;
     jsdIScript *script;
 } *gDeadScripts = nsnull;
 
 enum PatternType {
@@ -455,21 +455,18 @@ jsds_FilterHook (JSDContext *jsdc, JSDTh
     
 }
 
 /*******************************************************************************
  * c callbacks
  *******************************************************************************/
 
 static void
-jsds_NotifyPendingDeadScripts (JSContext *cx)
+jsds_NotifyPendingDeadScripts (JSRuntime *rt)
 {
-#ifdef CAUTIOUS_SCRIPTHOOK
-    JSRuntime *rt = JS_GetRuntime(cx);
-#endif
     jsdService *jsds = gJsds;
 
     nsCOMPtr<jsdIScriptHook> hook;
     if (jsds) {
         NS_ADDREF(jsds);
         jsds->GetScriptHook (getter_AddRefs(hook));
         jsds->DoPause(nsnull, true);
     }
@@ -506,41 +503,33 @@ jsds_NotifyPendingDeadScripts (JSContext
     }
 
     if (jsds) {
         jsds->DoUnPause(nsnull, true);
         NS_RELEASE(jsds);
     }
 }
 
-static JSBool
-jsds_GCCallbackProc (JSContext *cx, JSGCStatus status)
+static void
+jsds_GCSliceCallbackProc (JSRuntime *rt, js::GCProgress progress, const js::GCDescription &desc)
 {
-#ifdef DEBUG_verbose
-    printf ("new gc status is %i\n", status);
-#endif
-    if (status == JSGC_END) {
-        /* just to guard against reentering. */
-        gGCStatus = JSGC_BEGIN;
+    if (progress == js::GC_CYCLE_END || progress == js::GC_SLICE_END) {
+        NS_ASSERTION(gGCRunning, "GC slice callback was missed");
+
         while (gDeadScripts)
-            jsds_NotifyPendingDeadScripts (cx);
+            jsds_NotifyPendingDeadScripts (rt);
+
+        gGCRunning = false;
+    } else {
+        NS_ASSERTION(!gGCRunning, "should not re-enter GC");
+        gGCRunning = true;
     }
 
-    gGCStatus = status;
-    if (gLastGCProc && !gLastGCProc (cx, status)) {
-        /*
-         * If gLastGCProc returns false, then the GC will abort without making
-         * another callback with status=JSGC_END, so set the status to JSGC_END
-         * here.
-         */
-        gGCStatus = JSGC_END;
-        return JS_FALSE;
-    }
-    
-    return JS_TRUE;
+    if (gPrevGCSliceCallback)
+        (*gPrevGCSliceCallback)(rt, progress, desc);
 }
 
 static uintN
 jsds_ErrorHookProc (JSDContext *jsdc, JSContext *cx, const char *message,
                     JSErrorReport *report, void *callerdata)
 {
     static bool running = false;
 
@@ -746,17 +735,17 @@ jsds_ScriptHookProc (JSDContext* jsdc, J
          * to remove the reference held in the JSDScript private data. */
         nsCOMPtr<jsdIScript> jsdis = 
             static_cast<jsdIScript *>(JSD_GetScriptPrivate(jsdscript));
         if (!jsdis)
             return;
 
         jsdis->Invalidate();
 
-        if (gGCStatus == JSGC_END) {
+        if (!gGCRunning) {
             nsCOMPtr<jsdIScriptHook> hook;
             gJsds->GetScriptHook(getter_AddRefs(hook));
             if (!hook)
                 return;
 
             /* if GC *isn't* running, we can tell the user about the script
              * delete now. */
 #ifdef CAUTIOUS_SCRIPTHOOK
@@ -2575,19 +2564,19 @@ jsdService::DeactivateDebugger ()
 NS_IMETHODIMP
 jsdService::ActivateDebugger (JSRuntime *rt)
 {
     if (mOn)
         return (rt == mRuntime) ? NS_OK : NS_ERROR_ALREADY_INITIALIZED;
 
     mRuntime = rt;
 
-    if (gLastGCProc == jsds_GCCallbackProc)
+    if (gPrevGCSliceCallback == jsds_GCSliceCallbackProc)
         /* condition indicates that the callback proc has not been set yet */
-        gLastGCProc = JS_SetGCCallbackRT (rt, jsds_GCCallbackProc);
+        gPrevGCSliceCallback = js::SetGCSliceCallback (rt, jsds_GCSliceCallbackProc);
 
     mCx = JSD_DebuggerOnForUser (rt, NULL, NULL);
     if (!mCx)
         return NS_ERROR_FAILURE;
 
     JSContext *cx   = JSD_GetDefaultJSContext (mCx);
     JSObject  *glob = JS_GetGlobalObject (cx);
 
@@ -2647,29 +2636,24 @@ jsdService::Off (void)
 {
     if (!mOn)
         return NS_OK;
     
     if (!mCx || !mRuntime)
         return NS_ERROR_NOT_INITIALIZED;
     
     if (gDeadScripts) {
-        if (gGCStatus != JSGC_END)
+        if (gGCRunning)
             return NS_ERROR_NOT_AVAILABLE;
 
         JSContext *cx = JSD_GetDefaultJSContext(mCx);
         while (gDeadScripts)
-            jsds_NotifyPendingDeadScripts (cx);
+            jsds_NotifyPendingDeadScripts (JS_GetRuntime(cx));
     }
 
-    /*
-    if (gLastGCProc != jsds_GCCallbackProc)
-        JS_SetGCCallbackRT (mRuntime, gLastGCProc);
-    */
-
     DeactivateDebugger();
 
 #ifdef DEBUG
     printf ("+++ JavaScript debugging hooks removed.\n");
 #endif
 
     nsresult rv;
     nsCOMPtr<nsIXPConnect> xpc = do_GetService(nsIXPConnect::GetCID(), &rv);
@@ -3369,17 +3353,17 @@ jsdService::~jsdService()
     mBreakpointHook = nsnull;
     mDebugHook = nsnull;
     mDebuggerHook = nsnull;
     mInterruptHook = nsnull;
     mScriptHook = nsnull;
     mThrowHook = nsnull;
     mTopLevelHook = nsnull;
     mFunctionHook = nsnull;
-    gGCStatus = JSGC_END;
+    gGCRunning = false;
     Off();
     gJsds = nsnull;
 }
 
 jsdService *
 jsdService::GetService ()
 {
     if (!gJsds)
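The jsd callback above relies on how the notifications pair up: Statistics::beginSlice (later in this patch) sends GC_CYCLE_BEGIN for the first slice of a collection and GC_SLICE_BEGIN for every later one, while endSlice sends GC_CYCLE_END for the last slice and GC_SLICE_END otherwise. A callback can therefore track whether a slice is active with a single flag:

    static bool gInSlice = false;

    static void
    TrackSlices(JSRuntime *rt, js::GCProgress progress, const js::GCDescription &desc)
    {
        if (progress == js::GC_CYCLE_BEGIN || progress == js::GC_SLICE_BEGIN)
            gInSlice = true;    // a slice (or the whole cycle) just started
        else
            gInSlice = false;   // GC_SLICE_END or GC_CYCLE_END
    }
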
--- a/js/src/Makefile.in
+++ b/js/src/Makefile.in
@@ -114,17 +114,16 @@ CPPSRCS		= \
 		jsdbgapi.cpp \
 		jsdhash.cpp \
 		jsdtoa.cpp \
 		jsexn.cpp \
 		jsfriendapi.cpp \
 		jsfun.cpp \
 		jsgc.cpp \
 		jsgcmark.cpp \
-		jsgcstats.cpp \
 		jscrashreport.cpp \
 		jshash.cpp \
 		jsinfer.cpp \
 		jsinterp.cpp \
 		jsiter.cpp \
 		jslog2.cpp \
 		jsmath.cpp \
 		jsnativestack.cpp \
@@ -188,17 +187,16 @@ INSTALLED_HEADERS = \
 		jsclass.h \
 		jsclist.h \
 		jscompat.h \
 		jsdbgapi.h \
 		jsdhash.h \
 		jsfriendapi.h \
 		jsgc.h \
 		jscell.h \
-		jsgcstats.h \
 		jshash.h \
 		jslock.h \
 		json.h \
 		jsproxy.h \
 		jsprf.h \
 		jsproto.tbl \
 		jsprvtd.h \
 		jspubtd.h \
--- a/js/src/builtin/MapObject.cpp
+++ b/js/src/builtin/MapObject.cpp
@@ -141,17 +141,17 @@ HashableValue::equals(const HashableValu
     return b;
 }
 
 
 /*** Map *****************************************************************************************/
 
 Class MapObject::class_ = {
     "Map",
-    JSCLASS_HAS_PRIVATE |
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_CACHED_PROTO(JSProto_Map),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
     JS_ConvertStub,
@@ -292,17 +292,17 @@ js_InitMapClass(JSContext *cx, JSObject 
     return MapObject::initClass(cx, obj);
 }
 
 
 /*** Set *****************************************************************************************/
 
 Class SetObject::class_ = {
     "Set",
-    JSCLASS_HAS_PRIVATE |
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_CACHED_PROTO(JSProto_Set),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
     JS_ConvertStub,
--- a/js/src/ctypes/CTypes.cpp
+++ b/js/src/ctypes/CTypes.cpp
@@ -250,34 +250,34 @@ static JSClass sCDataProtoClass = {
   0,
   JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
   JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, JS_FinalizeStub,
   JSCLASS_NO_OPTIONAL_MEMBERS
 };
 
 static JSClass sCTypeClass = {
   "CType",
-  JSCLASS_HAS_RESERVED_SLOTS(CTYPE_SLOTS),
+  JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(CTYPE_SLOTS),
   JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
   JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, CType::Finalize,
   NULL, NULL, CType::ConstructData, CType::ConstructData, NULL,
   CType::HasInstance, CType::Trace, NULL
 };
 
 static JSClass sCDataClass = {
   "CData",
   JSCLASS_HAS_RESERVED_SLOTS(CDATA_SLOTS),
   JS_PropertyStub, JS_PropertyStub, ArrayType::Getter, ArrayType::Setter,
   JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, CData::Finalize,
   NULL, NULL, FunctionType::Call, FunctionType::Call, NULL, NULL, NULL, NULL
 };
 
 static JSClass sCClosureClass = {
   "CClosure",
-  JSCLASS_HAS_RESERVED_SLOTS(CCLOSURE_SLOTS),
+  JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(CCLOSURE_SLOTS),
   JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
   JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, CClosure::Finalize,
   NULL, NULL, NULL, NULL, NULL, NULL, CClosure::Trace, NULL
 };
 
 #define CTYPESFN_FLAGS \
   (JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT)
 
--- a/js/src/gc/Barrier-inl.h
+++ b/js/src/gc/Barrier-inl.h
@@ -261,11 +261,36 @@ HeapId::operator=(const HeapId &v)
 {
     pre();
     JS_ASSERT(!IsPoisonedId(v.value));
     value = v.value;
     post();
     return *this;
 }
 
+inline const Value &
+ReadBarrieredValue::get() const
+{
+    if (value.isObject())
+        JSObject::readBarrier(&value.toObject());
+    else if (value.isString())
+        JSString::readBarrier(value.toString());
+    else
+        JS_ASSERT(!value.isMarkable());
+
+    return value;
+}
+
+inline
+ReadBarrieredValue::operator const Value &() const
+{
+    return get();
+}
+
+inline JSObject &
+ReadBarrieredValue::toObject() const
+{
+    return get().toObject();
+}
+
 } /* namespace js */
 
 #endif /* jsgc_barrier_inl_h___ */
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -451,11 +451,25 @@ class ReadBarriered
     void set(T *v) { value = v; }
 
     operator bool() { return !!value; }
 
     template<class U>
     operator MarkablePtr<U>() const { return MarkablePtr<U>(value); }
 };
 
+class ReadBarrieredValue
+{
+    Value value;
+
+  public:
+    ReadBarrieredValue() : value(UndefinedValue()) {}
+    ReadBarrieredValue(const Value &value) : value(value) {}
+
+    inline const Value &get() const;
+    inline operator const Value &() const;
+
+    inline JSObject &toObject() const;
+};
+
 }
 
 #endif /* jsgc_barrier_h___ */
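ReadBarrieredValue is the read-barrier counterpart to the write barriers above: it wraps a weakly held Value, and get() (defined in Barrier-inl.h) marks the referent whenever a read happens during an incremental mark, so a value the mutator has observed cannot be swept out from under it. A hedged usage sketch for a hypothetical weak cache slot:

    class WeakCacheSlot {
        ReadBarrieredValue slot;

      public:
        void store(const Value &v) { slot = ReadBarrieredValue(v); }

        Value fetch() const {
            // get() applies JSObject/JSString::readBarrier as needed,
            // keeping the referent alive if a GC is currently mid-mark.
            return slot.get();
        }
    };
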
--- a/js/src/gc/Statistics.cpp
+++ b/js/src/gc/Statistics.cpp
@@ -33,19 +33,20 @@
  * decision by deleting the provisions above and replace them with the notice
  * and other provisions required by the GPL or the LGPL. If you do not delete
  * the provisions above, a recipient may use your version of this file under
  * the terms of any one of the MPL, the GPL or the LGPL.
  *
  * ***** END LICENSE BLOCK ***** */
 
 #include <stdio.h>
-#include <ctype.h>
+#include <stdarg.h>
 
 #include "jscntxt.h"
+#include "jscompartment.h"
 #include "jscrashformat.h"
 #include "jscrashreport.h"
 #include "jsprf.h"
 #include "jsprobes.h"
 #include "jsutil.h"
 #include "prmjtime.h"
 
 #include "gc/Statistics.h"
@@ -64,88 +65,124 @@ ExplainReason(gcreason::Reason reason)
 
         default:
           JS_NOT_REACHED("bad GC reason");
           return "?";
 #undef SWITCH_REASON
     }
 }
 
-Statistics::ColumnInfo::ColumnInfo(const char *title, double t, double total)
-  : title(title)
+void
+Statistics::fmt(const char *f, ...)
 {
-    JS_snprintf(str, sizeof(str), "%.1f", t);
-    JS_snprintf(totalStr, sizeof(totalStr), "%.1f", total);
-    width = 6;
+    va_list va;
+    size_t off = strlen(buffer);
+
+    va_start(va, f);
+    JS_vsnprintf(buffer + off, BUFFER_SIZE - off, f, va);
+    va_end(va);
 }
 
-Statistics::ColumnInfo::ColumnInfo(const char *title, double t)
-  : title(title)
+void
+Statistics::fmtIfNonzero(const char *name, double t)
 {
-    JS_snprintf(str, sizeof(str), "%.1f", t);
-    strcpy(totalStr, "n/a");
-    width = 6;
+    if (t) {
+        if (needComma)
+            fmt(", ");
+        fmt("%s: %.1f", name, t);
+        needComma = true;
+    }
 }
 
-Statistics::ColumnInfo::ColumnInfo(const char *title, unsigned int data)
-  : title(title)
+void
+Statistics::formatPhases(int64_t *times)
 {
-    JS_snprintf(str, sizeof(str), "%d", data);
-    strcpy(totalStr, "n/a");
-    width = 4;
+    needComma = false;
+    fmtIfNonzero("mark", t(times[PHASE_MARK]));
+    fmtIfNonzero("mark-roots", t(times[PHASE_MARK_ROOTS]));
+    fmtIfNonzero("mark-delayed", t(times[PHASE_MARK_DELAYED]));
+    fmtIfNonzero("mark-other", t(times[PHASE_MARK_OTHER]));
+    fmtIfNonzero("sweep", t(times[PHASE_SWEEP]));
+    fmtIfNonzero("sweep-obj", t(times[PHASE_SWEEP_OBJECT]));
+    fmtIfNonzero("sweep-string", t(times[PHASE_SWEEP_STRING]));
+    fmtIfNonzero("sweep-script", t(times[PHASE_SWEEP_SCRIPT]));
+    fmtIfNonzero("sweep-shape", t(times[PHASE_SWEEP_SHAPE]));
+    fmtIfNonzero("discard-code", t(times[PHASE_DISCARD_CODE]));
+    fmtIfNonzero("discard-analysis", t(times[PHASE_DISCARD_ANALYSIS]));
+    fmtIfNonzero("xpconnect", t(times[PHASE_XPCONNECT]));
+    fmtIfNonzero("deallocate", t(times[PHASE_DESTROY]));
 }
 
-Statistics::ColumnInfo::ColumnInfo(const char *title, const char *data)
-  : title(title)
+/* Except for the first and last, slices of less than 12ms are not reported. */
+static const int64_t SLICE_MIN_REPORT_TIME = 12 * PRMJ_USEC_PER_MSEC;
+
+const char *
+Statistics::formatData()
 {
-    JS_ASSERT(strlen(data) < sizeof(str));
-    strcpy(str, data);
-    strcpy(totalStr, "n/a ");
-    width = 0;
-}
+    buffer[0] = 0x00;
+
+    int64_t total = 0, longest = 0;
 
-static const int NUM_COLUMNS = 17;
+    for (SliceData *slice = slices.begin(); slice != slices.end(); slice++) {
+        total += slice->duration();
+        if (slice->duration() > longest)
+            longest = slice->duration();
+    }
 
-void
-Statistics::makeTable(ColumnInfo *cols)
-{
-    int i = 0;
+    double mmu20 = computeMMU(20 * PRMJ_USEC_PER_MSEC);
+    double mmu50 = computeMMU(50 * PRMJ_USEC_PER_MSEC);
 
-    cols[i++] = ColumnInfo("Type", compartment ? "Comp" : "Glob");
+    fmt("TotalTime: %.1fms, Type: %s", t(total), compartment ? "compartment" : "global");
+    fmt(", MMU(20ms): %d%%, MMU(50ms): %d%%", int(mmu20 * 100), int(mmu50 * 100));
+
+    if (slices.length() > 1)
+        fmt(", MaxPause: %.1f", t(longest));
+    else
+        fmt(", Reason: %s", ExplainReason(slices[0].reason));
 
-    cols[i++] = ColumnInfo("Total", t(PHASE_GC), total(PHASE_GC));
-    cols[i++] = ColumnInfo("Wait", beginDelay(PHASE_MARK, PHASE_GC));
-    cols[i++] = ColumnInfo("Mark", t(PHASE_MARK), total(PHASE_MARK));
-    cols[i++] = ColumnInfo("Sweep", t(PHASE_SWEEP), total(PHASE_SWEEP));
-    cols[i++] = ColumnInfo("FinObj", t(PHASE_SWEEP_OBJECT), total(PHASE_SWEEP_OBJECT));
-    cols[i++] = ColumnInfo("FinStr", t(PHASE_SWEEP_STRING), total(PHASE_SWEEP_STRING));
-    cols[i++] = ColumnInfo("FinScr", t(PHASE_SWEEP_SCRIPT), total(PHASE_SWEEP_SCRIPT));
-    cols[i++] = ColumnInfo("FinShp", t(PHASE_SWEEP_SHAPE), total(PHASE_SWEEP_SHAPE));
-    cols[i++] = ColumnInfo("DisCod", t(PHASE_DISCARD_CODE), total(PHASE_DISCARD_CODE));
-    cols[i++] = ColumnInfo("DisAnl", t(PHASE_DISCARD_ANALYSIS), total(PHASE_DISCARD_ANALYSIS));
-    cols[i++] = ColumnInfo("XPCnct", t(PHASE_XPCONNECT), total(PHASE_XPCONNECT));
-    cols[i++] = ColumnInfo("Destry", t(PHASE_DESTROY), total(PHASE_DESTROY));
-    cols[i++] = ColumnInfo("End", endDelay(PHASE_GC, PHASE_DESTROY));
+    if (wasReset)
+        fmt(", ***RESET***");
+
+    fmt(", +chunks: %d, -chunks: %d\n", counts[STAT_NEW_CHUNK], counts[STAT_DESTROY_CHUNK]);
+
+    if (slices.length() > 1) {
+        for (size_t i = 0; i < slices.length(); i++) {
+            int64_t width = slices[i].duration();
+            if (i != 0 && i != slices.length() - 1 && width < SLICE_MIN_REPORT_TIME)
+                continue;
 
-    cols[i++] = ColumnInfo("+Chu", counts[STAT_NEW_CHUNK]);
-    cols[i++] = ColumnInfo("-Chu", counts[STAT_DESTROY_CHUNK]);
+            fmt("    Slice %d @ %.1fms (Pause: %.1f, Reason: %s): ",
+                i,
+                t(slices[i].end - slices[0].start),
+                t(width),
+                ExplainReason(slices[i].reason));
+            formatPhases(slices[i].phaseTimes);
+            fmt("\n");
+        }
 
-    cols[i++] = ColumnInfo("Reason", ExplainReason(triggerReason));
+        fmt("    Totals: ");
+    }
 
-    JS_ASSERT(i == NUM_COLUMNS);
+    formatPhases(phaseTimes);
+    fmt("\n");
+
+    return buffer;
 }
 
 Statistics::Statistics(JSRuntime *rt)
   : runtime(rt),
-    triggerReason(gcreason::NO_REASON)
+    startupTime(PRMJ_Now()),
+    fp(NULL),
+    fullFormat(false),
+    compartment(NULL),
+    wasReset(false),
+    needComma(false)
 {
+    PodArrayZero(phaseTotals);
     PodArrayZero(counts);
-    PodArrayZero(totals);
-
-    startupTime = PRMJ_Now();
 
     char *env = getenv("MOZ_GCTIMER");
     if (!env || strcmp(env, "none") == 0) {
         fp = NULL;
         return;
     }
 
     if (strcmp(env, "stdout") == 0) {
@@ -154,184 +191,199 @@ Statistics::Statistics(JSRuntime *rt)
     } else if (strcmp(env, "stderr") == 0) {
         fullFormat = false;
         fp = stderr;
     } else {
         fullFormat = true;
 
         fp = fopen(env, "a");
         JS_ASSERT(fp);
-
-        fprintf(fp, "     AppTime");
-
-        ColumnInfo cols[NUM_COLUMNS];
-        makeTable(cols);
-        for (int i = 0; i < NUM_COLUMNS; i++)
-            fprintf(fp, ", %*s", cols[i].width, cols[i].title);
-        fprintf(fp, "\n");
     }
 }
 
 Statistics::~Statistics()
 {
     if (fp) {
         if (fullFormat) {
-            fprintf(fp, "------>TOTAL");
-
-            ColumnInfo cols[NUM_COLUMNS];
-            makeTable(cols);
-            for (int i = 0; i < NUM_COLUMNS && cols[i].totalStr[0]; i++)
-                fprintf(fp, ", %*s", cols[i].width, cols[i].totalStr);
-            fprintf(fp, "\n");
+            buffer[0] = 0x00;
+            formatPhases(phaseTotals);
+            fprintf(fp, "TOTALS\n%s\n\n-------\n", buffer);
         }
 
         if (fp != stdout && fp != stderr)
             fclose(fp);
     }
 }
 
-struct GCCrashData
-{
-    int isRegen;
-    int isCompartment;
-};
-
-void
-Statistics::beginGC(JSCompartment *comp, gcreason::Reason reason)
+double
+Statistics::t(int64_t t)
 {
-    compartment = comp;
-
-    PodArrayZero(phaseStarts);
-    PodArrayZero(phaseEnds);
-    PodArrayZero(phaseTimes);
-
-    triggerReason = reason;
-
-    beginPhase(PHASE_GC);
-    Probes::GCStart();
-
-    GCCrashData crashData;
-    crashData.isCompartment = !!compartment;
-    crash::SaveCrashData(crash::JS_CRASH_TAG_GC, &crashData, sizeof(crashData));
-}
-
-double
-Statistics::t(Phase phase)
-{
-    return double(phaseTimes[phase]) / PRMJ_USEC_PER_MSEC;
+    return double(t) / PRMJ_USEC_PER_MSEC;
 }
 
-double
-Statistics::total(Phase phase)
-{
-    return double(totals[phase]) / PRMJ_USEC_PER_MSEC;
-}
-
-double
-Statistics::beginDelay(Phase phase1, Phase phase2)
-{
-    return double(phaseStarts[phase1] - phaseStarts[phase2]) / PRMJ_USEC_PER_MSEC;
-}
-
-double
-Statistics::endDelay(Phase phase1, Phase phase2)
-{
-    return double(phaseEnds[phase1] - phaseEnds[phase2]) / PRMJ_USEC_PER_MSEC;
-}
-
-void
-Statistics::statsToString(char *buffer, size_t size)
+int64_t
+Statistics::gcDuration()
 {
-    JS_ASSERT(size);
-    buffer[0] = 0x00;
-
-    ColumnInfo cols[NUM_COLUMNS];
-    makeTable(cols);
-
-    size_t pos = 0;
-    for (int i = 0; i < NUM_COLUMNS; i++) {
-        int len = strlen(cols[i].title) + 1 + strlen(cols[i].str);
-        if (i > 0)
-            len += 2;
-        if (pos + len >= size)
-            break;
-        if (i > 0)
-            strcat(buffer, ", ");
-        strcat(buffer, cols[i].title);
-        strcat(buffer, ":");
-        strcat(buffer, cols[i].str);
-        pos += len;
-    }
+    return slices.back().end - slices[0].start;
 }
 
 void
 Statistics::printStats()
 {
     if (fullFormat) {
-        fprintf(fp, "%12.0f", double(phaseStarts[PHASE_GC] - startupTime) / PRMJ_USEC_PER_MSEC);
-
-        ColumnInfo cols[NUM_COLUMNS];
-        makeTable(cols);
-        for (int i = 0; i < NUM_COLUMNS; i++)
-            fprintf(fp, ", %*s", cols[i].width, cols[i].str);
-        fprintf(fp, "\n");
+        fprintf(fp, "GC(T+%.3fs) %s\n",
+                t(slices[0].start - startupTime) / 1000.0,
+                formatData());
     } else {
         fprintf(fp, "%f %f %f\n",
-                t(PHASE_GC), t(PHASE_MARK), t(PHASE_SWEEP));
+                t(gcDuration()),
+                t(phaseTimes[PHASE_MARK]),
+                t(phaseTimes[PHASE_SWEEP]));
     }
     fflush(fp);
 }
 
 void
+Statistics::beginGC()
+{
+    PodArrayZero(phaseStarts);
+    PodArrayZero(phaseTimes);
+
+    slices.clearAndFree();
+    wasReset = false;
+
+    Probes::GCStart();
+}
+
+void
 Statistics::endGC()
 {
     Probes::GCEnd();
-    endPhase(PHASE_GC);
     crash::SnapshotGCStack();
 
     for (int i = 0; i < PHASE_LIMIT; i++)
-        totals[i] += phaseTimes[i];
+        phaseTotals[i] += phaseTimes[i];
 
     if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback) {
-        (*cb)(JS_TELEMETRY_GC_REASON, triggerReason);
         (*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, compartment ? 1 : 0);
-        (*cb)(JS_TELEMETRY_GC_MS, t(PHASE_GC));
-        (*cb)(JS_TELEMETRY_GC_MARK_MS, t(PHASE_MARK));
-        (*cb)(JS_TELEMETRY_GC_SWEEP_MS, t(PHASE_SWEEP));
-    }
+        (*cb)(JS_TELEMETRY_GC_MS, t(gcDuration()));
+        (*cb)(JS_TELEMETRY_GC_MARK_MS, t(phaseTimes[PHASE_MARK]));
+        (*cb)(JS_TELEMETRY_GC_SWEEP_MS, t(phaseTimes[PHASE_SWEEP]));
+        (*cb)(JS_TELEMETRY_GC_RESET, wasReset);
+        (*cb)(JS_TELEMETRY_GC_INCREMENTAL_DISABLED, !runtime->gcIncrementalEnabled);
 
-    if (JSGCFinishedCallback cb = runtime->gcFinishedCallback) {
-        char buffer[1024];
-        statsToString(buffer, sizeof(buffer));
-        (*cb)(runtime, compartment, buffer);
+        double mmu50 = computeMMU(50 * PRMJ_USEC_PER_MSEC);
+        (*cb)(JS_TELEMETRY_GC_MMU_50, mmu50 * 100);
     }
 
     if (fp)
         printStats();
 
     PodArrayZero(counts);
 }
 
 void
+Statistics::beginSlice(JSCompartment *comp, gcreason::Reason reason)
+{
+    compartment = comp;
+
+    bool first = runtime->gcIncrementalState == gc::NO_INCREMENTAL;
+    if (first)
+        beginGC();
+
+    SliceData data(reason, PRMJ_Now());
+    (void) slices.append(data); /* Ignore any OOMs here. */
+
+    if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback)
+        (*cb)(JS_TELEMETRY_GC_REASON, reason);
+
+    if (GCSliceCallback cb = runtime->gcSliceCallback) {
+        GCDescription desc(NULL, !!compartment);
+        (*cb)(runtime, first ? GC_CYCLE_BEGIN : GC_SLICE_BEGIN, desc);
+    }
+}
+
+void
+Statistics::endSlice()
+{
+    slices.back().end = PRMJ_Now();
+
+    if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback)
+        (*cb)(JS_TELEMETRY_GC_SLICE_MS, t(slices.back().end - slices.back().start));
+
+    bool last = runtime->gcIncrementalState == gc::NO_INCREMENTAL;
+    if (last)
+        endGC();
+
+    if (GCSliceCallback cb = runtime->gcSliceCallback) {
+        if (last)
+            (*cb)(runtime, GC_CYCLE_END, GCDescription(formatData(), !!compartment));
+        else
+            (*cb)(runtime, GC_SLICE_END, GCDescription(NULL, !!compartment));
+    }
+}
+
+void
 Statistics::beginPhase(Phase phase)
 {
     phaseStarts[phase] = PRMJ_Now();
 
     if (phase == gcstats::PHASE_MARK)
         Probes::GCStartMarkPhase();
     else if (phase == gcstats::PHASE_SWEEP)
         Probes::GCStartSweepPhase();
 }
 
 void
 Statistics::endPhase(Phase phase)
 {
-    phaseEnds[phase] = PRMJ_Now();
-    phaseTimes[phase] += phaseEnds[phase] - phaseStarts[phase];
+    int64_t now = PRMJ_Now();
+    int64_t t = now - phaseStarts[phase];
+    slices.back().phaseTimes[phase] += t;
+    phaseTimes[phase] += t;
 
     if (phase == gcstats::PHASE_MARK)
         Probes::GCEndMarkPhase();
     else if (phase == gcstats::PHASE_SWEEP)
         Probes::GCEndSweepPhase();
 }
 
+/*
+ * MMU (minimum mutator utilization) is a measure of how much garbage collection
+ * is affecting the responsiveness of the system. MMU measurements are given
+ * with respect to a certain window size. If we report MMU(50ms) = 80%, then
+ * that means that, for any 50ms window of time, at least 80% of the window is
+ * devoted to the mutator. In other words, the GC is running for at most 20% of
+ * the window, or 10ms. The GC can run multiple slices during the 50ms window
+ * as long as the total time it spends is at most 10ms.
+ */
+double
+Statistics::computeMMU(int64_t window)
+{
+    JS_ASSERT(!slices.empty());
+
+    int64_t gc = slices[0].end - slices[0].start;
+    int64_t gcMax = gc;
+
+    if (gc >= window)
+        return 0.0;
+
+    int startIndex = 0;
+    for (size_t endIndex = 1; endIndex < slices.length(); endIndex++) {
+        gc += slices[endIndex].end - slices[endIndex].start;
+
+        while (slices[endIndex].end - slices[startIndex].end >= window) {
+            gc -= slices[startIndex].end - slices[startIndex].start;
+            startIndex++;
+        }
+
+        int64_t cur = gc;
+        if (slices[endIndex].end - slices[startIndex].start > window)
+            cur -= (slices[endIndex].end - slices[startIndex].start - window);
+        if (cur > gcMax)
+            gcMax = cur;
+    }
+
+    return double(window - gcMax) / window;
+}
+
 } /* namespace gcstats */
 } /* namespace js */
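A worked example of the computeMMU logic above: with a 50ms window and two slices occupying [0ms, 10ms] and [30ms, 35ms], the worst-placed window contains 15ms of GC, so computeMMU returns (50 - 15) / 50 = 0.7, reported as MMU(50ms) = 70%. A single 10ms pause in the window would give 80%, matching the example in the comment.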
--- a/js/src/gc/Statistics.h
+++ b/js/src/gc/Statistics.h
@@ -47,18 +47,20 @@
 #include "jsutil.h"
 
 struct JSCompartment;
 
 namespace js {
 namespace gcstats {
 
 enum Phase {
-    PHASE_GC,
     PHASE_MARK,
+    PHASE_MARK_ROOTS,
+    PHASE_MARK_DELAYED,
+    PHASE_MARK_OTHER,
     PHASE_SWEEP,
     PHASE_SWEEP_OBJECT,
     PHASE_SWEEP_STRING,
     PHASE_SWEEP_SCRIPT,
     PHASE_SWEEP_SHAPE,
     PHASE_DISCARD_CODE,
     PHASE_DISCARD_ANALYSIS,
     PHASE_XPCONNECT,
@@ -69,76 +71,96 @@ enum Phase {
 
 enum Stat {
     STAT_NEW_CHUNK,
     STAT_DESTROY_CHUNK,
 
     STAT_LIMIT
 };
 
+static const size_t BUFFER_SIZE = 8192;
+
 struct Statistics {
     Statistics(JSRuntime *rt);
     ~Statistics();
 
-    void beginGC(JSCompartment *comp, gcreason::Reason reason);
-    void endGC();
-
     void beginPhase(Phase phase);
     void endPhase(Phase phase);
 
+    void beginSlice(JSCompartment *comp, gcreason::Reason reason);
+    void endSlice();
+
+    void reset() { wasReset = true; }
+
     void count(Stat s) {
         JS_ASSERT(s < STAT_LIMIT);
         counts[s]++;
     }
 
   private:
     JSRuntime *runtime;
 
-    uint64_t startupTime;
+    int64_t startupTime;
 
     FILE *fp;
     bool fullFormat;
 
-    gcreason::Reason triggerReason;
     JSCompartment *compartment;
+    bool wasReset;
+
+    struct SliceData {
+        SliceData(gcreason::Reason reason, int64_t start)
+          : reason(reason), start(start)
+        {
+            PodArrayZero(phaseTimes);
+        }
+
+        gcreason::Reason reason;
+        int64_t start, end;
+        int64_t phaseTimes[PHASE_LIMIT];
 
-    uint64_t phaseStarts[PHASE_LIMIT];
-    uint64_t phaseEnds[PHASE_LIMIT];
-    uint64_t phaseTimes[PHASE_LIMIT];
-    uint64_t totals[PHASE_LIMIT];
+        int64_t duration() const { return end - start; }
+    };
+
+    Vector<SliceData, 8, SystemAllocPolicy> slices;
+
+    /* Most recent time when the given phase started. */
+    int64_t phaseStarts[PHASE_LIMIT];
+
+    /* Total time in a given phase for this GC. */
+    int64_t phaseTimes[PHASE_LIMIT];
+
+    /* Total time in a given phase over all GCs. */
+    int64_t phaseTotals[PHASE_LIMIT];
+
+    /* Number of events of this type for this GC. */
     unsigned int counts[STAT_LIMIT];
 
-    double t(Phase phase);
-    double total(Phase phase);
-    double beginDelay(Phase phase1, Phase phase2);
-    double endDelay(Phase phase1, Phase phase2);
-    void printStats();
-    void statsToString(char *buffer, size_t size);
+    char buffer[BUFFER_SIZE];
+    bool needComma;
+
+    void beginGC();
+    void endGC();
 
-    struct ColumnInfo {
-        const char *title;
-        char str[32];
-        char totalStr[32];
-        int width;
+    int64_t gcDuration();
+    double t(int64_t t);
+    void printStats();
+    void fmt(const char *f, ...);
+    void fmtIfNonzero(const char *name, double t);
+    void formatPhases(int64_t *times);
+    const char *formatData();
 
-        ColumnInfo() {}
-        ColumnInfo(const char *title, double t, double total);
-        ColumnInfo(const char *title, double t);
-        ColumnInfo(const char *title, unsigned int data);
-        ColumnInfo(const char *title, const char *data);
-    };
-
-    void makeTable(ColumnInfo *cols);
+    double computeMMU(int64_t resolution);
 };
 
-struct AutoGC {
-    AutoGC(Statistics &stats, JSCompartment *comp, gcreason::Reason reason
-           JS_GUARD_OBJECT_NOTIFIER_PARAM)
-      : stats(stats) { JS_GUARD_OBJECT_NOTIFIER_INIT; stats.beginGC(comp, reason); }
-    ~AutoGC() { stats.endGC(); }
+struct AutoGCSlice {
+    AutoGCSlice(Statistics &stats, JSCompartment *comp, gcreason::Reason reason
+                JS_GUARD_OBJECT_NOTIFIER_PARAM)
+      : stats(stats) { JS_GUARD_OBJECT_NOTIFIER_INIT; stats.beginSlice(comp, reason); }
+    ~AutoGCSlice() { stats.endSlice(); }
 
     Statistics &stats;
     JS_DECL_USE_GUARD_OBJECT_NOTIFIER
 };
 
 struct AutoPhase {
     AutoPhase(Statistics &stats, Phase phase JS_GUARD_OBJECT_NOTIFIER_PARAM)
       : stats(stats), phase(phase) { JS_GUARD_OBJECT_NOTIFIER_INIT; stats.beginPhase(phase); }
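AutoGCSlice (replacing the old AutoGC) and AutoPhase are RAII guards: the constructor opens a slice or phase and the destructor closes it, so early returns inside the collector cannot leave the statistics unbalanced. A sketch of the intended use inside a hypothetical collector function:

    void
    RunMarkSlice(JSRuntime *rt, JSCompartment *comp, js::gcreason::Reason reason)
    {
        js::gcstats::AutoGCSlice slice(rt->gcStats, comp, reason);
        {
            js::gcstats::AutoPhase ap(rt->gcStats, js::gcstats::PHASE_MARK);
            // ... perform a budgeted amount of marking ...
        }   // endPhase(PHASE_MARK) fires here, even on an early return
    }       // endSlice() fires here
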
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -718,41 +718,47 @@ JSRuntime::JSRuntime()
 #endif
     gcSystemAvailableChunkListHead(NULL),
     gcUserAvailableChunkListHead(NULL),
     gcKeepAtoms(0),
     gcBytes(0),
     gcMaxBytes(0),
     gcMaxMallocBytes(0),
     gcNumArenasFreeCommitted(0),
-    gcNumber(0),
-    gcIncrementalTracer(NULL),
     gcVerifyData(NULL),
     gcChunkAllocationSinceLastGC(false),
     gcNextFullGCTime(0),
     gcJitReleaseTime(0),
     gcMode(JSGC_MODE_GLOBAL),
     gcIsNeeded(0),
     gcWeakMapList(NULL),
     gcStats(thisFromCtor()),
+    gcNumber(0),
+    gcStartNumber(0),
     gcTriggerReason(gcreason::NO_REASON),
     gcTriggerCompartment(NULL),
     gcCurrentCompartment(NULL),
     gcCheckCompartment(NULL),
+    gcIncrementalState(gc::NO_INCREMENTAL),
+    gcCompartmentCreated(false),
+    gcLastMarkSlice(false),
+    gcInterFrameGC(0),
+    gcSliceBudget(SliceBudget::Unlimited),
+    gcIncrementalEnabled(true),
+    gcIncrementalCompartment(NULL),
     gcPoke(false),
-    gcMarkAndSweep(false),
     gcRunning(false),
 #ifdef JS_GC_ZEAL
     gcZeal_(0),
     gcZealFrequency(0),
     gcNextScheduled(0),
     gcDebugCompartmentGC(false),
 #endif
     gcCallback(NULL),
-    gcFinishedCallback(NULL),
+    gcSliceCallback(NULL),
     gcMallocBytes(0),
     gcBlackRootsTraceOp(NULL),
     gcBlackRootsData(NULL),
     gcGrayRootsTraceOp(NULL),
     gcGrayRootsData(NULL),
     scriptPCCounters(NULL),
     NaNValue(UndefinedValue()),
     negativeInfinityValue(UndefinedValue()),
@@ -809,16 +815,19 @@ JSRuntime::init(uint32_t maxbytes)
 
 #ifdef JS_METHODJIT_SPEW
     JMCheckLogging();
 #endif
 
     if (!js_InitGC(this, maxbytes))
         return false;
 
+    if (!gcMarker.init())
+        return false;
+
     if (!(atomsCompartment = this->new_<JSCompartment>(this)) ||
         !atomsCompartment->init(NULL) ||
         !compartments.append(atomsCompartment)) {
         Foreground::delete_(atomsCompartment);
         return false;
     }
 
     atomsCompartment->isSystemCompartment = true;
@@ -2432,23 +2441,17 @@ JS_SetExtraGCRootsTracer(JSRuntime *rt, 
     AssertNoGC(rt);
     rt->gcBlackRootsTraceOp = traceOp;
     rt->gcBlackRootsData = data;
 }
 
 JS_PUBLIC_API(void)
 JS_TracerInit(JSTracer *trc, JSContext *cx, JSTraceCallback callback)
 {
-    trc->runtime = cx->runtime;
-    trc->context = cx;
-    trc->callback = callback;
-    trc->debugPrinter = NULL;
-    trc->debugPrintArg = NULL;
-    trc->debugPrintIndex = size_t(-1);
-    trc->eagerlyTraceWeakMaps = true;
+    InitTracer(trc, cx->runtime, cx, callback);
 }
 
 JS_PUBLIC_API(void)
 JS_TraceRuntime(JSTracer *trc)
 {
     AssertNoGC(trc->runtime);
     TraceRuntime(trc);
 }
@@ -2870,18 +2873,17 @@ JS_IsGCMarkingTracer(JSTracer *trc)
 JS_PUBLIC_API(void)
 JS_CompartmentGC(JSContext *cx, JSCompartment *comp)
 {
     AssertNoGC(cx);
 
     /* We cannot GC the atoms compartment alone; use a full GC instead. */
     JS_ASSERT(comp != cx->runtime->atomsCompartment);
 
-    js::gc::VerifyBarriers(cx, true);
-    js_GC(cx, comp, GC_NORMAL, gcreason::API);
+    GC(cx, comp, GC_NORMAL, gcreason::API);
 }
 
 JS_PUBLIC_API(void)
 JS_GC(JSContext *cx)
 {
     JS_CompartmentGC(cx, NULL);
 }
 
@@ -2909,38 +2911,41 @@ JS_SetGCCallbackRT(JSRuntime *rt, JSGCCa
     rt->gcCallback = cb;
     return oldcb;
 }
 
 JS_PUBLIC_API(JSBool)
 JS_IsAboutToBeFinalized(void *thing)
 {
     gc::Cell *t = static_cast<gc::Cell *>(thing);
-    JS_ASSERT(!t->compartment()->rt->gcIncrementalTracer);
     return IsAboutToBeFinalized(t);
 }
 
 JS_PUBLIC_API(void)
 JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32_t value)
 {
     switch (key) {
       case JSGC_MAX_BYTES: {
         AutoLockGC lock(rt);
         JS_ASSERT(value >= rt->gcBytes);
         rt->gcMaxBytes = value;
         break;
       }
       case JSGC_MAX_MALLOC_BYTES:
         rt->setGCMaxMallocBytes(value);
         break;
+      case JSGC_SLICE_TIME_BUDGET:
+        rt->gcSliceBudget = SliceBudget::TimeBudget(value);
+        break;
       default:
         JS_ASSERT(key == JSGC_MODE);
         rt->gcMode = JSGCMode(value);
         JS_ASSERT(rt->gcMode == JSGC_MODE_GLOBAL ||
-                  rt->gcMode == JSGC_MODE_COMPARTMENT);
+                  rt->gcMode == JSGC_MODE_COMPARTMENT ||
+                  rt->gcMode == JSGC_MODE_INCREMENTAL);
         return;
     }
 }
 
 JS_PUBLIC_API(uint32_t)
 JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key)
 {
     switch (key) {
@@ -2951,19 +2956,21 @@ JS_GetGCParameter(JSRuntime *rt, JSGCPar
       case JSGC_BYTES:
         return uint32_t(rt->gcBytes);
       case JSGC_MODE:
         return uint32_t(rt->gcMode);
       case JSGC_UNUSED_CHUNKS:
         return uint32_t(rt->gcChunkPool.getEmptyCount());
       case JSGC_TOTAL_CHUNKS:
         return uint32_t(rt->gcChunkSet.count() + rt->gcChunkPool.getEmptyCount());
+      case JSGC_SLICE_TIME_BUDGET:
+        return uint32_t(rt->gcSliceBudget > 0 ? rt->gcSliceBudget / PRMJ_USEC_PER_MSEC : 0);
       default:
         JS_ASSERT(key == JSGC_NUMBER);
-        return rt->gcNumber;
+        return uint32_t(rt->gcNumber);
     }
 }
 
 JS_PUBLIC_API(void)
 JS_SetGCParameterForThread(JSContext *cx, JSGCParamKey key, uint32_t value)
 {
     JS_ASSERT(key == JSGC_MAX_CODE_CACHE_BYTES);
 }
@@ -6604,17 +6611,26 @@ JS_AbortIfWrongThread(JSRuntime *rt)
         MOZ_Assert("rt->onOwnerThread()", __FILE__, __LINE__);
 #endif
 }
 
 #ifdef JS_GC_ZEAL
 JS_PUBLIC_API(void)
 JS_SetGCZeal(JSContext *cx, uint8_t zeal, uint32_t frequency, JSBool compartment)
 {
-    bool schedule = zeal >= js::gc::ZealAllocThreshold && zeal < js::gc::ZealVerifierThreshold;
+#ifdef JS_GC_ZEAL
+    const char *env = getenv("JS_GC_ZEAL");
+    if (env) {
+        zeal = atoi(env);
+        frequency = 1;
+        compartment = false;
+    }
+#endif
+
+    bool schedule = zeal >= js::gc::ZealAllocValue;
     cx->runtime->gcZeal_ = zeal;
     cx->runtime->gcZealFrequency = frequency;
     cx->runtime->gcNextScheduled = schedule ? frequency : 0;
     cx->runtime->gcDebugCompartmentGC = !!compartment;
 }
 
 JS_PUBLIC_API(void)
 JS_ScheduleGC(JSContext *cx, uint32_t count, JSBool compartment)
--- a/js/src/jsapi.h
+++ b/js/src/jsapi.h
@@ -1423,18 +1423,21 @@ typedef enum JSContextOp {
  *                      return true.
  *   Any other value    For future compatibility the callback must do nothing
  *                      and return true in this case.
  */
 typedef JSBool
 (* JSContextCallback)(JSContext *cx, uintN contextOp);
 
 typedef enum JSGCStatus {
+    /* These callbacks happen outside the GC lock. */
     JSGC_BEGIN,
     JSGC_END,
+
+    /* These callbacks happen within the GC lock. */
     JSGC_MARK_END,
     JSGC_FINALIZE_END
 } JSGCStatus;
 
 typedef JSBool
 (* JSGCCallback)(JSContext *cx, JSGCStatus status);
 
 /*
@@ -3285,25 +3288,34 @@ typedef enum JSGCParamKey {
 
     /* Select GC mode. */
     JSGC_MODE = 6,
 
     /* Number of cached empty GC chunks. */
     JSGC_UNUSED_CHUNKS = 7,
 
     /* Total number of allocated GC chunks. */
-    JSGC_TOTAL_CHUNKS = 8
+    JSGC_TOTAL_CHUNKS = 8,
+
+    /* Max milliseconds to spend in an incremental GC slice. */
+    JSGC_SLICE_TIME_BUDGET = 9
 } JSGCParamKey;
 
 typedef enum JSGCMode {
     /* Perform only global GCs. */
     JSGC_MODE_GLOBAL = 0,
 
     /* Perform per-compartment GCs until too much garbage has accumulated. */
-    JSGC_MODE_COMPARTMENT = 1
+    JSGC_MODE_COMPARTMENT = 1,
+
+    /*
+     * Collect in short time slices rather than all at once. Implies
+     * JSGC_MODE_COMPARTMENT.
+     */
+    JSGC_MODE_INCREMENTAL = 2
 } JSGCMode;
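
Taken together with the new JSGC_SLICE_TIME_BUDGET parameter above, this lets
an embedder opt into incremental collection and bound slice length. A sketch
(the 10 ms budget is an arbitrary example, not a recommended value):

    JS_SetGCParameter(rt, JSGC_MODE, JSGC_MODE_INCREMENTAL);
    JS_SetGCParameter(rt, JSGC_SLICE_TIME_BUDGET, 10);  /* at most ~10 ms per slice */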
 
 extern JS_PUBLIC_API(void)
 JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32_t value);
 
 extern JS_PUBLIC_API(uint32_t)
 JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key);
 
@@ -3388,16 +3400,18 @@ struct JSClass {
 #define JSCLASS_HAS_PRIVATE             (1<<0)  /* objects have private slot */
 #define JSCLASS_NEW_ENUMERATE           (1<<1)  /* has JSNewEnumerateOp hook */
 #define JSCLASS_NEW_RESOLVE             (1<<2)  /* has JSNewResolveOp hook */
 #define JSCLASS_PRIVATE_IS_NSISUPPORTS  (1<<3)  /* private is (nsISupports *) */
 #define JSCLASS_NEW_RESOLVE_GETS_START  (1<<4)  /* JSNewResolveOp gets starting
                                                    object in prototype chain
                                                    passed in via *objp in/out
                                                    parameter */
+#define JSCLASS_IMPLEMENTS_BARRIERS     (1<<5)  /* Correctly implements GC read
+                                                   and write barriers */
 #define JSCLASS_DOCUMENT_OBSERVER       (1<<6)  /* DOM document observer */
 
 /*
  * To reserve slots fetched and stored via JS_Get/SetReservedSlot, bitwise-or
  * JSCLASS_HAS_RESERVED_SLOTS(n) into the initializer for JSClass.flags, where
  * n is a constant in [1, 255].  Reserved slots are indexed from 0 to n-1.
  */
 #define JSCLASS_RESERVED_SLOTS_SHIFT    8       /* room for 8 flags below */
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -277,20 +277,20 @@ js_DestroyContext(JSContext *cx, JSDestr
             /* Unpin all common atoms before final GC. */
             js_FinishCommonAtoms(cx);
 
             /* Clear debugging state to remove GC roots. */
             for (CompartmentsIter c(rt); !c.done(); c.next())
                 c->clearTraps(cx);
             JS_ClearAllWatchPoints(cx);
 
-            js_GC(cx, NULL, GC_NORMAL, gcreason::LAST_CONTEXT);
+            GC(cx, NULL, GC_NORMAL, gcreason::LAST_CONTEXT);
 
         } else if (mode == JSDCM_FORCE_GC) {
-            js_GC(cx, NULL, GC_NORMAL, gcreason::DESTROY_CONTEXT);
+            GC(cx, NULL, GC_NORMAL, gcreason::DESTROY_CONTEXT);
         } else if (mode == JSDCM_MAYBE_GC) {
             JS_MaybeGC(cx);
         }
         JS_LOCK_GC(rt);
     }
 #ifdef JS_THREADSAFE
     rt->gcHelperThread.waitBackgroundSweepEnd();
 #endif
@@ -870,17 +870,17 @@ js_InvokeOperationCallback(JSContext *cx
     /*
      * Reset the callback counter first, then run GC and yield. If another
      * thread is racing us here we will accumulate another callback request
      * which will be serviced at the next opportunity.
      */
     JS_ATOMIC_SET(&rt->interrupt, 0);
 
     if (rt->gcIsNeeded)
-        js_GC(cx, rt->gcTriggerCompartment, GC_NORMAL, rt->gcTriggerReason);
+        GCSlice(cx, rt->gcTriggerCompartment, GC_NORMAL, rt->gcTriggerReason);
 
 #ifdef JS_THREADSAFE
     /*
      * We automatically yield the current context every time the operation
      * callback is hit since we might be called as a result of an impending
      * GC on another thread, which would deadlock if we do not yield.
      * Operation callbacks are supposed to happen rarely (seconds, not
      * milliseconds) so it is acceptable to yield at every callback.
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -299,103 +299,149 @@ struct JSRuntime : js::RuntimeFriendFiel
     size_t              gcMaxMallocBytes;
 
     /*
      * Number of the committed arenas in all GC chunks including empty chunks.
      * The counter is volatile as it is read without the GC lock, see comments
      * in MaybeGC.
      */
     volatile uint32_t   gcNumArenasFreeCommitted;
-    uint32_t            gcNumber;
-    js::GCMarker        *gcIncrementalTracer;
+    js::FullGCMarker    gcMarker;
     void                *gcVerifyData;
     bool                gcChunkAllocationSinceLastGC;
     int64_t             gcNextFullGCTime;
     int64_t             gcJitReleaseTime;
     JSGCMode            gcMode;
-    volatile uintptr_t  gcBarrierFailed;
     volatile uintptr_t  gcIsNeeded;
     js::WeakMapBase     *gcWeakMapList;
     js::gcstats::Statistics gcStats;
 
+    /* Incremented on every GC slice. */
+    uint64_t            gcNumber;
+
+    /* The gcNumber at the time of the most recent GC's first slice. */
+    uint64_t            gcStartNumber;
+
     /* The reason that an interrupt-triggered GC should be called. */
     js::gcreason::Reason gcTriggerReason;
 
-    /* Pre-allocated space for the GC mark stack. */
-    uintptr_t           gcMarkStackArray[js::MARK_STACK_LENGTH];
-
     /*
     * Compartment that triggered GC. If more than one compartment needs GC,

      * gcTriggerCompartment is reset to NULL and a global GC is performed.
      */
     JSCompartment       *gcTriggerCompartment;
 
     /* Compartment that is currently involved in per-compartment GC */
     JSCompartment       *gcCurrentCompartment;
 
     /*
      * If this is non-NULL, all marked objects must belong to this compartment.
      * This is used to look for compartment bugs.
      */
     JSCompartment       *gcCheckCompartment;
 
     /*
+     * The current incremental GC phase. During non-incremental GC, this is
+     * always NO_INCREMENTAL.
+     */
+    js::gc::State       gcIncrementalState;
+
+    /* Indicates that a new compartment was created during incremental GC. */
+    bool                gcCompartmentCreated;
+
+    /* Indicates that the last incremental slice exhausted the mark stack. */
+    bool                gcLastMarkSlice;
+
+    /*
+     * Indicates that a GC slice has taken place in the middle of an animation
+     * frame, rather than at the beginning. In this case, the next slice will be
+     * delayed so that we don't get back-to-back slices.
+     */
+    volatile uintptr_t  gcInterFrameGC;
+
+    /* Default budget for incremental GC slice. See SliceBudget in jsgc.h. */
+    int64_t             gcSliceBudget;
+
+    /*
+     * We disable incremental GC if we encounter a js::Class with a trace hook
+     * that does not implement write barriers.
+     */
+    bool                gcIncrementalEnabled;
+
+    /* Compartment that is undergoing an incremental GC. */
+    JSCompartment       *gcIncrementalCompartment;
+
+    /*
+     * We save all conservative scanned roots in this vector so that
+     * conservative scanning can be "replayed" deterministically. In DEBUG mode,
+     * this allows us to run a non-incremental GC after every incremental GC to
+     * ensure that no objects were missed.
+     */
+#ifdef DEBUG
+    struct SavedGCRoot {
+        void *thing;
+        JSGCTraceKind kind;
+
+        SavedGCRoot(void *thing, JSGCTraceKind kind) : thing(thing), kind(kind) {}
+    };
+    js::Vector<SavedGCRoot, 0, js::SystemAllocPolicy> gcSavedRoots;
+#endif
+
+    /*
      * We can pack these flags as only the GC thread writes to them. Atomic
      * updates to packed bytes are not guaranteed, so stores issued by one
      * thread may be lost due to unsynchronized read-modify-write cycles on
      * other threads.
      */
     bool                gcPoke;
-    bool                gcMarkAndSweep;
     bool                gcRunning;
 
     /*
      * These options control the zealousness of the GC. The fundamental values
      * are gcNextScheduled and gcDebugCompartmentGC. At every allocation,
      * gcNextScheduled is decremented. When it reaches zero, we do either a
      * full or a compartmental GC, based on gcDebugCompartmentGC.
      *
-     * At this point, if gcZeal_ >= 2 then gcNextScheduled is reset to the
+     * At this point, if gcZeal_ == 2 then gcNextScheduled is reset to the
      * value of gcZealFrequency. Otherwise, no additional GCs take place.
      *
      * You can control these values in several ways:
      *   - Pass the -Z flag to the shell (see the usage info for details)
      *   - Call gczeal() or schedulegc() from inside shell-executed JS code
      *     (see the help for details)
      *
-     * Additionally, if gzZeal_ == 1 then we perform GCs in select places
-     * (during MaybeGC and whenever a GC poke happens). This option is mainly
-     * useful to embedders.
+     * If gcZeal_ == 1 then we perform GCs in select places (during MaybeGC and
+     * whenever a GC poke happens). This option is mainly useful to embedders.
      *
      * We use gcZeal_ == 4 to enable write barrier verification. See the comment
      * in jsgc.cpp for more information about this.
      */
 #ifdef JS_GC_ZEAL
     int                 gcZeal_;
     int                 gcZealFrequency;
     int                 gcNextScheduled;
     bool                gcDebugCompartmentGC;
 
     int gcZeal() { return gcZeal_; }
 
     bool needZealousGC() {
         if (gcNextScheduled > 0 && --gcNextScheduled == 0) {
-            if (gcZeal() >= js::gc::ZealAllocThreshold && gcZeal() < js::gc::ZealVerifierThreshold)
+            if (gcZeal() == js::gc::ZealAllocValue)
                 gcNextScheduled = gcZealFrequency;
             return true;
         }
         return false;
     }
 #else
     int gcZeal() { return 0; }
     bool needZealousGC() { return false; }
 #endif
 
     JSGCCallback        gcCallback;
-    JSGCFinishedCallback gcFinishedCallback;
+    js::GCSliceCallback gcSliceCallback;
 
   private:
     /*
      * Malloc counter to measure memory pressure for GC scheduling. It runs
      * from gcMaxMallocBytes down to zero.
      */
     volatile ptrdiff_t  gcMallocBytes;
 
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -68,17 +68,16 @@
 using namespace mozilla;
 using namespace js;
 using namespace js::gc;
 
 JSCompartment::JSCompartment(JSRuntime *rt)
   : rt(rt),
     principals(NULL),
     needsBarrier_(false),
-    gcIncrementalTracer(NULL),
     gcBytes(0),
     gcTriggerBytes(0),
     gcLastBytes(0),
     hold(false),
     typeLifoAlloc(TYPE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
     data(NULL),
     active(false),
     hasDebugModeCodeToDrop(false),
@@ -123,16 +122,19 @@ JSCompartment::init(JSContext *cx)
         return false;
 
     if (!regExps.init(cx))
         return false;
 
     if (!scriptFilenameTable.init())
         return false;
 
+    if (!barrierMarker_.init())
+        return false;
+
     return debuggees.init();
 }
 
 #ifdef JS_METHODJIT
 bool
 JSCompartment::ensureJaegerCompartmentExists(JSContext *cx)
 {
     if (jaegerCompartment_)
@@ -454,65 +456,72 @@ JSCompartment::markTypes(JSTracer *trc)
     for (CellIterUnderGC i(this, FINALIZE_TYPE_OBJECT); !i.done(); i.next()) {
         types::TypeObject *type = i.get<types::TypeObject>();
         MarkTypeObjectRoot(trc, &type, "mark_types_scan");
         JS_ASSERT(type == i.get<types::TypeObject>());
     }
 }
 
 void
+JSCompartment::discardJitCode(JSContext *cx)
+{
+    /*
+     * Kick all frames on the stack into the interpreter, and release all JIT
+     * code in the compartment.
+     */
+#ifdef JS_METHODJIT
+    mjit::ClearAllFrames(this);
+
+    for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
+        JSScript *script = i.get<JSScript>();
+        mjit::ReleaseScriptCode(cx, script);
+
+        /*
+         * Use counts for scripts are reset on GC. After discarding code we
+         * need to let it warm back up to get information like which opcodes
+         * are setting array holes or accessing getter properties.
+         */
+        script->resetUseCount();
+    }
+#endif
+}
+
+void
 JSCompartment::sweep(JSContext *cx, bool releaseTypes)
 {
     /* Remove dead wrappers from the table. */
     for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
         JS_ASSERT_IF(IsAboutToBeFinalized(e.front().key) &&
                      !IsAboutToBeFinalized(e.front().value),
                      e.front().key.isString());
         if (IsAboutToBeFinalized(e.front().key) ||
             IsAboutToBeFinalized(e.front().value)) {
             e.removeFront();
         }
     }
 
     /* Remove dead references held weakly by the compartment. */
 
+    regExps.sweep(rt);
+
     sweepBaseShapeTable(cx);
     sweepInitialShapeTable(cx);
     sweepNewTypeObjectTable(cx, newTypeObjects);
     sweepNewTypeObjectTable(cx, lazyTypeObjects);
 
     if (emptyTypeObject && IsAboutToBeFinalized(emptyTypeObject))
         emptyTypeObject = NULL;
 
     newObjectCache.reset();
 
     sweepBreakpoints(cx);
 
     {
         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_DISCARD_CODE);
-
-        /*
-         * Kick all frames on the stack into the interpreter, and release all JIT
-         * code in the compartment.
-         */
-#ifdef JS_METHODJIT
-        mjit::ClearAllFrames(this);
-
-        for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
-            JSScript *script = i.get<JSScript>();
-            mjit::ReleaseScriptCode(cx, script);
-
-            /*
-             * Use counts for scripts are reset on GC. After discarding code we
-             * need to let it warm back up to get information like which opcodes
-             * are setting array holes or accessing getter properties.
-             */
-            script->resetUseCount();
-        }
-#endif
+        discardJitCode(cx);
     }
 
     if (!activeAnalysis) {
         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_DISCARD_ANALYSIS);
 
         /*
          * Clear the analysis pool, but don't release its data yet. While
         * sweeping types, any live data will be allocated into the pool.
@@ -556,18 +565,16 @@ JSCompartment::sweep(JSContext *cx, bool
     }
 
     active = false;
 }
 
 void
 JSCompartment::purge(JSContext *cx)
 {
-    arenas.purge();
-    regExps.purge();
     dtoaCache.purge();
 
     /*
      * Clear the hash and reset all evalHashLink to null before the GC. This
      * way MarkChildren(trc, JSScript *) can assume that JSScript::u.object is
      * not null when we have script owned by an object and not from the eval
      * cache.
      */
@@ -771,23 +778,16 @@ JSCompartment::sweepBreakpoints(JSContex
                 nextbp = bp->nextInSite();
                 if (scriptGone || IsAboutToBeFinalized(bp->debugger->toJSObject()))
                     bp->destroy(cx);
             }
         }
     }
 }
 
-GCMarker *
-JSCompartment::createBarrierTracer()
-{
-    JS_ASSERT(!gcIncrementalTracer);
-    return NULL;
-}
-
 size_t
 JSCompartment::sizeOfShapeTable(JSMallocSizeOfFun mallocSizeOf)
 {
     return baseShapes.sizeOfExcludingThis(mallocSizeOf)
          + initialShapes.sizeOfExcludingThis(mallocSizeOf)
          + newTypeObjects.sizeOfExcludingThis(mallocSizeOf)
          + lazyTypeObjects.sizeOfExcludingThis(mallocSizeOf);
 }
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -41,17 +41,16 @@
 #define jscompartment_h___
 
 #include "mozilla/Attributes.h"
 
 #include "jsclist.h"
 #include "jscntxt.h"
 #include "jsfun.h"
 #include "jsgc.h"
-#include "jsgcstats.h"
 #include "jsobj.h"
 #include "jsscope.h"
 #include "vm/GlobalObject.h"
 #include "vm/RegExpObject.h"
 
 #ifdef _MSC_VER
 #pragma warning(push)
 #pragma warning(disable:4251) /* Silence warning about JS_FRIEND_API and data members. */
@@ -158,41 +157,56 @@ struct ScriptFilenameHasher
         return strcmp(e->filename, l) == 0;
     }
 };
 
 typedef HashSet<ScriptFilenameEntry *,
                 ScriptFilenameHasher,
                 SystemAllocPolicy> ScriptFilenameTable;
 
+/* If HashNumber grows, need to change WrapperHasher. */
+JS_STATIC_ASSERT(sizeof(HashNumber) == 4);
+
+struct WrapperHasher
+{
+    typedef Value Lookup;
+
+    static HashNumber hash(Value key) {
+        uint64_t bits = JSVAL_TO_IMPL(key).asBits;
+        return uint32_t(bits) ^ uint32_t(bits >> 32);
+    }
+
+    static bool match(const Value &l, const Value &k) { return l == k; }
+};
+
+typedef HashMap<Value, ReadBarrieredValue, WrapperHasher, SystemAllocPolicy> WrapperMap;
+
 } /* namespace js */
 
 namespace JS {
 struct TypeInferenceSizes;
 }
 
 struct JSCompartment
 {
     JSRuntime                    *rt;
     JSPrincipals                 *principals;
 
     js::gc::ArenaLists           arenas;
 
     bool                         needsBarrier_;
-    js::GCMarker                 *gcIncrementalTracer;
+    js::BarrierGCMarker          barrierMarker_;
 
     bool needsBarrier() {
         return needsBarrier_;
     }
 
     js::GCMarker *barrierTracer() {
         JS_ASSERT(needsBarrier_);
-        if (gcIncrementalTracer)
-            return gcIncrementalTracer;
-        return createBarrierTracer();
+        return &barrierMarker_;
     }
 
     size_t                       gcBytes;
     size_t                       gcTriggerBytes;
     size_t                       gcLastBytes;
     size_t                       gcMaxMallocBytes;
 
     bool                         hold;
@@ -320,20 +334,21 @@ struct JSCompartment
     bool wrap(JSContext *cx, JSObject **objp);
     bool wrapId(JSContext *cx, jsid *idp);
     bool wrap(JSContext *cx, js::PropertyOp *op);
     bool wrap(JSContext *cx, js::StrictPropertyOp *op);
     bool wrap(JSContext *cx, js::PropertyDescriptor *desc);
     bool wrap(JSContext *cx, js::AutoIdVector &props);
 
     void markTypes(JSTracer *trc);
+    void discardJitCode(JSContext *cx);
     void sweep(JSContext *cx, bool releaseTypes);
     void purge(JSContext *cx);
 
-    void setGCLastBytes(size_t lastBytes, JSGCInvocationKind gckind);
+    void setGCLastBytes(size_t lastBytes, js::JSGCInvocationKind gckind);
     void reduceGCTriggerBytes(size_t amount);
     
     void resetGCMallocBytes();
     void setGCMaxMallocBytes(size_t value);
     void updateMallocCounter(size_t nbytes) {
         ptrdiff_t oldCount = gcMallocBytes;
         ptrdiff_t newCount = oldCount - ptrdiff_t(nbytes);
         gcMallocBytes = newCount;
@@ -392,18 +407,16 @@ struct JSCompartment
     bool setDebugModeFromC(JSContext *cx, bool b);
 
     void clearBreakpointsIn(JSContext *cx, js::Debugger *dbg, JSObject *handler);
     void clearTraps(JSContext *cx);
 
   private:
     void sweepBreakpoints(JSContext *cx);
 
-    js::GCMarker *createBarrierTracer();
-
   public:
     js::WatchpointMap *watchpointMap;
 };
 
 #define JS_PROPERTY_TREE(cx)    ((cx)->compartment->propertyTree)
 
 namespace js {
 static inline MathCache *
--- a/js/src/jsexn.cpp
+++ b/js/src/jsexn.cpp
@@ -89,17 +89,17 @@ static void
 exn_finalize(JSContext *cx, JSObject *obj);
 
 static JSBool
 exn_resolve(JSContext *cx, JSObject *obj, jsid id, uintN flags,
             JSObject **objp);
 
 Class js::ErrorClass = {
     js_Error_str,
-    JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE |
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_NEW_RESOLVE |
     JSCLASS_HAS_CACHED_PROTO(JSProto_Error),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     (JSResolveOp)exn_resolve,
     JS_ConvertStub,
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -127,32 +127,38 @@ JS_NewObjectWithUniqueType(JSContext *cx
     if (!obj || !obj->setSingletonType(cx))
         return NULL;
     return obj;
 }
 
 JS_FRIEND_API(void)
 js::GCForReason(JSContext *cx, gcreason::Reason reason)
 {
-    js_GC(cx, NULL, GC_NORMAL, reason);
+    GC(cx, NULL, GC_NORMAL, reason);
 }
 
 JS_FRIEND_API(void)
 js::CompartmentGCForReason(JSContext *cx, JSCompartment *comp, gcreason::Reason reason)
 {
     /* We cannot GC the atoms compartment alone; use a full GC instead. */
     JS_ASSERT(comp != cx->runtime->atomsCompartment);
 
-    js_GC(cx, comp, GC_NORMAL, reason);
+    GC(cx, comp, GC_NORMAL, reason);
 }
 
 JS_FRIEND_API(void)
 js::ShrinkingGC(JSContext *cx, gcreason::Reason reason)
 {
-    js_GC(cx, NULL, GC_SHRINK, reason);
+    GC(cx, NULL, GC_SHRINK, reason);
+}
+
+JS_FRIEND_API(void)
+js::IncrementalGC(JSContext *cx, gcreason::Reason reason)
+{
+    GCSlice(cx, NULL, GC_NORMAL, reason);
 }
 
 JS_FRIEND_API(void)
 JS_ShrinkGCBuffers(JSRuntime *rt)
 {
     ShrinkGCBuffers(rt);
 }
 
@@ -396,22 +402,16 @@ js::GCThingIsMarkedGray(void *thing)
 }
 
 JS_FRIEND_API(void)
 JS_SetAccumulateTelemetryCallback(JSRuntime *rt, JSAccumulateTelemetryDataCallback callback)
 {
     rt->telemetryCallback = callback;
 }
 
-JS_FRIEND_API(void)
-JS_SetGCFinishedCallback(JSRuntime *rt, JSGCFinishedCallback callback)
-{
-    rt->gcFinishedCallback = callback;
-}
-
 #ifdef DEBUG
 JS_FRIEND_API(void)
 js_DumpString(JSString *str)
 {
     str->dump();
 }
 
 JS_FRIEND_API(void)
@@ -546,49 +546,16 @@ js::DumpHeapComplete(JSContext *cx, FILE
 
     dtrc.visited.finish();
 }
 
 #endif
 
 namespace js {
 
-JS_FRIEND_API(bool)
-IsIncrementalBarrierNeeded(JSRuntime *rt)
-{
-    return !!rt->gcIncrementalTracer && !rt->gcRunning;
-}
-
-JS_FRIEND_API(bool)
-IsIncrementalBarrierNeeded(JSContext *cx)
-{
-    return IsIncrementalBarrierNeeded(cx->runtime);
-}
-
-extern JS_FRIEND_API(void)
-IncrementalReferenceBarrier(void *ptr)
-{
-    if (!ptr)
-        return;
-    JS_ASSERT(!static_cast<gc::Cell *>(ptr)->compartment()->rt->gcRunning);
-    uint32_t kind = gc::GetGCThingTraceKind(ptr);
-    if (kind == JSTRACE_OBJECT)
-        JSObject::writeBarrierPre((JSObject *) ptr);
-    else if (kind == JSTRACE_STRING)
-        JSString::writeBarrierPre((JSString *) ptr);
-    else
-        JS_NOT_REACHED("invalid trace kind");
-}
-
-extern JS_FRIEND_API(void)
-IncrementalValueBarrier(const Value &v)
-{
-    HeapValue::writeBarrierPre(v);
-}
-
 /* static */ void
 AutoLockGC::LockGC(JSRuntime *rt)
 {
     JS_ASSERT(rt);
     JS_LOCK_GC(rt);
 }
 
 /* static */ void
@@ -714,9 +681,95 @@ GetRuntimeCompartments(JSRuntime *rt)
 }
 
 JS_FRIEND_API(size_t)
 SizeOfJSContext()
 {
     return sizeof(JSContext);
 }
 
+JS_FRIEND_API(GCSliceCallback)
+SetGCSliceCallback(JSRuntime *rt, GCSliceCallback callback)
+{
+    GCSliceCallback old = rt->gcSliceCallback;
+    rt->gcSliceCallback = callback;
+    return old;
+}
+
+JS_FRIEND_API(bool)
+WantGCSlice(JSRuntime *rt)
+{
+    if (rt->gcZeal() == gc::ZealFrameVerifierValue || rt->gcZeal() == gc::ZealFrameGCValue)
+        return true;
+
+    if (rt->gcIncrementalState != gc::NO_INCREMENTAL)
+        return true;
+
+    return false;
+}
+
+JS_FRIEND_API(void)
+NotifyDidPaint(JSContext *cx)
+{
+    JSRuntime *rt = cx->runtime;
+
+    if (rt->gcZeal() == gc::ZealFrameVerifierValue) {
+        gc::VerifyBarriers(cx);
+        return;
+    }
+
+    if (rt->gcZeal() == gc::ZealFrameGCValue) {
+        GCSlice(cx, NULL, GC_NORMAL, gcreason::REFRESH_FRAME);
+        return;
+    }
+
+    if (rt->gcIncrementalState != gc::NO_INCREMENTAL && !rt->gcInterFrameGC)
+        GCSlice(cx, rt->gcIncrementalCompartment, GC_NORMAL, gcreason::REFRESH_FRAME);
+
+    rt->gcInterFrameGC = false;
+}
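
For reference, the embedder side is expected to look roughly like this after
compositing a frame (a sketch, not code from this patch; AfterFramePainted is
a hypothetical hook):

    void AfterFramePainted(JSContext *cx)
    {
        JSRuntime *rt = JS_GetRuntime(cx);
        if (js::WantGCSlice(rt))    /* zeal frame mode or incremental GC active */
            js::NotifyDidPaint(cx);
    }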
+
+extern JS_FRIEND_API(bool)
+IsIncrementalGCEnabled(JSRuntime *rt)
+{
+    return rt->gcIncrementalEnabled;
+}
+
+JS_FRIEND_API(bool)
+IsIncrementalBarrierNeeded(JSRuntime *rt)
+{
+    return (rt->gcIncrementalState == gc::MARK && !rt->gcRunning);
+}
+
+JS_FRIEND_API(bool)
+IsIncrementalBarrierNeeded(JSContext *cx)
+{
+    return IsIncrementalBarrierNeeded(cx->runtime);
+}
+
+JS_FRIEND_API(bool)
+IsIncrementalBarrierNeededOnObject(JSObject *obj)
+{
+    return obj->compartment()->needsBarrier();
+}
+
+extern JS_FRIEND_API(void)
+IncrementalReferenceBarrier(void *ptr)
+{
+    if (!ptr)
+        return;
+    JS_ASSERT(!static_cast<gc::Cell *>(ptr)->compartment()->rt->gcRunning);
+    uint32_t kind = gc::GetGCThingTraceKind(ptr);
+    if (kind == JSTRACE_OBJECT)
+        JSObject::writeBarrierPre((JSObject *) ptr);
+    else if (kind == JSTRACE_STRING)
+        JSString::writeBarrierPre((JSString *) ptr);
+    else
+        JS_NOT_REACHED("invalid trace kind");
+}
+
+extern JS_FRIEND_API(void)
+IncrementalValueBarrier(const Value &v)
+{
+    HeapValue::writeBarrierPre(v);
+}
+
 } // namespace js
--- a/js/src/jsfriendapi.h
+++ b/js/src/jsfriendapi.h
@@ -95,31 +95,29 @@ JS_NondeterministicGetWeakMapKeys(JSCont
 extern JS_FRIEND_API(void)
 JS_TraceShapeCycleCollectorChildren(JSTracer *trc, void *shape);
 
 enum {
     JS_TELEMETRY_GC_REASON,
     JS_TELEMETRY_GC_IS_COMPARTMENTAL,
     JS_TELEMETRY_GC_MS,
     JS_TELEMETRY_GC_MARK_MS,
-    JS_TELEMETRY_GC_SWEEP_MS
+    JS_TELEMETRY_GC_SWEEP_MS,
+    JS_TELEMETRY_GC_SLICE_MS,
+    JS_TELEMETRY_GC_MMU_50,
+    JS_TELEMETRY_GC_RESET,
+    JS_TELEMETRY_GC_INCREMENTAL_DISABLED
 };
 
 typedef void
 (* JSAccumulateTelemetryDataCallback)(int id, uint32_t sample);
 
 extern JS_FRIEND_API(void)
 JS_SetAccumulateTelemetryCallback(JSRuntime *rt, JSAccumulateTelemetryDataCallback callback);
 
-typedef void
-(* JSGCFinishedCallback)(JSRuntime *rt, JSCompartment *comp, const char *description);
-
-extern JS_FRIEND_API(void)
-JS_SetGCFinishedCallback(JSRuntime *rt, JSGCFinishedCallback callback);
-
 extern JS_FRIEND_API(JSPrincipals *)
 JS_GetCompartmentPrincipals(JSCompartment *compartment);
 
 /* Safe to call with input obj == NULL. Returns non-NULL iff obj != NULL. */
 extern JS_FRIEND_API(JSObject *)
 JS_ObjectToInnerObject(JSContext *cx, JSObject *obj);
 
 /* Requires obj != NULL. */
@@ -698,22 +696,75 @@ extern JS_FRIEND_API(void)
 GCForReason(JSContext *cx, gcreason::Reason reason);
 
 extern JS_FRIEND_API(void)
 CompartmentGCForReason(JSContext *cx, JSCompartment *comp, gcreason::Reason reason);
 
 extern JS_FRIEND_API(void)
 ShrinkingGC(JSContext *cx, gcreason::Reason reason);
 
+extern JS_FRIEND_API(void)
+IncrementalGC(JSContext *cx, gcreason::Reason reason);
+
+extern JS_FRIEND_API(void)
+SetGCSliceTimeBudget(JSContext *cx, int64_t millis);
+
+enum GCProgress {
+    /*
+     * During non-incremental GC, the GC is bracketed by GC_CYCLE_BEGIN/END
+     * callbacks. During an incremental GC, the sequence of callbacks is as
+     * follows:
+     *   GC_CYCLE_BEGIN, GC_SLICE_END  (first slice)
+     *   GC_SLICE_BEGIN, GC_SLICE_END  (second slice)
+     *   ...
+     *   GC_SLICE_BEGIN, GC_CYCLE_END  (last slice)
+     */
+
+    GC_CYCLE_BEGIN,
+    GC_SLICE_BEGIN,
+    GC_SLICE_END,
+    GC_CYCLE_END
+};
+
+struct GCDescription {
+    const char *logMessage;
+    bool isCompartment;
+
+    GCDescription(const char *msg, bool isCompartment)
+      : logMessage(msg), isCompartment(isCompartment) {}
+};
+
+typedef void
+(* GCSliceCallback)(JSRuntime *rt, GCProgress progress, const GCDescription &desc);
+
+extern JS_FRIEND_API(GCSliceCallback)
+SetGCSliceCallback(JSRuntime *rt, GCSliceCallback callback);
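
A minimal callback, as a sketch of how the progress values and description
arrive (the logging target is arbitrary and <stdio.h> is assumed):

    static void
    LogGCSlice(JSRuntime *rt, js::GCProgress progress, const js::GCDescription &desc)
    {
        static const char *names[] = { "cycle begin", "slice begin",
                                       "slice end", "cycle end" };
        fprintf(stderr, "GC %s%s: %s\n", names[progress],
                desc.isCompartment ? " (compartment)" : "",
                desc.logMessage ? desc.logMessage : "");
    }

    /* installed once at startup: */
    js::SetGCSliceCallback(rt, LogGCSlice);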
+
+extern JS_FRIEND_API(bool)
+WantGCSlice(JSRuntime *rt);
+
+/*
+ * Signals a good place to do an incremental slice, because the browser is
+ * drawing a frame.
+ */
+extern JS_FRIEND_API(void)
+NotifyDidPaint(JSContext *cx);
+
+extern JS_FRIEND_API(bool)
+IsIncrementalGCEnabled(JSRuntime *rt);
+
 extern JS_FRIEND_API(bool)
 IsIncrementalBarrierNeeded(JSRuntime *rt);
 
 extern JS_FRIEND_API(bool)
 IsIncrementalBarrierNeeded(JSContext *cx);
 
+extern JS_FRIEND_API(bool)
+IsIncrementalBarrierNeededOnObject(JSObject *obj);
+
 extern JS_FRIEND_API(void)
 IncrementalReferenceBarrier(void *ptr);
 
 extern JS_FRIEND_API(void)
 IncrementalValueBarrier(const Value &v);
 
 class ObjectPtr
 {
--- a/js/src/jsfun.cpp
+++ b/js/src/jsfun.cpp
@@ -546,17 +546,17 @@ args_trace(JSTracer *trc, JSObject *obj)
 /*
  * The classes below collaborate to lazily reflect and synchronize actual
  * argument values, argument count, and callee function object stored in a
  * StackFrame with their corresponding property values in the frame's
  * arguments object.
  */
 Class js::NormalArgumentsObjectClass = {
     "Arguments",
-    JSCLASS_NEW_RESOLVE |
+    JSCLASS_NEW_RESOLVE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_RESERVED_SLOTS(NormalArgumentsObject::RESERVED_SLOTS) |
     JSCLASS_HAS_CACHED_PROTO(JSProto_Object) |
     JSCLASS_FOR_OF_ITERATION,
     JS_PropertyStub,         /* addProperty */
     args_delProperty,
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     args_enumerate,
@@ -582,17 +582,17 @@ Class js::NormalArgumentsObjectClass = {
 
 /*
  * Strict mode arguments is significantly less magical than non-strict mode
  * arguments, so it is represented by a different class while sharing some
  * functionality.
  */
 Class js::StrictArgumentsObjectClass = {
     "Arguments",
-    JSCLASS_NEW_RESOLVE |
+    JSCLASS_NEW_RESOLVE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_RESERVED_SLOTS(StrictArgumentsObject::RESERVED_SLOTS) |
     JSCLASS_HAS_CACHED_PROTO(JSProto_Object) |
     JSCLASS_FOR_OF_ITERATION,
     JS_PropertyStub,         /* addProperty */
     args_delProperty,
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     strictargs_enumerate,
@@ -937,17 +937,17 @@ call_trace(JSTracer *trc, JSObject *obj)
     StackFrame *fp = (StackFrame *) obj->getPrivate();
     if (fp && fp->isFloatingGenerator())
         MarkObject(trc, &js_FloatingFrameToGenerator(fp)->obj, "generator object");
 #endif
 }
 
 JS_PUBLIC_DATA(Class) js::CallClass = {
     "Call",
-    JSCLASS_HAS_PRIVATE |
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_RESERVED_SLOTS(CallObject::RESERVED_SLOTS) |
     JSCLASS_NEW_RESOLVE | JSCLASS_IS_ANONYMOUS,
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     (JSResolveOp)call_resolve,
@@ -1494,17 +1494,17 @@ JSFunction::sizeOfMisc(JSMallocSizeOfFun
 
 /*
  * Reserve two slots in all function objects for XPConnect.  Note that this
  * does not bloat every instance, only those on which reserved slots are set,
  * and those on which ad-hoc properties are defined.
  */
 JS_FRIEND_DATA(Class) js::FunctionClass = {
     js_Function_str,
-    JSCLASS_NEW_RESOLVE |
+    JSCLASS_NEW_RESOLVE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_CACHED_PROTO(JSProto_Function),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     fun_enumerate,
     (JSResolveOp)fun_resolve,
     JS_ConvertStub,
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -39,23 +39,49 @@
  * ***** END LICENSE BLOCK ***** */
 
 /* JS Mark-and-Sweep Garbage Collector. */
 
 #include "mozilla/Attributes.h"
 #include "mozilla/Util.h"
 
 /*
- * This GC allocates fixed-sized things with sizes up to GC_NBYTES_MAX (see
- * jsgc.h). It allocates from a special GC arena pool with each arena allocated
- * using malloc. It uses an ideally parallel array of flag bytes to hold the
- * mark bit, finalizer type index, etc.
+ * This code implements a mark-and-sweep garbage collector. The mark phase is
+ * incremental. Most sweeping is done on a background thread. A GC is divided
+ * into slices as follows:
+ *
+ * Slice 1: Roots pushed onto the mark stack. The mark stack is processed by
+ * popping an element, marking it, and pushing its children.
+ *   ... JS code runs ...
+ * Slice 2: More mark stack processing.
+ *   ... JS code runs ...
+ * Slice n-1: More mark stack processing.
+ *   ... JS code runs ...
+ * Slice n: Mark stack is completely drained. Some sweeping is done.
+ *   ... JS code runs, remaining sweeping done on background thread ...
+ *
+ * When background sweeping finishes the GC is complete.
  *
- * XXX swizzle page to freelist for better locality of reference
+ * Incremental GC requires close collaboration with the mutator (i.e., JS code):
+ *
+ * 1. During an incremental GC, if a memory location (except a root) is written
+ * to, then the value it previously held must be marked. Write barriers ensure
+ * this.
+ * 2. Any object that is allocated during incremental GC must start out marked.
+ * 3. Roots are special memory locations that don't need write
+ * barriers. However, they must be marked in the first slice. Roots are things
+ * like the C stack and the VM stack, since it would be too expensive to put
+ * barriers on them.
+ *
+ * Write barriers are handled using the compartment's barrierMarker_
+ * JSTracer, which maintains a per-compartment stack of GC things that have
+ * been write-barriered. This stack is processed during each GC slice. The
+ * barrierMarker_ is also used during write barrier verification
+ * (VerifyBarriers below).
  */
+
 #include <math.h>
 #include <string.h>     /* for memset used when DEBUG */
 
 #include "jstypes.h"
 #include "jsutil.h"
 #include "jshash.h"
 #include "jsclist.h"
 #include "jsprf.h"
@@ -113,22 +139,41 @@
 
 using namespace mozilla;
 using namespace js;
 using namespace js::gc;
 
 namespace js {
 namespace gc {
 
+/*
+ * Lower limit after which we limit the heap growth
+ */
+const size_t GC_ALLOCATION_THRESHOLD = 30 * 1024 * 1024;
+
+/*
+ * Once the heap has passed the GC_ALLOCATION_THRESHOLD lower limit, a GC is
+ * triggered when the number of newly allocated arenas reaches
+ * GC_HEAP_GROWTH_FACTOR times the number of arenas that were live after the
+ * last GC. This factor is used for non-incremental GCs.
+ */
+const float GC_HEAP_GROWTH_FACTOR = 3.0f;
+
+/* Perform a full GC every 20 seconds if MaybeGC is called. */
+static const uint64_t GC_IDLE_FULL_SPAN = 20 * 1000 * 1000;
+
 #ifdef JS_GC_ZEAL
 static void
 StartVerifyBarriers(JSContext *cx);
 
 static void
 EndVerifyBarriers(JSContext *cx);
+
+void
+FinishVerifier(JSRuntime *rt);
 #endif
 
 /* This array should be const, but that doesn't link right under GCC. */
 AllocKind slotsToThingKind[] = {
     /* 0 */  FINALIZE_OBJECT0,  FINALIZE_OBJECT2,  FINALIZE_OBJECT2,  FINALIZE_OBJECT4,
     /* 4 */  FINALIZE_OBJECT4,  FINALIZE_OBJECT8,  FINALIZE_OBJECT8,  FINALIZE_OBJECT8,
     /* 8 */  FINALIZE_OBJECT8,  FINALIZE_OBJECT12, FINALIZE_OBJECT12, FINALIZE_OBJECT12,
     /* 12 */ FINALIZE_OBJECT12, FINALIZE_OBJECT16, FINALIZE_OBJECT16, FINALIZE_OBJECT16,
@@ -270,16 +315,18 @@ Arena::finalize(JSContext *cx, AllocKind
     /* Enforce requirements on size of T. */
     JS_ASSERT(thingSize % Cell::CellSize == 0);
     JS_ASSERT(thingSize <= 255);
 
     JS_ASSERT(aheader.allocated());
     JS_ASSERT(thingKind == aheader.getAllocKind());
     JS_ASSERT(thingSize == aheader.getThingSize());
     JS_ASSERT(!aheader.hasDelayedMarking);
+    JS_ASSERT(!aheader.markOverflow);
+    JS_ASSERT(!aheader.allocatedDuringIncremental);
 
     uintptr_t thing = thingsStart(thingKind);
     uintptr_t lastByte = thingsEnd() - 1;
 
     FreeSpan nextFree(aheader.getFirstFreeSpan());
     nextFree.checkSpan();
 
     FreeSpan newListHead;
@@ -845,17 +892,16 @@ PickChunk(JSCompartment *comp)
 
 JS_FRIEND_API(bool)
 IsAboutToBeFinalized(const Cell *thing)
 {
     JSCompartment *thingCompartment = reinterpret_cast<const Cell *>(thing)->compartment();
     JSRuntime *rt = thingCompartment->rt;
     if (rt->gcCurrentCompartment != NULL && rt->gcCurrentCompartment != thingCompartment)
         return false;
-
     return !reinterpret_cast<const Cell *>(thing)->isMarked();
 }
 
 bool
 IsAboutToBeFinalized(const Value &v)
 {
     JS_ASSERT(v.isMarkable());
     return IsAboutToBeFinalized((Cell *)v.toGCThing());
@@ -921,16 +967,28 @@ InFreeList(ArenaHeader *aheader, uintptr
         /*
         * The last possible empty span is at the end of the arena. Here
          * span->end < thing < thingsEnd and so we must have more spans.
          */
         span = span->nextSpan();
     }
 }
 
+enum ConservativeGCTest
+{
+    CGCT_VALID,
+    CGCT_LOWBITSET, /* excluded because one of the low bits was set */
+    CGCT_NOTARENA,  /* not within arena range in a chunk */
+    CGCT_OTHERCOMPARTMENT,  /* in another compartment */
+    CGCT_NOTCHUNK,  /* not within a valid chunk */
+    CGCT_FREEARENA, /* within arena containing only free things */
+    CGCT_NOTLIVE,   /* gcthing is not allocated */
+    CGCT_END
+};
+
 /*
  * Tests whether w is a (possibly dead) GC thing. Returns CGCT_VALID and
  * details about the thing if so. On failure, returns the reason for rejection.
  */
 inline ConservativeGCTest
 IsAddressableGCThing(JSRuntime *rt, uintptr_t w,
                      gc::AllocKind *thingKindPtr, ArenaHeader **arenaHeader, void **thing)
 {
@@ -1019,32 +1077,28 @@ MarkIfGCThingWord(JSTracer *trc, uintptr
     /*
      * Check if the thing is free. We must use the list of free spans as at
      * this point we no longer have the mark bits from the previous GC run and
      * we must account for newly allocated things.
      */
     if (InFreeList(aheader, uintptr_t(thing)))
         return CGCT_NOTLIVE;
 
+    JSGCTraceKind traceKind = MapAllocToTraceKind(thingKind);
 #ifdef DEBUG
     const char pattern[] = "machine_stack %p";
     char nameBuf[sizeof(pattern) - 2 + sizeof(thing) * 2];
     JS_snprintf(nameBuf, sizeof(nameBuf), pattern, thing);
     JS_SET_TRACING_NAME(trc, nameBuf);
 #endif
-    MarkKind(trc, thing, MapAllocToTraceKind(thingKind));
-
-#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
-    if (IS_GC_MARKING_TRACER(trc)) {
-        GCMarker *marker = static_cast<GCMarker *>(trc);
-        if (marker->conservativeDumpFileName)
-            marker->conservativeRoots.append(thing);
-        if (uintptr_t(thing) != w)
-            marker->conservativeStats.unaligned++;
-    }
+    MarkKind(trc, thing, traceKind);
+
+#ifdef DEBUG
+    if (trc->runtime->gcIncrementalState == MARK_ROOTS)
+        trc->runtime->gcSavedRoots.append(JSRuntime::SavedGCRoot(thing, traceKind));
 #endif
 
     return CGCT_VALID;
 }
 
 static void
 MarkWordConservatively(JSTracer *trc, uintptr_t w)
 {
@@ -1065,18 +1119,36 @@ static void
 MarkRangeConservatively(JSTracer *trc, const uintptr_t *begin, const uintptr_t *end)
 {
     JS_ASSERT(begin <= end);
     for (const uintptr_t *i = begin; i < end; ++i)
         MarkWordConservatively(trc, *i);
 }
 
 static JS_NEVER_INLINE void
-MarkConservativeStackRoots(JSTracer *trc, JSRuntime *rt)
+MarkConservativeStackRoots(JSTracer *trc, bool useSavedRoots)
 {
+    JSRuntime *rt = trc->runtime;
+
+#ifdef DEBUG
+    if (useSavedRoots) {
+        for (JSRuntime::SavedGCRoot *root = rt->gcSavedRoots.begin();
+             root != rt->gcSavedRoots.end();
+             root++)
+        {
+            JS_SET_TRACING_NAME(trc, "cstack");
+            MarkKind(trc, root->thing, root->kind);
+        }
+        return;
+    }
+
+    if (rt->gcIncrementalState == MARK_ROOTS)
+        rt->gcSavedRoots.clearAndFree();
+#endif
+
     ConservativeGCData *cgcd = &rt->conservativeGC;
     if (!cgcd->hasStackToScan()) {
 #ifdef JS_THREADSAFE
         JS_ASSERT(!rt->suspendCount);
         JS_ASSERT(rt->requestDepth <= cgcd->requestThreshold);
 #endif
         return;
     }
@@ -1127,16 +1199,18 @@ MarkStackRangeConservatively(JSTracer *t
     JS_ASSERT(begin <= end);
     for (const uintptr_t *i = begin; i < end; i += sizeof(Value) / sizeof(uintptr_t))
         MarkWordConservatively(trc, *i);
 #else
     MarkRangeConservatively(trc, begin, end);
 #endif
 }
 
 JS_NEVER_INLINE void
 ConservativeGCData::recordStackTop()
 {
     /* Update the native stack pointer if it points to a bigger stack. */
     uintptr_t dummy;
     nativeStackTop = &dummy;
 
     /*
@@ -1186,16 +1260,21 @@ js_FinishGC(JSRuntime *rt)
     /*
      * Wait until the background finalization stops and the helper thread
      * shuts down before we forcefully release any remaining GC memory.
      */
 #ifdef JS_THREADSAFE
     rt->gcHelperThread.finish();
 #endif
 
+#ifdef JS_GC_ZEAL
+    /* Free memory associated with GC verification. */
+    FinishVerifier(rt);
+#endif
+
     /* Delete all remaining Compartments. */
     for (CompartmentsIter c(rt); !c.done(); c.next())
         Foreground::delete_(c.get());
     rt->compartments.clear();
     rt->atomsCompartment = NULL;
 
     rt->gcSystemAvailableChunkListHead = NULL;
     rt->gcUserAvailableChunkListHead = NULL;
@@ -1231,33 +1310,33 @@ js_AddGCThingRoot(JSContext *cx, void **
     return ok;
 }
 
 JS_FRIEND_API(JSBool)
 js_AddRootRT(JSRuntime *rt, jsval *vp, const char *name)
 {
     /*
      * Due to the long-standing, but now removed, use of rt->gcLock across the
-     * bulk of js_GC, API users have come to depend on JS_AddRoot etc. locking
+     * bulk of js::GC, API users have come to depend on JS_AddRoot etc. locking
      * properly with a racing GC, without calling JS_AddRoot from a request.
      * We have to preserve API compatibility here, now that we avoid holding
      * rt->gcLock across the mark phase (including the root hashtable mark).
      */
     AutoLockGC lock(rt);
 
     return !!rt->gcRootsHash.put((void *)vp,
                                  RootInfo(name, JS_GC_ROOT_VALUE_PTR));
 }
 
 JS_FRIEND_API(JSBool)
 js_AddGCThingRootRT(JSRuntime *rt, void **rp, const char *name)
 {
     /*
      * Due to the long-standing, but now removed, use of rt->gcLock across the
-     * bulk of js_GC, API users have come to depend on JS_AddRoot etc. locking
+     * bulk of js::GC, API users have come to depend on JS_AddRoot etc. locking
      * properly with a racing GC, without calling JS_AddRoot from a request.
      * We have to preserve API compatibility here, now that we avoid holding
      * rt->gcLock across the mark phase (including the root hashtable mark).
      */
     AutoLockGC lock(rt);
 
     return !!rt->gcRootsHash.put((void *)rp,
                                  RootInfo(name, JS_GC_ROOT_GCTHING_PTR));
@@ -1365,16 +1444,29 @@ JSCompartment::reduceGCTriggerBytes(size
     if (gcTriggerBytes - amount < GC_ALLOCATION_THRESHOLD * GC_HEAP_GROWTH_FACTOR)
         return;
     gcTriggerBytes -= amount;
 }
 
 namespace js {
 namespace gc {
 
+inline void
+ArenaLists::prepareForIncrementalGC(JSCompartment *comp)
+{
+    for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
+        FreeSpan *headSpan = &freeLists[i];
+        if (!headSpan->isEmpty()) {
+            ArenaHeader *aheader = headSpan->arenaHeader();
+            aheader->allocatedDuringIncremental = true;
+            comp->barrierMarker_.delayMarkingArena(aheader);
+        }
+    }
+}
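
(This is rule 2 from the comment at the top of this file in action: cells
handed out from these free lists while the incremental GC is in progress must
end up marked, so their arenas are flagged and queued for delayed marking.)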
+
 inline void *
 ArenaLists::allocateFromArena(JSCompartment *comp, AllocKind thingKind)
 {
     Chunk *chunk = NULL;
 
     ArenaList *al = &arenaLists[thingKind];
     AutoLockGC maybeLock;
 
@@ -1418,16 +1510,20 @@ ArenaLists::allocateFromArena(JSCompartm
             al->cursor = &aheader->next;
 
             /*
              * Move the free span stored in the arena to the free list and
              * allocate from it.
              */
             freeLists[thingKind] = aheader->getFirstFreeSpan();
             aheader->setAsFullyUsed();
+            if (JS_UNLIKELY(comp->needsBarrier())) {
+                aheader->allocatedDuringIncremental = true;
+                comp->barrierMarker_.delayMarkingArena(aheader);
+            }
             return freeLists[thingKind].infallibleAllocate(Arena::thingSize(thingKind));
         }
 
         /* Make sure we hold the GC lock before we call PickChunk. */
         if (!maybeLock.locked())
             maybeLock.lock(comp->rt);
         chunk = PickChunk(comp);
         if (!chunk)
@@ -1443,16 +1539,20 @@ ArenaLists::allocateFromArena(JSCompartm
      * cursor, so after the GC the most recently added arena will be used first
      * for allocations improving cache locality.
      */
     JS_ASSERT(!*al->cursor);
     ArenaHeader *aheader = chunk->allocateArena(comp, thingKind);
     if (!aheader)
         return NULL;
 
+    if (JS_UNLIKELY(comp->needsBarrier())) {
+        aheader->allocatedDuringIncremental = true;
+        comp->barrierMarker_.delayMarkingArena(aheader);
+    }
     aheader->next = al->head;
     if (!al->head) {
         JS_ASSERT(al->cursor == &al->head);
         al->cursor = &aheader->next;
     }
     al->head = aheader;
 
     /* See comments before allocateFromNewArena about this assert. */
@@ -1614,29 +1714,29 @@ ArenaLists::finalizeScripts(JSContext *c
 
 static void
 RunLastDitchGC(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime;
 
     /* The last ditch GC preserves all atoms. */
     AutoKeepAtoms keep(rt);
-    js_GC(cx, rt->gcTriggerCompartment, GC_NORMAL, gcreason::LAST_DITCH);
+    GC(cx, rt->gcTriggerCompartment, GC_NORMAL, gcreason::LAST_DITCH);
 }
 
 /* static */ void *
 ArenaLists::refillFreeList(JSContext *cx, AllocKind thingKind)
 {
     JS_ASSERT(cx->compartment->arenas.freeLists[thingKind].isEmpty());
 
     JSCompartment *comp = cx->compartment;
     JSRuntime *rt = comp->rt;
     JS_ASSERT(!rt->gcRunning);
 
-    bool runGC = !!rt->gcIsNeeded;
+    bool runGC = rt->gcIncrementalState != NO_INCREMENTAL && comp->gcBytes > comp->gcTriggerBytes;
     for (;;) {
         if (JS_UNLIKELY(runGC)) {
             RunLastDitchGC(cx);
 
             /*
              * The JSGC_END callback can legitimately allocate new GC
              * things and populate the free list. If that happens, just
              * return that list head.
@@ -1717,103 +1817,309 @@ js_UnlockGCThingRT(JSRuntime *rt, void *
         rt->gcPoke = true;
         if (--p->value == 0)
             rt->gcLocksHash.remove(p);
     }
 }
 
 namespace js {
 
+void
+InitTracer(JSTracer *trc, JSRuntime *rt, JSContext *cx, JSTraceCallback callback)
+{
+    trc->runtime = rt;
+    trc->context = cx;
+    trc->callback = callback;
+    trc->debugPrinter = NULL;
+    trc->debugPrintArg = NULL;
+    trc->debugPrintIndex = size_t(-1);
+    trc->eagerlyTraceWeakMaps = true;
+}
+
+/* static */ int64_t
+SliceBudget::TimeBudget(int64_t millis)
+{
+    return millis * PRMJ_USEC_PER_MSEC;
+}
+
+/* static */ int64_t
+SliceBudget::WorkBudget(int64_t work)
+{
+    return -work;
+}
+
+SliceBudget::SliceBudget()
+  : deadline(INT64_MAX),
+    counter(INTPTR_MAX)
+{
+}
+
+SliceBudget::SliceBudget(int64_t budget)
+{
+    if (budget == Unlimited) {
+        deadline = INT64_MAX;
+        counter = INTPTR_MAX;
+    } else if (budget > 0) {
+        deadline = PRMJ_Now() + budget;
+        counter = CounterReset;
+    } else {
+        deadline = 0;
+        counter = -budget;
+    }
+}
+
+bool
+SliceBudget::checkOverBudget()
+{
+    bool over = PRMJ_Now() > deadline;
+    if (!over)
+        counter = CounterReset;
+    return over;
+}
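
Callers consume a budget the way markDelayedChildren below does: perform a
unit of work, then ask checkOverBudget whether the deadline has passed. A
sketch with hypothetical work functions:

    SliceBudget budget(SliceBudget::TimeBudget(10));  /* roughly a 10 ms slice */
    while (haveWork()) {              /* hypothetical */
        doSomeWork();                 /* hypothetical */
        if (budget.checkOverBudget())
            break;                    /* out of time; resume in the next slice */
    }

Note the encoding: TimeBudget yields a positive microsecond deadline,
WorkBudget a negated count, and Unlimited disables both limits, matching the
three branches of the constructor above.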
+
+GCMarker::GCMarker()
+  : color(BLACK),
+    started(false),
+    unmarkedArenaStackTop(NULL),
+    markLaterArenas(0),
+    grayFailed(false)
+{
+}
+
+bool
+GCMarker::init(bool lazy)
+{
+    if (!stack.init(lazy ? 0 : MARK_STACK_LENGTH))
+        return false;
+    return true;
+}
+
+void
+GCMarker::start(JSRuntime *rt, JSContext *cx)
+{
+    InitTracer(this, rt, cx, NULL);
+    JS_ASSERT(!started);
+    started = true;
+    color = BLACK;
+
+    JS_ASSERT(!unmarkedArenaStackTop);
+    JS_ASSERT(markLaterArenas == 0);
+
+    JS_ASSERT(grayRoots.empty());
+    JS_ASSERT(!grayFailed);
+
+    /*
+     * The GC is recomputing the liveness of WeakMap entries, so we delay
+     * visting entries.
+     */
+    eagerlyTraceWeakMaps = JS_FALSE;
+}
+
+void
+GCMarker::stop()
+{
+    JS_ASSERT(isDrained());
+
+    JS_ASSERT(started);
+    started = false;
+
+    JS_ASSERT(!unmarkedArenaStackTop);
+    JS_ASSERT(markLaterArenas == 0);
+
+    JS_ASSERT(grayRoots.empty());
+    grayFailed = false;
+}
+
+void
+GCMarker::reset()
+{
+    color = BLACK;
+
+    stack.reset();
+    JS_ASSERT(isMarkStackEmpty());
+
+    while (unmarkedArenaStackTop) {
+        ArenaHeader *aheader = unmarkedArenaStackTop;
+        JS_ASSERT(aheader->hasDelayedMarking);
+        JS_ASSERT(markLaterArenas);
+        unmarkedArenaStackTop = aheader->getNextDelayedMarking();
+        aheader->hasDelayedMarking = 0;
+        aheader->markOverflow = 0;
+        aheader->allocatedDuringIncremental = 0;
+        markLaterArenas--;
+    }
+    JS_ASSERT(isDrained());
+    JS_ASSERT(!markLaterArenas);
+
+    grayRoots.clearAndFree();
+    grayFailed = false;
+}
+
 /*
  * When the native stack is low, the GC does not call JS_TraceChildren to mark
  * the reachable "children" of the thing. Rather the thing is put aside and
  * JS_TraceChildren is called later with more space on the C stack.
  *
  * To implement such delayed marking of the children with minimal overhead for
  * the normal case of sufficient native stack, the code adds a field per
  * arena. The field markingDelay->link links all arenas with delayed things
  * into a stack list with the pointer to stack top in
  * GCMarker::unmarkedArenaStackTop. delayMarkingChildren adds
  * arenas to the stack as necessary while markDelayedChildren pops the arenas
  * from the stack until it empties.
  */
 
-GCMarker::GCMarker(JSContext *cx)
-  : color(BLACK),
-    unmarkedArenaStackTop(NULL),
-    stack(cx->runtime->gcMarkStackArray)
+inline void
+GCMarker::delayMarkingArena(ArenaHeader *aheader)
 {
-    JS_TracerInit(this, cx, NULL);
-    markLaterArenas = 0;
-#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
-    conservativeDumpFileName = getenv("JS_DUMP_CONSERVATIVE_GC_ROOTS");
-    memset(&conservativeStats, 0, sizeof(conservativeStats));
-#endif
-
-    /*
-     * The GC is recomputing the liveness of WeakMap entries, so we
-     * delay visting entries.
-     */
-    eagerlyTraceWeakMaps = JS_FALSE;
-}
-
-GCMarker::~GCMarker()
-{
-#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
-    dumpConservativeRoots();
-#endif
+    if (aheader->hasDelayedMarking) {
+        /* Arena already scheduled to be marked later */
+        return;
+    }
+    aheader->setNextDelayedMarking(unmarkedArenaStackTop);
+    unmarkedArenaStackTop = aheader;
+    markLaterArenas++;
 }
 
 void
 GCMarker::delayMarkingChildren(const void *thing)
 {
     const Cell *cell = reinterpret_cast<const Cell *>(thing);
-    ArenaHeader *aheader = cell->arenaHeader();
-    if (aheader->hasDelayedMarking) {
-        /* Arena already scheduled to be marked later */
-        return;
-    }
-    aheader->setNextDelayedMarking(unmarkedArenaStackTop);
-    unmarkedArenaStackTop = aheader->getArena();
-    markLaterArenas++;
-}
-
-static void
-MarkDelayedChildren(GCMarker *trc, Arena *a)
-{
-    AllocKind allocKind = a->aheader.getAllocKind();
-    JSGCTraceKind traceKind = MapAllocToTraceKind(allocKind);
-    size_t thingSize = Arena::thingSize(allocKind);
-    uintptr_t end = a->thingsEnd();
-    for (uintptr_t thing = a->thingsStart(allocKind); thing != end; thing += thingSize) {
-        Cell *t = reinterpret_cast<Cell *>(thing);
-        if (t->isMarked())
-            JS_TraceChildren(trc, t, traceKind);
-    }
+    cell->arenaHeader()->markOverflow = 1;
+    delayMarkingArena(cell->arenaHeader());
 }
 
 void
-GCMarker::markDelayedChildren()
+GCMarker::markDelayedChildren(ArenaHeader *aheader)
 {
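+    /*
+     * An arena's marking is delayed for one of two reasons: the mark stack
+     * overflowed while tracing one of its cells, or the arena was allocated
+     * during an incremental slice, in which case every cell in it must be
+     * marked.
+     */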
+    if (aheader->markOverflow) {
+        bool always = aheader->allocatedDuringIncremental;
+        aheader->markOverflow = 0;
+
+        for (CellIterUnderGC i(aheader); !i.done(); i.next()) {
+            Cell *t = i.getCell();
+            if (always || t->isMarked()) {
+                t->markIfUnmarked();
+                JS_TraceChildren(this, t, MapAllocToTraceKind(aheader->getAllocKind()));
+            }
+        }
+    } else {
+        JS_ASSERT(aheader->allocatedDuringIncremental);
+        PushArena(this, aheader);
+    }
+    aheader->allocatedDuringIncremental = 0;
+}
+
+bool
+GCMarker::markDelayedChildren(SliceBudget &budget)
+{
+    gcstats::AutoPhase ap(runtime->gcStats, gcstats::PHASE_MARK_DELAYED);
+
     JS_ASSERT(unmarkedArenaStackTop);
     do {
         /*
          * If marking gets delayed at the same arena again, we must repeat
          * marking of its things. For that we pop the arena from the stack and
          * clear its hasDelayedMarking flag before we begin marking.
          */
-        Arena *a = unmarkedArenaStackTop;
-        JS_ASSERT(a->aheader.hasDelayedMarking);
+        ArenaHeader *aheader = unmarkedArenaStackTop;
+        JS_ASSERT(aheader->hasDelayedMarking);
         JS_ASSERT(markLaterArenas);
-        unmarkedArenaStackTop = a->aheader.getNextDelayedMarking();
-        a->aheader.hasDelayedMarking = 0;
+        unmarkedArenaStackTop = aheader->getNextDelayedMarking();
+        aheader->hasDelayedMarking = 0;
         markLaterArenas--;
-        MarkDelayedChildren(this, a);
+        markDelayedChildren(aheader);
+
+        if (budget.checkOverBudget())
+            return false;
     } while (unmarkedArenaStackTop);
     JS_ASSERT(!markLaterArenas);
+
+    return true;
+}
+
+#ifdef DEBUG
+void
+GCMarker::checkCompartment(void *p)
+{
+    JS_ASSERT(started);
+
+    Cell *cell = static_cast<Cell *>(p);
+    if (runtime->gcRunning && runtime->gcCurrentCompartment)
+        JS_ASSERT(cell->compartment() == runtime->gcCurrentCompartment);
+    else if (runtime->gcIncrementalCompartment)
+        JS_ASSERT(cell->compartment() == runtime->gcIncrementalCompartment);
+}
+#endif
+
+bool
+GCMarker::hasBufferedGrayRoots() const
+{
+    return !grayFailed;
+}
+
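+/*
+ * Gray roots are buffered during root marking so that the later gray marking
+ * phase can replay them without calling back into the embedding.
+ */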
+void
+GCMarker::startBufferingGrayRoots()
+{
+    JS_ASSERT(!callback);
+    callback = GrayCallback;
+    JS_ASSERT(IS_GC_MARKING_TRACER(this));
+}
+
+void
+GCMarker::endBufferingGrayRoots()
+{
+    JS_ASSERT(callback == GrayCallback);
+    callback = NULL;
+    JS_ASSERT(IS_GC_MARKING_TRACER(this));
+}
+
+void
+GCMarker::markBufferedGrayRoots()
+{
+    JS_ASSERT(!grayFailed);
+
+    for (GrayRoot *elem = grayRoots.begin(); elem != grayRoots.end(); elem++) {
+#ifdef DEBUG
+        debugPrinter = elem->debugPrinter;
+        debugPrintArg = elem->debugPrintArg;
+        debugPrintIndex = elem->debugPrintIndex;
+#endif
+        MarkKind(this, elem->thing, elem->kind);
+    }
+
+    grayRoots.clearAndFree();
+}
+
+void
+GCMarker::appendGrayRoot(void *thing, JSGCTraceKind kind)
+{
+    JS_ASSERT(started);
+
+    if (grayFailed)
+        return;
+
+    GrayRoot root(thing, kind);
+#ifdef DEBUG
+    root.debugPrinter = debugPrinter;
+    root.debugPrintArg = debugPrintArg;
+    root.debugPrintIndex = debugPrintIndex;
+#endif
+
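+    /* On OOM we stop buffering; the gray roots are then traced non-incrementally. */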
+    if (!grayRoots.append(root)) {
+        grayRoots.clearAndFree();
+        grayFailed = true;
+    }
+}
+
+void
+GCMarker::GrayCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind)
+{
+    GCMarker *gcmarker = static_cast<GCMarker *>(trc);
+    gcmarker->appendGrayRoot(*thingp, kind);
 }
 
 } /* namespace js */
 
 #ifdef DEBUG
 static void
 EmptyMarkCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind)
 {
@@ -1860,16 +2166,27 @@ gc_root_traversal(JSTracer *trc, const R
 
 static void
 gc_lock_traversal(const GCLocks::Entry &entry, JSTracer *trc)
 {
     JS_ASSERT(entry.value >= 1);
     MarkGCThingRoot(trc, entry.key, "locked object");
 }
 
+namespace js {
+
+void
+MarkCompartmentActive(StackFrame *fp)
+{
+    if (fp->isScriptFrame())
+        fp->script()->compartment()->active = true;
+}
+
+} /* namespace js */
+
 void
 AutoIdArray::trace(JSTracer *trc)
 {
     JS_ASSERT(tag == IDARRAY);
     gc::MarkIdRange(trc, idArray->length, idArray->vector, "JSAutoIdArray.idArray");
 }
 
 void
@@ -1999,35 +2316,29 @@ void
 AutoGCRooter::traceAll(JSTracer *trc)
 {
     for (js::AutoGCRooter *gcr = this; gcr; gcr = gcr->down)
         gcr->trace(trc);
 }
 
 namespace js {
 
-void
-MarkWeakReferences(GCMarker *gcmarker)
-{
-    JS_ASSERT(gcmarker->isMarkStackEmpty());
-    while (WatchpointMap::markAllIteratively(gcmarker) ||
-           WeakMapBase::markAllIteratively(gcmarker) ||
-           Debugger::markAllIteratively(gcmarker)) {
-        gcmarker->drainMarkStack();
-    }
-    JS_ASSERT(gcmarker->isMarkStackEmpty());
-}
-
 static void
-MarkRuntime(JSTracer *trc)
+MarkRuntime(JSTracer *trc, bool useSavedRoots = false)
 {
     JSRuntime *rt = trc->runtime;
+    JS_ASSERT(trc->callback != GCMarker::GrayCallback);
+    if (rt->gcCurrentCompartment) {
+        for (CompartmentsIter c(rt); !c.done(); c.next())
+            c->markCrossCompartmentWrappers(trc);
+        Debugger::markCrossCompartmentDebuggerObjectReferents(trc);
+    }
 
     if (rt->hasContexts())
-        MarkConservativeStackRoots(trc, rt);
+        MarkConservativeStackRoots(trc, useSavedRoots);
 
     for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront())
         gc_root_traversal(trc, r.front());
 
     for (GCLocks::Range r = rt->gcLocksHash.all(); !r.empty(); r.popFront())
         gc_lock_traversal(r.front(), trc);
 
     if (rt->scriptPCCounters) {
@@ -2072,23 +2383,28 @@ MarkRuntime(JSTracer *trc)
 #endif
 
     rt->stackSpace.mark(trc);
 
     /* The embedding can register additional roots here. */
     if (JSTraceDataOp op = rt->gcBlackRootsTraceOp)
         (*op)(trc, rt->gcBlackRootsData);
 
-    if (!IS_GC_MARKING_TRACER(trc)) {
-        /* We don't want to miss these when called from TraceRuntime. */
-        if (JSTraceDataOp op = rt->gcGrayRootsTraceOp)
+    /* During GC, this buffers up the gray roots and doesn't mark them. */
+    if (JSTraceDataOp op = rt->gcGrayRootsTraceOp) {
+        if (IS_GC_MARKING_TRACER(trc)) {
+            GCMarker *gcmarker = static_cast<GCMarker *>(trc);
+            gcmarker->startBufferingGrayRoots();
             (*op)(trc, rt->gcGrayRootsData);
+            gcmarker->endBufferingGrayRoots();
+        } else {
+            (*op)(trc, rt->gcGrayRootsData);
+        }
     }
 }
-
 void
 TriggerGC(JSRuntime *rt, gcreason::Reason reason)
 {
     JS_ASSERT(rt->onOwnerThread());
 
     if (rt->gcRunning || rt->gcIsNeeded)
         return;
 
@@ -2100,22 +2416,22 @@ TriggerGC(JSRuntime *rt, gcreason::Reaso
 }
 
 void
 TriggerCompartmentGC(JSCompartment *comp, gcreason::Reason reason)
 {
     JSRuntime *rt = comp->rt;
     JS_ASSERT(!rt->gcRunning);
 
-    if (rt->gcZeal()) {
+    if (rt->gcZeal() == ZealAllocValue) {
         TriggerGC(rt, reason);
         return;
     }
 
-    if (rt->gcMode != JSGC_MODE_COMPARTMENT || comp == rt->atomsCompartment) {
+    if (rt->gcMode == JSGC_MODE_GLOBAL || comp == rt->atomsCompartment) {
         /* We can't do a compartmental GC of the atoms compartment. */
         TriggerGC(rt, reason);
         return;
     }
 
     if (rt->gcIsNeeded) {
         /* If we need to GC more than one compartment, run a full GC. */
         if (rt->gcTriggerCompartment != comp)
@@ -2134,43 +2450,47 @@ TriggerCompartmentGC(JSCompartment *comp
 }
 
 void
 MaybeGC(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime;
     JS_ASSERT(rt->onOwnerThread());
 
-    if (rt->gcZeal()) {
-        js_GC(cx, NULL, GC_NORMAL, gcreason::MAYBEGC);
+    if (rt->gcZeal() == ZealAllocValue || rt->gcZeal() == ZealPokeValue) {
+        GC(cx, NULL, GC_NORMAL, gcreason::MAYBEGC);
         return;
     }
 
     JSCompartment *comp = cx->compartment;
     if (rt->gcIsNeeded) {
-        js_GC(cx, (comp == rt->gcTriggerCompartment) ? comp : NULL, GC_NORMAL, gcreason::MAYBEGC);
+        GCSlice(cx, (comp == rt->gcTriggerCompartment) ? comp : NULL,
+                GC_NORMAL, gcreason::MAYBEGC);
         return;
     }
 
-    if (comp->gcBytes > 8192 && comp->gcBytes >= 3 * (comp->gcTriggerBytes / 4)) {
-        js_GC(cx, (rt->gcMode == JSGC_MODE_COMPARTMENT) ? comp : NULL, GC_NORMAL, gcreason::MAYBEGC);
+    if (comp->gcBytes > 8192 &&
+        comp->gcBytes >= 3 * (comp->gcTriggerBytes / 4) &&
+        rt->gcIncrementalState == NO_INCREMENTAL)
+    {
+        GCSlice(cx, NULL, GC_NORMAL, gcreason::MAYBEGC);
         return;
     }
 
     /*
      * Access to the counters and, on 32 bit, setting gcNextFullGCTime below
      * is not atomic and a race condition could trigger or suppress the GC. We
      * tolerate this.
      */
     int64_t now = PRMJ_Now();
     if (rt->gcNextFullGCTime && rt->gcNextFullGCTime <= now) {
         if (rt->gcChunkAllocationSinceLastGC ||
             rt->gcNumArenasFreeCommitted > FreeCommittedArenasThreshold)
         {
-            js_GC(cx, NULL, GC_SHRINK, gcreason::MAYBEGC);
+            GCSlice(cx, NULL, GC_SHRINK, gcreason::MAYBEGC);
         } else {
             rt->gcNextFullGCTime = now + GC_IDLE_FULL_SPAN;
         }
     }
 }
 
 static void
 DecommitArenasFromAvailableList(JSRuntime *rt, Chunk **availableListHeadp)
@@ -2607,86 +2927,230 @@ SweepCompartments(JSContext *cx, JSGCInv
             continue;
         }
         *write++ = compartment;
     }
     rt->compartments.resize(write - rt->compartments.begin());
 }
 
 static void
-BeginMarkPhase(JSContext *cx, GCMarker *gcmarker, JSGCInvocationKind gckind)
+PurgeRuntime(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime;
 
     for (GCCompartmentsIter c(rt); !c.done(); c.next())
         c->purge(cx);
 
     rt->purge(cx);
 
     {
         JSContext *iter = NULL;
         while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter))
             acx->purge();
     }
+}
+
+static void
+BeginMarkPhase(JSContext *cx)
+{
+    JSRuntime *rt = cx->runtime;
+    GCMarker *gcmarker = &rt->gcMarker;
+
+    rt->gcStartNumber = rt->gcNumber;
+
+    /* Reset weak map list. */
+    WeakMapBase::resetWeakMapList(rt);
+
+    /*
+     * We must purge the runtime at the beginning of an incremental GC. The
+     * danger if we purge later is that the snapshot invariant of incremental
+     * GC will be broken, as follows. If some object is reachable only through
+     * some cache (say the dtoaCache) then it will not be part of the snapshot.
+     * If we purge after root marking, then the mutator could obtain a pointer
+     * to the object and start using it. This object might never be marked, so
+     * a GC hazard would exist.
+     */
+    PurgeRuntime(cx);
 
     /*
      * Mark phase.
      */
-    rt->gcStats.beginPhase(gcstats::PHASE_MARK);
+    gcstats::AutoPhase ap1(rt->gcStats, gcstats::PHASE_MARK);
+    gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_MARK_ROOTS);
 
     for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
         r.front()->bitmap.clear();
 
-    if (rt->gcCurrentCompartment) {
-        for (CompartmentsIter c(rt); !c.done(); c.next())
-            c->markCrossCompartmentWrappers(gcmarker);
-        Debugger::markCrossCompartmentDebuggerObjectReferents(gcmarker);
+    MarkRuntime(gcmarker);
+}
+
+void
+MarkWeakReferences(GCMarker *gcmarker)
+{
+    JS_ASSERT(gcmarker->isDrained());
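+    /*
+     * Marking weak references can make more keys live, which in turn can
+     * expose further weak entries, so iterate until a fixed point is reached.
+     */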
+    while (WatchpointMap::markAllIteratively(gcmarker) ||
+           WeakMapBase::markAllIteratively(gcmarker) ||
+           Debugger::markAllIteratively(gcmarker))
+    {
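+        /* A default SliceBudget is unlimited, so this drain runs to completion. */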
+        SliceBudget budget;
+        gcmarker->drainMarkStack(budget);
     }
-
-    MarkRuntime(gcmarker);
+    JS_ASSERT(gcmarker->isDrained());
 }
 
 static void
-EndMarkPhase(JSContext *cx, GCMarker *gcmarker, JSGCInvocationKind gckind)
+MarkGrayAndWeak(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime;
-
-    JS_ASSERT(gcmarker->isMarkStackEmpty());
+    FullGCMarker *gcmarker = &rt->gcMarker;
+
+    JS_ASSERT(gcmarker->isDrained());
+    MarkWeakReferences(gcmarker);
+
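+    /* Everything strongly reachable is now black; switch to marking the gray roots. */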
+    gcmarker->setMarkColorGray();
+    if (gcmarker->hasBufferedGrayRoots()) {
+        gcmarker->markBufferedGrayRoots();
+    } else {
+        if (JSTraceDataOp op = rt->gcGrayRootsTraceOp)
+            (*op)(gcmarker, rt->gcGrayRootsData);
+    }
+    SliceBudget budget;
+    gcmarker->drainMarkStack(budget);
     MarkWeakReferences(gcmarker);
-
-    if (JSTraceDataOp op = rt->gcGrayRootsTraceOp) {
-        gcmarker->setMarkColorGray();
-        (*op)(gcmarker, rt->gcGrayRootsData);
-        gcmarker->drainMarkStack();
-        MarkWeakReferences(gcmarker);
+    JS_ASSERT(gcmarker->isDrained());
+}
+
+#ifdef DEBUG
+static void
+ValidateIncrementalMarking(JSContext *cx);
+#endif
+
+static void
+EndMarkPhase(JSContext *cx)
+{
+    JSRuntime *rt = cx->runtime;
+
+    {
+        gcstats::AutoPhase ap1(rt->gcStats, gcstats::PHASE_MARK);
+        gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_MARK_OTHER);
+        MarkGrayAndWeak(cx);
     }
 
-    JS_ASSERT(gcmarker->isMarkStackEmpty());
-    rt->gcIncrementalTracer = NULL;
-
-    rt->gcStats.endPhase(gcstats::PHASE_MARK);
+    JS_ASSERT(rt->gcMarker.isDrained());
+
+#ifdef DEBUG
+    if (rt->gcIncrementalState != NO_INCREMENTAL)
+        ValidateIncrementalMarking(cx);
+#endif
 
     if (rt->gcCallback)
         (void) rt->gcCallback(cx, JSGC_MARK_END);
 
 #ifdef DEBUG
     /* Make sure that we didn't mark an object in another compartment */
     if (rt->gcCurrentCompartment) {
         for (CompartmentsIter c(rt); !c.done(); c.next()) {
             JS_ASSERT_IF(c != rt->gcCurrentCompartment && c != rt->atomsCompartment,
                          c->arenas.checkArenaListAllUnmarked());
         }
     }
 #endif
 }
 
+#ifdef DEBUG
 static void
-SweepPhase(JSContext *cx, GCMarker *gcmarker, JSGCInvocationKind gckind)
+ValidateIncrementalMarking(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime;
+    FullGCMarker *gcmarker = &rt->gcMarker;
+
+    js::gc::State state = rt->gcIncrementalState;
+    rt->gcIncrementalState = NO_INCREMENTAL;
+
+    /* As we're re-doing marking, we need to reset the weak map list. */
+    WeakMapBase::resetWeakMapList(rt);
+
+    JS_ASSERT(gcmarker->isDrained());
+    gcmarker->reset();
+
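+    /*
+     * Save the incremental mark bitmaps, clear them, redo the marking
+     * non-incrementally, and check that every cell the incremental mark
+     * found is also found by the full mark.
+     */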
+    typedef HashMap<Chunk *, uintptr_t *> BitmapMap;
+    BitmapMap map(cx);
+    map.init();
+
+    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) {
+        ChunkBitmap *bitmap = &r.front()->bitmap;
+        uintptr_t *entry = (uintptr_t *)js_malloc(sizeof(bitmap->bitmap));
+        if (entry)
+            memcpy(entry, bitmap->bitmap, sizeof(bitmap->bitmap));
+        map.putNew(r.front(), entry);
+    }
+
+    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
+        r.front()->bitmap.clear();
+
+    MarkRuntime(gcmarker, true);
+    SliceBudget budget;
+    rt->gcMarker.drainMarkStack(budget);
+    MarkGrayAndWeak(cx);
+
+    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) {
+        Chunk *chunk = r.front();
+        ChunkBitmap *bitmap = &chunk->bitmap;
+        uintptr_t *entry = map.lookup(r.front())->value;
+        ChunkBitmap incBitmap;
+
+        if (!entry)
+            continue;
+
+        memcpy(incBitmap.bitmap, entry, sizeof(incBitmap.bitmap));
+        js_free(entry);
+
+        for (size_t i = 0; i < ArenasPerChunk; i++) {
+            Arena *arena = &chunk->arenas[i];
+            if (!arena->aheader.allocated())
+                continue;
+            if (rt->gcCurrentCompartment && arena->aheader.compartment != rt->gcCurrentCompartment)
+                continue;
+            if (arena->aheader.allocatedDuringIncremental)
+                continue;
+
+            AllocKind kind = arena->aheader.getAllocKind();
+            uintptr_t thing = arena->thingsStart(kind);
+            uintptr_t end = arena->thingsEnd();
+            while (thing < end) {
+                Cell *cell = (Cell *)thing;
+                if (bitmap->isMarked(cell, BLACK) && !incBitmap.isMarked(cell, BLACK)) {
+                    JS_DumpHeap(cx, stdout, NULL, JSGCTraceKind(0), NULL, 100000, NULL);
+                    printf("Assertion cell: %p (%d)\n", (void *)cell, cell->getAllocKind());
+                }
+                JS_ASSERT_IF(bitmap->isMarked(cell, BLACK), incBitmap.isMarked(cell, BLACK));
+                thing += Arena::thingSize(kind);
+            }
+        }
+
+        memcpy(bitmap->bitmap, incBitmap.bitmap, sizeof(incBitmap.bitmap));
+    }
+
+    rt->gcIncrementalState = state;
+}
+#endif
+
+static void
+SweepPhase(JSContext *cx, JSGCInvocationKind gckind)
+{
+    JSRuntime *rt = cx->runtime;
+
+#ifdef JS_THREADSAFE
+    if (rt->hasContexts() && rt->gcHelperThread.prepareForBackgroundSweep())
+        cx->gcBackgroundFree = &rt->gcHelperThread;
+#endif
+
+    /* Purge the ArenaLists before sweeping. */
+    for (GCCompartmentsIter c(rt); !c.done(); c.next())
+        c->arenas.purge();
 
     /*
      * Sweep phase.
      *
      * Finalize as we sweep, outside of rt->gcLock but with rt->gcRunning set
      * so that any attempt to allocate a GC-thing from a finalizer will fail,
      * rather than nest badly and leave the unmarked newborn to be swept.
      *
@@ -2695,17 +3159,17 @@ SweepPhase(JSContext *cx, GCMarker *gcma
      * freed. Note that even after the entry is freed, JSObject finalizers can
      * continue to access the corresponding JSString* assuming that they are
      * unique. This works since the atomization API must not be called during
      * the GC.
      */
     gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP);
 
     /* Finalize unreachable (key,value) pairs in all weak maps. */
-    WeakMapBase::sweepAll(gcmarker);
+    WeakMapBase::sweepAll(&rt->gcMarker);
 
     js_SweepAtomState(rt);
 
     /* Collect watch points associated with unreachable objects. */
     WatchpointMap::sweepAll(rt);
 
     if (!rt->gcCurrentCompartment)
         Debugger::sweepAll(cx);
@@ -2776,218 +3240,190 @@ SweepPhase(JSContext *cx, GCMarker *gcma
 #endif
     }
 
     {
         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_XPCONNECT);
         if (rt->gcCallback)
             (void) rt->gcCallback(cx, JSGC_FINALIZE_END);
     }
+
+    for (CompartmentsIter c(rt); !c.done(); c.next())
+        c->setGCLastBytes(c->gcBytes, gckind);
 }
 
 /* Perform mark-and-sweep GC. If comp is set, we perform a single-compartment GC. */
 static void
 MarkAndSweep(JSContext *cx, JSGCInvocationKind gckind)
 {
     JSRuntime *rt = cx->runtime;
-    rt->gcNumber++;
-
-    /* Clear gcIsNeeded now, when we are about to start a normal GC cycle. */
-    rt->gcIsNeeded = false;
-    rt->gcTriggerCompartment = NULL;
-    
-    /* Clear gcMallocBytes for all compartments */
-    JSCompartment **read = rt->compartments.begin();
-    JSCompartment **end = rt->compartments.end();
-    JS_ASSERT(rt->compartments.length() >= 1);
-    
-    while (read < end) {
-        JSCompartment *compartment = *read++;
-        compartment->resetGCMallocBytes();
-    }
-
-    /* Reset weak map list. */
-    WeakMapBase::resetWeakMapList(rt);
-
-    /* Reset malloc counter. */
-    rt->resetGCMallocBytes();
 
     AutoUnlockGC unlock(rt);
 
-    GCMarker gcmarker(cx);
-    JS_ASSERT(IS_GC_MARKING_TRACER(&gcmarker));
-    JS_ASSERT(gcmarker.getMarkColor() == BLACK);
-    rt->gcIncrementalTracer = &gcmarker;
-
-    BeginMarkPhase(cx, &gcmarker, gckind);
-    gcmarker.drainMarkStack();
-    EndMarkPhase(cx, &gcmarker, gckind);
-    SweepPhase(cx, &gcmarker, gckind);
+    rt->gcMarker.start(rt, cx);
+    JS_ASSERT(!rt->gcMarker.callback);
+
+    BeginMarkPhase(cx);
+    {
+        gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_MARK);
+        SliceBudget budget;
+        rt->gcMarker.drainMarkStack(budget);
+    }
+    EndMarkPhase(cx);
+    SweepPhase(cx, gckind);
+
+    rt->gcMarker.stop();
 }
 
-class AutoGCSession {
+/*
+ * This class should be used by any code that needs exclusive access to the
+ * heap in order to trace through it...
+ */
+class AutoHeapSession {
   public:
-    explicit AutoGCSession(JSContext *cx);
+    explicit AutoHeapSession(JSContext *cx);
+    ~AutoHeapSession();
+
+  protected:
+    JSContext *context;
+
+  private:
+    AutoHeapSession(const AutoHeapSession&) MOZ_DELETE;
+    void operator=(const AutoHeapSession&) MOZ_DELETE;
+};
+
+/* ...while this class is to be used only for garbage collection. */
+class AutoGCSession : AutoHeapSession {
+  public:
+    explicit AutoGCSession(JSContext *cx, JSCompartment *comp);
     ~AutoGCSession();
 
   private:
-    JSContext   *context;
-
-    AutoGCSession(const AutoGCSession&) MOZ_DELETE;
-    void operator=(const AutoGCSession&) MOZ_DELETE;
+    /*
+     * We should not be depending on cx->compartment in the GC, so set it to
+     * NULL to look for violations.
+     */
+    SwitchToCompartment switcher;
 };
 
-/* Start a new GC session. */
-AutoGCSession::AutoGCSession(JSContext *cx)
+/* Start a new heap session. */
+AutoHeapSession::AutoHeapSession(JSContext *cx)
   : context(cx)
 {
     JS_ASSERT(!cx->runtime->noGCOrAllocationCheck);
     JSRuntime *rt = cx->runtime;
     JS_ASSERT(!rt->gcRunning);
     rt->gcRunning = true;
 }
 
-AutoGCSession::~AutoGCSession()
+AutoHeapSession::~AutoHeapSession()
 {
     JSRuntime *rt = context->runtime;
     rt->gcRunning = false;
 }
 
-/*
- * GC, repeatedly if necessary, until we think we have not created any new
- * garbage. We disable inlining to ensure that the bottom of the stack with
- * possible GC roots recorded in js_GC excludes any pointers we use during the
- * marking implementation.
- */
-static JS_NEVER_INLINE void
-GCCycle(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind)
+AutoGCSession::AutoGCSession(JSContext *cx, JSCompartment *comp)
+  : AutoHeapSession(cx),
+    switcher(cx, (JSCompartment *)NULL)
 {
     JSRuntime *rt = cx->runtime;
 
-    JS_ASSERT_IF(comp, comp != rt->atomsCompartment);
-    JS_ASSERT_IF(comp, rt->gcMode == JSGC_MODE_COMPARTMENT);
-
-    /* Recursive GC is no-op. */
-    if (rt->gcMarkAndSweep)
-        return;
-
-    AutoGCSession gcsession(cx);
-
-    /* Don't GC if we are reporting an OOM. */
-    if (rt->inOOMReport)
-        return;
-
-    /*
-     * We should not be depending on cx->compartment in the GC, so set it to
-     * NULL to look for violations.
-     */
-    SwitchToCompartment sc(cx, (JSCompartment *)NULL);
-
     JS_ASSERT(!rt->gcCurrentCompartment);
     rt->gcCurrentCompartment = comp;
 
-    rt->gcMarkAndSweep = true;
-
-#ifdef JS_THREADSAFE
-    /*
-     * As we about to purge caches and clear the mark bits we must wait for
-     * any background finalization to finish. We must also wait for the
-     * background allocation to finish so we can avoid taking the GC lock
-     * when manipulating the chunks during the GC.
-     */
-    JS_ASSERT(!cx->gcBackgroundFree);
-    rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
-    if (rt->hasContexts() && rt->gcHelperThread.prepareForBackgroundSweep())
-        cx->gcBackgroundFree = &rt->gcHelperThread;
-#endif
-
-    MarkAndSweep(cx, gckind);
-
-#ifdef JS_THREADSAFE
-    if (cx->gcBackgroundFree) {
-        JS_ASSERT(cx->gcBackgroundFree == &rt->gcHelperThread);
-        cx->gcBackgroundFree = NULL;
-        rt->gcHelperThread.startBackgroundSweep(cx, gckind == GC_SHRINK);
-    }
-#endif
-
-    rt->gcMarkAndSweep = false;
-    rt->gcCurrentCompartment = NULL;
-
+    rt->gcIsNeeded = false;
+    rt->gcTriggerCompartment = NULL;
+    rt->gcInterFrameGC = true;
+
+    rt->gcNumber++;
+
+    rt->resetGCMallocBytes();
+
+    /* Clear gcMallocBytes for all compartments */
     for (CompartmentsIter c(rt); !c.done(); c.next())
-        c->setGCLastBytes(c->gcBytes, gckind);
+        c->resetGCMallocBytes();
 }
 
-void
-js_GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, gcreason::Reason reason)
+AutoGCSession::~AutoGCSession()
 {
-    JSRuntime *rt = cx->runtime;
-    JS_AbortIfWrongThread(rt);
-
-#ifdef JS_GC_ZEAL
-    struct AutoVerifyBarriers {
-        JSContext *cx;
-        bool inVerify;
-        AutoVerifyBarriers(JSContext *cx) : cx(cx), inVerify(cx->runtime->gcVerifyData) {
-            if (inVerify) EndVerifyBarriers(cx);
-        }
-        ~AutoVerifyBarriers() { if (inVerify) StartVerifyBarriers(cx); }
-    } av(cx);
-#endif
-
-    RecordNativeStackTopForGC(cx);
-
-    gcstats::AutoGC agc(rt->gcStats, comp, reason);
-
-    do {
-        /*
-         * Let the API user decide to defer a GC if it wants to (unless this
-         * is the last context).  Invoke the callback regardless. Sample the
-         * callback in case we are freely racing with a JS_SetGCCallback{,RT}
-         * on another thread.
-         */
-        if (JSGCCallback callback = rt->gcCallback) {
-            if (!callback(cx, JSGC_BEGIN) && rt->hasContexts())
-                return;
-        }
-
-        {
-            /* Lock out other GC allocator and collector invocations. */
-            AutoLockGC lock(rt);
-            rt->gcPoke = false;
-            GCCycle(cx, comp, gckind);
-        }
-
-        /* We re-sample the callback again as the finalizers can change it. */
-        if (JSGCCallback callback = rt->gcCallback)
-            (void) callback(cx, JSGC_END);
-
-        /*
-         * On shutdown, iterate until finalizers or the JSGC_END callback
-         * stop creating garbage.
-         */
-    } while (!rt->hasContexts() && rt->gcPoke);
-
+    JSRuntime *rt = context->runtime;
+
+    rt->gcCurrentCompartment = NULL;
     rt->gcNextFullGCTime = PRMJ_Now() + GC_IDLE_FULL_SPAN;
-
     rt->gcChunkAllocationSinceLastGC = false;
 }
 
-namespace js {
-
-void
-ShrinkGCBuffers(JSRuntime *rt)
+static void
+ResetIncrementalGC(JSRuntime *rt)
 {
-    AutoLockGC lock(rt);
-    JS_ASSERT(!rt->gcRunning);
-#ifndef JS_THREADSAFE
-    ExpireChunksAndArenas(rt, true);
-#else
-    rt->gcHelperThread.startBackgroundShrink();
-#endif
+    if (rt->gcIncrementalState == NO_INCREMENTAL)
+        return;
+
+    for (CompartmentsIter c(rt); !c.done(); c.next()) {
+        if (!rt->gcIncrementalCompartment || rt->gcIncrementalCompartment == c) {
+            c->needsBarrier_ = false;
+            c->barrierMarker_.reset();
+            c->barrierMarker_.stop();
+        }
+        JS_ASSERT(!c->needsBarrier_);
+    }
+
+    rt->gcIncrementalCompartment = NULL;
+    rt->gcMarker.reset();
+    rt->gcMarker.stop();
+    rt->gcIncrementalState = NO_INCREMENTAL;
+
+    rt->gcStats.reset();
+}
+
+class AutoGCSlice {
+  public:
+    AutoGCSlice(JSContext *cx);
+    ~AutoGCSlice();
+
+  private:
+    JSContext *context;
+};
+
+AutoGCSlice::AutoGCSlice(JSContext *cx)
+  : context(cx)
+{
+    JSRuntime *rt = context->runtime;
+
+    /*
+     * During incremental GC, the compartment's active flag determines whether
+     * there are stack frames active for any of its scripts. Normally this flag
+     * is set at the beginning of the mark phase. During incremental GC, we also
+     * set it at the start of every slice.
+     */
+    rt->stackSpace.markActiveCompartments();
+
+    for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
+        /* Clear this early so we don't do any write barriers during GC. */
+        if (rt->gcIncrementalState == MARK)
+            c->needsBarrier_ = false;
+        else
+            JS_ASSERT(!c->needsBarrier_);
+    }
+}
+
+AutoGCSlice::~AutoGCSlice()
+{
+    JSRuntime *rt = context->runtime;
+
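+    /* Re-arm the write barriers before yielding back to the mutator mid-mark. */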
+    for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
+        if (rt->gcIncrementalState == MARK) {
+            c->needsBarrier_ = true;
+            c->arenas.prepareForIncrementalGC(c);
+        } else {
+            JS_ASSERT(rt->gcIncrementalState == NO_INCREMENTAL);
+
+            c->needsBarrier_ = false;
+        }
+    }
 }
 
 class AutoCopyFreeListToArenas {
     JSRuntime *rt;
 
   public:
     AutoCopyFreeListToArenas(JSRuntime *rt)
       : rt(rt) {
@@ -2996,28 +3432,318 @@ class AutoCopyFreeListToArenas {
     }
 
     ~AutoCopyFreeListToArenas() {
         for (CompartmentsIter c(rt); !c.done(); c.next())
             c->arenas.clearFreeListsInArenas();
     }
 };
 
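+/*
+ * An incremental collection advances through the gc::State values one
+ * budgeted slice at a time:
+ *
+ *   NO_INCREMENTAL -> MARK_ROOTS -> MARK (possibly many slices) -> SWEEP
+ *
+ * A sketch of how an embedding might drive a collection to completion
+ * (hypothetical caller, not part of this patch):
+ *
+ *   do {
+ *       GCSlice(cx, NULL, GC_NORMAL, gcreason::MAYBEGC);
+ *   } while (cx->runtime->gcIncrementalState != gc::NO_INCREMENTAL);
+ */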
+static void
+IncrementalGCSlice(JSContext *cx, int64_t budget, JSGCInvocationKind gckind)
+{
+    JS_ASSERT(budget != SliceBudget::Unlimited);
+
+    JSRuntime *rt = cx->runtime;
+
+    AutoUnlockGC unlock(rt);
+    AutoGCSlice slice(cx);
+
+    gc::State initialState = rt->gcIncrementalState;
+
+    if (rt->gcIncrementalState == NO_INCREMENTAL) {
+        JS_ASSERT(!rt->gcIncrementalCompartment);
+        rt->gcIncrementalCompartment = rt->gcCurrentCompartment;
+        rt->gcIncrementalState = MARK_ROOTS;
+        rt->gcLastMarkSlice = false;
+    }
+
+    if (rt->gcIncrementalState == MARK_ROOTS) {
+        rt->gcMarker.start(rt, cx);
+        JS_ASSERT(IS_GC_MARKING_TRACER(&rt->gcMarker));
+
+        for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
+            c->discardJitCode(cx);
+            c->barrierMarker_.start(rt, NULL);
+        }
+
+        BeginMarkPhase(cx);
+
+        rt->gcIncrementalState = MARK;
+    }
+
+    if (rt->gcIncrementalState == MARK) {
+        gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_MARK);
+        SliceBudget sliceBudget(budget);
+
+        /* If we couldn't buffer the gray roots, we must finish marking in this slice. */
+        if (!rt->gcMarker.hasBufferedGrayRoots())
+            sliceBudget.reset();
+
+        bool finished = rt->gcMarker.drainMarkStack(sliceBudget);
+
+        for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
+            c->barrierMarker_.context = cx;
+            finished &= c->barrierMarker_.drainMarkStack(sliceBudget);
+            c->barrierMarker_.context = NULL;
+        }
+
+        if (finished) {
+            JS_ASSERT(rt->gcMarker.isDrained());
+#ifdef DEBUG
+            for (GCCompartmentsIter c(rt); !c.done(); c.next())
+                JS_ASSERT(c->barrierMarker_.isDrained());
+#endif
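+            /*
+             * If this whole slice was spent marking, defer sweeping to the
+             * next slice rather than extending the current pause.
+             */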
+            if (initialState == MARK && !rt->gcLastMarkSlice)
+                rt->gcLastMarkSlice = true;
+            else
+                rt->gcIncrementalState = SWEEP;
+        }
+    }
+
+    if (rt->gcIncrementalState == SWEEP) {
+        EndMarkPhase(cx);
+        SweepPhase(cx, gckind);
+
+        rt->gcMarker.stop();
+
+        /* JIT code was already discarded during sweeping. */
+        for (GCCompartmentsIter c(rt); !c.done(); c.next())
+            c->barrierMarker_.stop();
+
+        rt->gcIncrementalCompartment = NULL;
+
+        rt->gcIncrementalState = NO_INCREMENTAL;
+    }
+}
+
+static bool
+IsIncrementalGCSafe(JSContext *cx)
+{
+    JSRuntime *rt = cx->runtime;
+
+    if (rt->gcCompartmentCreated) {
+        rt->gcCompartmentCreated = false;
+        return false;
+    }
+
+    if (rt->gcKeepAtoms)
+        return false;
+
+    for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
+        if (c->activeAnalysis)
+            return false;
+    }
+
+    if (rt->gcIncrementalState != NO_INCREMENTAL &&
+        rt->gcCurrentCompartment != rt->gcIncrementalCompartment)
+    {
+        return false;
+    }
+
+    if (!rt->gcIncrementalEnabled)
+        return false;
+
+    return true;
+}
+
+static bool
+IsIncrementalGCAllowed(JSContext *cx)
+{
+    JSRuntime *rt = cx->runtime;
+
+    if (rt->gcMode != JSGC_MODE_INCREMENTAL)
+        return false;
+
+#ifdef ANDROID
+    /* Incremental GC is disabled on Android for now. */
+    return false;
+#endif
+
+    if (!IsIncrementalGCSafe(cx))
+        return false;
+
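+    /* If a compartment is already over its trigger, collect non-incrementally. */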
+    for (CompartmentsIter c(rt); !c.done(); c.next()) {
+        if (c->gcBytes > c->gcTriggerBytes)
+            return false;
+    }
+
+    return true;
+}
+
+/*
+ * GC, repeatedly if necessary, until we think we have not created any new
+ * garbage. We disable inlining to ensure that the bottom of the stack with
+ * possible GC roots recorded in Collect excludes any pointers we use during the
+ * marking implementation.
+ */
+static JS_NEVER_INLINE void
+GCCycle(JSContext *cx, JSCompartment *comp, int64_t budget, JSGCInvocationKind gckind)
+{
+    JSRuntime *rt = cx->runtime;
+
+    JS_ASSERT_IF(comp, comp != rt->atomsCompartment);
+    JS_ASSERT_IF(comp, rt->gcMode != JSGC_MODE_GLOBAL);
+
+    /* Recursive GC is no-op. */
+    if (rt->gcRunning)
+        return;
+
+    AutoGCSession gcsession(cx, comp);
+
+    /* Don't GC if we are reporting an OOM. */
+    if (rt->inOOMReport)
+        return;
+
+#ifdef JS_THREADSAFE
+    /*
+     * As we are about to purge caches and clear the mark bits we must wait for
+     * any background finalization to finish. We must also wait for the
+     * background allocation to finish so we can avoid taking the GC lock
+     * when manipulating the chunks during the GC.
+     */
+    JS_ASSERT(!cx->gcBackgroundFree);
+    rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
+#endif
+
+    if (budget != SliceBudget::Unlimited) {
+        if (!IsIncrementalGCAllowed(cx))
+            budget = SliceBudget::Unlimited;
+    }
+
+    if (budget == SliceBudget::Unlimited)
+        ResetIncrementalGC(rt);
+
+    AutoCopyFreeListToArenas copy(rt);
+
+    if (budget == SliceBudget::Unlimited)
+        MarkAndSweep(cx, gckind);
+    else
+        IncrementalGCSlice(cx, budget, gckind);
+
+#ifdef DEBUG
+    if (rt->gcIncrementalState == NO_INCREMENTAL) {
+        for (CompartmentsIter c(rt); !c.done(); c.next())
+            JS_ASSERT(!c->needsBarrier_);
+    }
+#endif
+#ifdef JS_THREADSAFE
+    if (rt->gcIncrementalState == NO_INCREMENTAL) {
+        if (cx->gcBackgroundFree) {
+            JS_ASSERT(cx->gcBackgroundFree == &rt->gcHelperThread);
+            cx->gcBackgroundFree = NULL;
+            rt->gcHelperThread.startBackgroundSweep(cx, gckind == GC_SHRINK);
+        }
+    }
+#endif
+}
+
+static void
+Collect(JSContext *cx, JSCompartment *comp, int64_t budget,
+        JSGCInvocationKind gckind, gcreason::Reason reason)
+{
+    JSRuntime *rt = cx->runtime;
+    JS_AbortIfWrongThread(rt);
+
+    JS_ASSERT_IF(budget != SliceBudget::Unlimited, JSGC_INCREMENTAL);
+
+#ifdef JS_GC_ZEAL
+    struct AutoVerifyBarriers {
+        JSContext *cx;
+        bool inVerify;
+        AutoVerifyBarriers(JSContext *cx) : cx(cx), inVerify(cx->runtime->gcVerifyData) {
+            if (inVerify) EndVerifyBarriers(cx);
+        }
+        ~AutoVerifyBarriers() { if (inVerify) StartVerifyBarriers(cx); }
+    } av(cx);
+#endif
+
+    RecordNativeStackTopForGC(cx);
+
+    /*
+     * If a full incremental GC is already under way, widen a compartmental
+     * request into a full GC rather than resetting the incremental state.
+     */
+    if (rt->gcIncrementalState != NO_INCREMENTAL && !rt->gcIncrementalCompartment)
+        comp = NULL;
+
+    gcstats::AutoGCSlice agc(rt->gcStats, comp, reason);
+
+    do {
+        /*
+         * Let the API user decide to defer a GC if it wants to (unless this
+         * is the last context). Invoke the callback regardless.
+         */
+        if (rt->gcIncrementalState == NO_INCREMENTAL) {
+            if (JSGCCallback callback = rt->gcCallback) {
+                if (!callback(cx, JSGC_BEGIN) && rt->hasContexts())
+                    return;
+            }
+        }
+
+        {
+            /* Lock out other GC allocator and collector invocations. */
+            AutoLockGC lock(rt);
+            rt->gcPoke = false;
+            GCCycle(cx, comp, budget, gckind);
+        }
+
+        if (rt->gcIncrementalState == NO_INCREMENTAL) {
+            if (JSGCCallback callback = rt->gcCallback)
+                (void) callback(cx, JSGC_END);
+        }
+
+        /*
+         * On shutdown, iterate until finalizers or the JSGC_END callback
+         * stop creating garbage.
+         */
+    } while (!rt->hasContexts() && rt->gcPoke);
+}
+
+namespace js {
+
+void
+GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, gcreason::Reason reason)
+{
+    Collect(cx, comp, SliceBudget::Unlimited, gckind, reason);
+}
+
+void
+GCSlice(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, gcreason::Reason reason)
+{
+    Collect(cx, comp, cx->runtime->gcSliceBudget, gckind, reason);
+}
+
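+/*
+ * Testing hook: runs a single slice bounded by a work count rather than a
+ * time budget (exercised by the shell changes in this patch).
+ */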
+void
+GCDebugSlice(JSContext *cx, int64_t objCount)
+{
+    Collect(cx, NULL, SliceBudget::WorkBudget(objCount), GC_NORMAL, gcreason::API);
+}
+
+void
+ShrinkGCBuffers(JSRuntime *rt)
+{
+    AutoLockGC lock(rt);
+    JS_ASSERT(!rt->gcRunning);
+#ifndef JS_THREADSAFE
+    ExpireChunksAndArenas(rt, true);
+#else
+    rt->gcHelperThread.startBackgroundShrink();
+#endif
+}
+
 void
 TraceRuntime(JSTracer *trc)
 {
     JS_ASSERT(!IS_GC_MARKING_TRACER(trc));
 
 #ifdef JS_THREADSAFE
     {
         JSContext *cx = trc->context;
         JSRuntime *rt = cx->runtime;
         if (!rt->gcRunning) {
             AutoLockGC lock(rt);
-            AutoGCSession gcsession(cx);
+            AutoHeapSession session(cx);
 
             rt->gcHelperThread.waitBackgroundSweepEnd();
             AutoUnlockGC unlock(rt);
 
             AutoCopyFreeListToArenas copy(rt);
             RecordNativeStackTopForGC(trc->context);
             MarkRuntime(trc);
             return;
@@ -3068,17 +3794,17 @@ IterateCompartments(JSContext *cx, void 
                     IterateCompartmentCallback compartmentCallback)
 {
     CHECK_REQUEST(cx);
 
     JSRuntime *rt = cx->runtime;
     JS_ASSERT(!rt->gcRunning);
 
     AutoLockGC lock(rt);
-    AutoGCSession gcsession(cx);
+    AutoHeapSession session(cx);
 #ifdef JS_THREADSAFE
     rt->gcHelperThread.waitBackgroundSweepEnd();
 #endif
     AutoUnlockGC unlock(rt);
 
     AutoCopyFreeListToArenas copy(rt);
     for (CompartmentsIter c(rt); !c.done(); c.next()) {
         (*compartmentCallback)(cx, data, c);
@@ -3092,17 +3818,17 @@ IterateCompartmentsArenasCells(JSContext
                                IterateCellCallback cellCallback)
 {
     CHECK_REQUEST(cx);
 
     JSRuntime *rt = cx->runtime;
     JS_ASSERT(!rt->gcRunning);
 
     AutoLockGC lock(rt);
-    AutoGCSession gcsession(cx);
+    AutoHeapSession session(cx);
 #ifdef JS_THREADSAFE
     rt->gcHelperThread.waitBackgroundSweepEnd();
 #endif
     AutoUnlockGC unlock(rt);
 
     AutoCopyFreeListToArenas copy(rt);
     for (CompartmentsIter c(rt); !c.done(); c.next()) {
         (*compartmentCallback)(cx, data, c);
@@ -3122,17 +3848,17 @@ IterateChunks(JSContext *cx, void *data,
 {
     /* :XXX: Any way to common this preamble with IterateCompartmentsArenasCells? */
     CHECK_REQUEST(cx);
 
     JSRuntime *rt = cx->runtime;
     JS_ASSERT(!rt->gcRunning);
 
     AutoLockGC lock(rt);
-    AutoGCSession gcsession(cx);
+    AutoHeapSession session(cx);
 #ifdef JS_THREADSAFE
     rt->gcHelperThread.waitBackgroundSweepEnd();
 #endif
     AutoUnlockGC unlock(rt);
 
     for (js::GCChunkSet::Range r = rt->gcChunkSet.all(); !r.empty(); r.popFront())
         chunkCallback(cx, data, r.front());
 }
@@ -3143,17 +3869,17 @@ IterateCells(JSContext *cx, JSCompartmen
 {
     /* :XXX: Any way to common this preamble with IterateCompartmentsArenasCells? */
     CHECK_REQUEST(cx);
 
     JSRuntime *rt = cx->runtime;
     JS_ASSERT(!rt->gcRunning);
 
     AutoLockGC lock(rt);
-    AutoGCSession gcsession(cx);
+    AutoHeapSession session(cx);
 #ifdef JS_THREADSAFE
     rt->gcHelperThread.waitBackgroundSweepEnd();
 #endif
     AutoUnlockGC unlock(rt);
 
     AutoCopyFreeListToArenas copy(rt);
 
     JSGCTraceKind traceKind = MapAllocToTraceKind(thingKind);
@@ -3191,16 +3917,33 @@ NewCompartment(JSContext *cx, JSPrincipa
         compartment->setGCLastBytes(8192, GC_NORMAL);
 
         /*
          * Before reporting the OOM condition, |lock| needs to be cleaned up,
          * hence the scoping.
          */
         {
             AutoLockGC lock(rt);
+
+            /*
+             * If we're in the middle of an incremental GC, we cancel
+             * it. Otherwise we might fail to mark the newly created
+             * compartment fully.
+             */
+            if (rt->gcIncrementalState == MARK) {
+                rt->gcCompartmentCreated = true;
+
+                /*
+                 * Start the tracer so that it's legal to stop() it when
+                 * resetting the GC.
+                 */
+                if (!rt->gcIncrementalCompartment)
+                    compartment->barrierMarker_.start(rt, NULL);
+            }
+
             if (rt->compartments.append(compartment))
                 return compartment;
         }
 
         js_ReportOutOfMemory(cx);
     }
     Foreground::delete_(compartment);
     return NULL;
@@ -3234,17 +3977,17 @@ CheckStackRoot(JSTracer *trc, uintptr_t 
     VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w));
 #endif
 
     ConservativeGCTest test = MarkIfGCThingWord(trc, *w, DONT_MARK_THING);
 
     if (test == CGCT_VALID) {
         JSContext *iter = NULL;
         bool matched = false;
-        JSRuntime *rt = trc->context->runtime;
+        JSRuntime *rt = trc->runtime;
         while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter)) {
             for (unsigned i = 0; i < THING_ROOT_COUNT; i++) {
                 Root<Cell*> *rooter = acx->thingGCRooters[i];
                 while (rooter) {
                     if (rooter->address() == (Cell **) w)
                         matched = true;
                     rooter = rooter->previous();
                 }
@@ -3277,17 +4020,17 @@ CheckStackRootsRange(JSTracer *trc, uint
 }
 
 void
 CheckStackRoots(JSContext *cx)
 {
     AutoCopyFreeListToArenas copy(cx->runtime);
 
     JSTracer checker;
-    JS_TRACER_INIT(&checker, cx, EmptyMarkCallback);
+    JS_TracerInit(&checker, cx, EmptyMarkCallback);
 
     ThreadData *td = JS_THREAD_DATA(cx);
 
     ConservativeGCThreadData *ctd = &td->conservativeGC;
     ctd->recordStackTop();
 
     JS_ASSERT(ctd->hasStackToScan());
     uintptr_t *stackMin, *stackEnd;
@@ -3355,32 +4098,30 @@ typedef HashMap<void *, VerifyNode *> No
  * for the graph. If we run out of memory (i.e., if edgeptr goes beyond term),
  * we just abandon the verification.
  *
  * The nodemap field is a hashtable that maps from the address of the GC thing
  * to the VerifyNode that represents it.
  */
 struct VerifyTracer : JSTracer {
     /* The gcNumber when the verification began. */
-    uint32_t number;
+    uint64_t number;
 
     /* This counts up to JS_VERIFIER_FREQ to decide whether to verify. */
     uint32_t count;
 
     /* This graph represents the initial GC "snapshot". */
     VerifyNode *curnode;
     VerifyNode *root;
     char *edgeptr;
     char *term;
     NodeMap nodemap;
 
-    /* A dummy marker used for the write barriers; stored in gcMarkingTracer. */
-    GCMarker gcmarker;
-
-    VerifyTracer(JSContext *cx) : nodemap(cx), gcmarker(cx) {}
+    VerifyTracer(JSContext *cx) : root(NULL), nodemap(cx) {}
+    ~VerifyTracer() { js_free(root); }
 };
 
 /*
  * This function builds up the heap snapshot by adding edges to the current
  * node.
  */
 static void
 AccumulateEdge(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
@@ -3434,55 +4175,41 @@ NextNode(VerifyNode *node)
 			      sizeof(EdgeValue)*(node->count - 1));
 }
 
 static void
 StartVerifyBarriers(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime;
 
-    if (rt->gcVerifyData)
+    if (rt->gcVerifyData || rt->gcIncrementalState != NO_INCREMENTAL)
         return;
 
     AutoLockGC lock(rt);
-    AutoGCSession gcsession(cx);
+    AutoHeapSession session(cx);
+
+    if (!IsIncrementalGCSafe(cx))
+        return;
 
 #ifdef JS_THREADSAFE
     rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
 #endif
 
     AutoUnlockGC unlock(rt);
 
     AutoCopyFreeListToArenas copy(rt);
     RecordNativeStackTopForGC(cx);
 
     for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
         r.front()->bitmap.clear();
 
-    /*
-     * Kick all frames on the stack into the interpreter, and release all JIT
-     * code in the compartment.
-     */
-#ifdef JS_METHODJIT
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
-        mjit::ClearAllFrames(c);
-
-        for (CellIterUnderGC i(c, FINALIZE_SCRIPT); !i.done(); i.next()) {
-            JSScript *script = i.get<JSScript>();
-            mjit::ReleaseScriptCode(cx, script);
-
-            /*
-             * Use counts for scripts are reset on GC. After discarding code we
-             * need to let it warm back up to get information like which opcodes
-             * are setting array holes or accessing getter properties.
-             */
-            script->resetUseCount();
-        }
-    }
-#endif
+    for (CompartmentsIter c(rt); !c.done(); c.next())
+        c->discardJitCode(cx);
+
+    PurgeRuntime(cx);
 
     VerifyTracer *trc = new (js_malloc(sizeof(VerifyTracer))) VerifyTracer(cx);
 
     rt->gcNumber++;
     trc->number = rt->gcNumber;
     trc->count = 0;
 
     JS_TracerInit(trc, cx, AccumulateEdge);
@@ -3493,16 +4220,19 @@ StartVerifyBarriers(JSContext *cx)
     trc->edgeptr = (char *)trc->root;
     trc->term = trc->edgeptr + size;
 
     trc->nodemap.init();
 
     /* Create the root node. */
     trc->curnode = MakeNode(trc, NULL, JSGCTraceKind(0));
 
+    /* We want MarkRuntime to save the roots to gcSavedRoots. */
+    rt->gcIncrementalState = MARK_ROOTS;
+
     /* Make all the roots be edges emanating from the root node. */
     MarkRuntime(trc);
 
     VerifyNode *node = trc->curnode;
     if (trc->edgeptr == trc->term)
         goto oom;
 
     /* For each edge, make a node for it if one doesn't already exist. */
@@ -3517,65 +4247,93 @@ StartVerifyBarriers(JSContext *cx)
             if (trc->edgeptr == trc->term)
                 goto oom;
         }
 
         node = NextNode(node);
     }
 
     rt->gcVerifyData = trc;
-    rt->gcIncrementalTracer = &trc->gcmarker;
+    rt->gcIncrementalState = MARK;
     for (CompartmentsIter c(rt); !c.done(); c.next()) {
-        c->gcIncrementalTracer = &trc->gcmarker;
         c->needsBarrier_ = true;
+        c->barrierMarker_.start(rt, NULL);
+        c->arenas.prepareForIncrementalGC(c);
     }
 
     return;
 
 oom:
-    js_free(trc->root);
+    rt->gcIncrementalState = NO_INCREMENTAL;
     trc->~VerifyTracer();
     js_free(trc);
 }
 
 static void
-CheckAutorooter(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
+MarkFromAutorooter(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
 {
     static_cast<Cell *>(*thingp)->markIfUnmarked();
 }
 
+static bool
+IsMarkedOrAllocated(Cell *cell)
+{
+    return cell->isMarked() || cell->arenaHeader()->allocatedDuringIncremental;
+}
+
+static const uint32_t MAX_VERIFIER_EDGES = 1000;
+
 /*
  * This function is called by EndVerifyBarriers for every heap edge. If the edge
  * already existed in the original snapshot, we "cancel it out" by overwriting
  * it with NULL. EndVerifyBarriers later asserts that the remaining non-NULL
  * edges (i.e., the ones from the original snapshot that must have been
  * modified) must point to marked objects.
  */
 static void
 CheckEdge(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
 {
     VerifyTracer *trc = (VerifyTracer *)jstrc;
     VerifyNode *node = trc->curnode;
 
+    /* Avoid O(n^2) behavior. */
+    if (node->count > MAX_VERIFIER_EDGES)
+        return;
+
     for (uint32_t i = 0; i < node->count; i++) {
         if (node->edges[i].thing == *thingp) {
             JS_ASSERT(node->edges[i].kind == kind);
             node->edges[i].thing = NULL;
             return;
         }
     }
+
+    /*
+     * Anything that is reachable now should have been reachable before, or else
+     * it should be marked.
+     */
+    NodeMap::Ptr p = trc->nodemap.lookup(*thingp);
+    JS_ASSERT_IF(!p, IsMarkedOrAllocated(static_cast<Cell *>(*thingp)));
+}
+
+static void
+CheckReachable(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
+{
+    VerifyTracer *trc = (VerifyTracer *)jstrc;
+    NodeMap::Ptr p = trc->nodemap.lookup(*thingp);
+    JS_ASSERT_IF(!p, IsMarkedOrAllocated(static_cast<Cell *>(*thingp)));
 }
 
 static void
 EndVerifyBarriers(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime;
 
     AutoLockGC lock(rt);
-    AutoGCSession gcsession(cx);
+    AutoHeapSession session(cx);
 
 #ifdef JS_THREADSAFE
     rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
 #endif
 
     AutoUnlockGC unlock(rt);
 
     AutoCopyFreeListToArenas copy(rt);
@@ -3583,63 +4341,93 @@ EndVerifyBarriers(JSContext *cx)
 
     VerifyTracer *trc = (VerifyTracer *)rt->gcVerifyData;
 
     if (!trc)
         return;
 
     JS_ASSERT(trc->number == rt->gcNumber);
 
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
-        c->gcIncrementalTracer = NULL;
+    /* We need to disable barriers before tracing, which may invoke barriers. */
+    for (CompartmentsIter c(rt); !c.done(); c.next())
         c->needsBarrier_ = false;
-    }
-
-    if (rt->gcIncrementalTracer->hasDelayedChildren())
-        rt->gcIncrementalTracer->markDelayedChildren();
+
+    for (CompartmentsIter c(rt); !c.done(); c.next())
+        c->discardJitCode(cx);
 
     rt->gcVerifyData = NULL;
-    rt->gcIncrementalTracer = NULL;
-
-    JS_TracerInit(trc, cx, CheckAutorooter);
+    rt->gcIncrementalState = NO_INCREMENTAL;
+
+    JS_TracerInit(trc, cx, MarkFromAutorooter);
 
     JSContext *iter = NULL;
     while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter)) {
         if (acx->autoGCRooters)
             acx->autoGCRooters->traceAll(trc);
     }
 
-    JS_TracerInit(trc, cx, CheckEdge);
-
-    /* Start after the roots. */
-    VerifyNode *node = NextNode(trc->root);
-    int count = 0;
-
-    while ((char *)node < trc->edgeptr) {
-        trc->curnode = node;
-        JS_TraceChildren(trc, node->thing, node->kind);
-
-        for (uint32_t i = 0; i < node->count; i++) {
-            void *thing = node->edges[i].thing;
-            JS_ASSERT_IF(thing, static_cast<Cell *>(thing)->isMarked());
+    if (IsIncrementalGCSafe(cx)) {
+        /*
+         * Verify that all the current roots were reachable previously, or else
+         * are marked.
+         */
+        JS_TracerInit(trc, cx, CheckReachable);
+        MarkRuntime(trc, true);
+
+        JS_TracerInit(trc, cx, CheckEdge);
+
+        /* Start after the roots. */
+        VerifyNode *node = NextNode(trc->root);
+        while ((char *)node < trc->edgeptr) {
+            trc->curnode = node;
+            JS_TraceChildren(trc, node->thing, node->kind);
+
+            if (node->count <= MAX_VERIFIER_EDGES) {
+                for (uint32_t i = 0; i < node->count; i++) {
+                    void *thing = node->edges[i].thing;
+                    JS_ASSERT_IF(thing, IsMarkedOrAllocated(static_cast<Cell *>(thing)));
+                }
+            }
+
+            node = NextNode(node);
         }
-
-        count++;
-        node = NextNode(node);
     }
 
-    js_free(trc->root);
+    for (CompartmentsIter c(rt); !c.done(); c.next()) {
+        c->barrierMarker_.reset();
+        c->barrierMarker_.stop();
+    }
+
     trc->~VerifyTracer();
     js_free(trc);
 }
 
 void
-VerifyBarriers(JSContext *cx, bool always)
+FinishVerifier(JSRuntime *rt)
+{
+    if (VerifyTracer *trc = (VerifyTracer *)rt->gcVerifyData) {
+        trc->~VerifyTracer();
+        js_free(trc);
+    }
+}
+
+void
+VerifyBarriers(JSContext *cx)
 {
-    if (cx->runtime->gcZeal() < ZealVerifierThreshold)
+    JSRuntime *rt = cx->runtime;
+    if (rt->gcVerifyData)
+        EndVerifyBarriers(cx);
+    else
+        StartVerifyBarriers(cx);
+}
+
+void
+MaybeVerifyBarriers(JSContext *cx, bool always)
+{
+    if (cx->runtime->gcZeal() != ZealVerifierValue)
         return;
 
     uint32_t freq = cx->runtime->gcZealFrequency;
 
     JSRuntime *rt = cx->runtime;
     if (VerifyTracer *trc = (VerifyTracer *)rt->gcVerifyData) {
         if (++trc->count < freq && !always)
             return;
@@ -3777,8 +4565,9 @@ JSXML *
 js_NewGCXML(JSContext *cx)
 {
     if (!cx->runningWithTrustedPrincipals())
         ++sE4XObjectsCreated;
 
     return NewGCThing<JSXML>(cx, js::gc::FINALIZE_XML, sizeof(JSXML));
 }
 #endif
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -50,17 +50,16 @@
 #include "jsalloc.h"
 #include "jstypes.h"
 #include "jsprvtd.h"
 #include "jspubtd.h"
 #include "jsdhash.h"
 #include "jslock.h"
 #include "jsutil.h"
 #include "jsversion.h"
-#include "jsgcstats.h"
 #include "jscell.h"
 
 #include "ds/BitArray.h"
 #include "gc/Statistics.h"
 #include "js/HashTable.h"
 #include "js/Vector.h"
 #include "js/TemplateLib.h"
 
@@ -77,16 +76,24 @@ js_TraceXML(JSTracer *trc, JSXML* thing)
 
 namespace js {
 
 class GCHelperThread;
 struct Shape;
 
 namespace gc {
 
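+/* Tracks the progress of an in-progress incremental collection. */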
+enum State {
+    NO_INCREMENTAL,
+    MARK_ROOTS,
+    MARK,
+    SWEEP,
+    INVALID
+};
+
 struct Arena;
 
 /*
  * This must be an upper bound, but we do not need the least upper bound, so
  * we just exclude non-background objects.
  */
 const size_t MAX_BACKGROUND_FINALIZE_KINDS = FINALIZE_LIMIT - FINALIZE_OBJECT_LIMIT / 2;
 
@@ -414,59 +421,69 @@ struct ArenaHeader {
 
     /*
      * When recursive marking uses too much stack the marking is delayed and
      * the corresponding arenas are put into a stack using the following field
      * as a linkage. To distinguish the bottom of the stack from the arenas
      * not present in the stack we use an extra flag to tag arenas on the
      * stack.
      *
+     * Delayed marking is also used for arenas that we allocate into during an
+     * incremental GC. In this case, we intend to mark all the objects in the
+     * arena, and it's faster to do this marking in bulk.
+     *
      * To minimize the ArenaHeader size we record the next delayed marking
      * linkage as arenaAddress() >> ArenaShift and pack it with the allocKind
      * field and hasDelayedMarking flag. We use 8 bits for the allocKind, not
      * ArenaShift - 1, so the compiler can use byte-level memory instructions
      * to access it.
      */
   public:
     size_t       hasDelayedMarking  : 1;
-    size_t       nextDelayedMarking : JS_BITS_PER_WORD - 8 - 1;
+    size_t       allocatedDuringIncremental : 1;
+    size_t       markOverflow : 1;
+    size_t       nextDelayedMarking : JS_BITS_PER_WORD - 8 - 1 - 1 - 1;
 
     static void staticAsserts() {
         /* We must be able to fit the allockind into uint8_t. */
         JS_STATIC_ASSERT(FINALIZE_LIMIT <= 255);
 
         /*
          * nextDelayedMarking packing assumes that ArenaShift has enough bits
          * to cover allocKind and hasDelayedMarking.
          */
-        JS_STATIC_ASSERT(ArenaShift >= 8 + 1);
+        JS_STATIC_ASSERT(ArenaShift >= 8 + 1 + 1 + 1);
     }
 
     inline uintptr_t address() const;
     inline Chunk *chunk() const;
 
     bool allocated() const {
         JS_ASSERT(allocKind <= size_t(FINALIZE_LIMIT));
         return allocKind < size_t(FINALIZE_LIMIT);
     }
 
     void init(JSCompartment *comp, AllocKind kind) {
         JS_ASSERT(!allocated());
+        JS_ASSERT(!markOverflow);
+        JS_ASSERT(!allocatedDuringIncremental);
         JS_ASSERT(!hasDelayedMarking);
         compartment = comp;
 
         JS_STATIC_ASSERT(FINALIZE_LIMIT <= 255);
         allocKind = size_t(kind);
 
         /* See comments in FreeSpan::allocateFromNewArena. */
         firstFreeSpanOffsets = FreeSpan::FullArenaOffsets;
     }
 
     void setAsNotAllocated() {
         allocKind = size_t(FINALIZE_LIMIT);
+        markOverflow = 0;
+        allocatedDuringIncremental = 0;
         hasDelayedMarking = 0;
         nextDelayedMarking = 0;
     }
 
     uintptr_t arenaAddress() const {
         return address();
     }
 
@@ -502,18 +519,18 @@ struct ArenaHeader {
         JS_ASSERT(span->isWithinArena(arenaAddress()));
         firstFreeSpanOffsets = span->encodeAsOffsets();
     }
 
 #ifdef DEBUG
     void checkSynchronizedWithFreeList() const;
 #endif
 
-    inline Arena *getNextDelayedMarking() const;
-    inline void setNextDelayedMarking(Arena *arena);
+    inline ArenaHeader *getNextDelayedMarking() const;
+    inline void setNextDelayedMarking(ArenaHeader *aheader);
 };
 
 struct Arena {
     /*
      * Layout of an arena:
      * An arena is 4K in size and 4K-aligned. It starts with the ArenaHeader
      * descriptor followed by some pad bytes. The remainder of the arena is
      * filled with the array of T things. The pad bytes ensure that the thing
@@ -903,35 +920,34 @@ ArenaHeader::isEmpty() const
 
 inline size_t
 ArenaHeader::getThingSize() const
 {
     JS_ASSERT(allocated());
     return Arena::thingSize(getAllocKind());
 }
 
-inline Arena *
+inline ArenaHeader *
 ArenaHeader::getNextDelayedMarking() const
 {
-    return reinterpret_cast<Arena *>(nextDelayedMarking << ArenaShift);
+    return &reinterpret_cast<Arena *>(nextDelayedMarking << ArenaShift)->aheader;
 }
 
 inline void
-ArenaHeader::setNextDelayedMarking(Arena *arena)
+ArenaHeader::setNextDelayedMarking(ArenaHeader *aheader)
 {
-    JS_ASSERT(!hasDelayedMarking);
+    JS_ASSERT(!(uintptr_t(aheader) & ArenaMask));
     hasDelayedMarking = 1;
-    nextDelayedMarking = arena->address() >> ArenaShift;
+    nextDelayedMarking = aheader->arenaAddress() >> ArenaShift;
 }
 
 JS_ALWAYS_INLINE void
 ChunkBitmap::getMarkWordAndMask(const Cell *cell, uint32_t color,
                                 uintptr_t **wordp, uintptr_t *maskp)
 {
-    JS_ASSERT(cell->chunk() == Chunk::fromAddress(reinterpret_cast<uintptr_t>(this)));
     size_t bit = (cell->address() & ChunkMask) / Cell::CellSize + color;
     JS_ASSERT(bit < ArenaBitmapBits * ArenasPerChunk);
     *maskp = uintptr_t(1) << (bit % JS_BITS_PER_WORD);
     *wordp = &bitmap[bit / JS_BITS_PER_WORD];
 }
 
 static void
 AssertValidColor(const void *thing, uint32_t color)
@@ -965,31 +981,16 @@ Cell::unmark(uint32_t color) const
 }
 
 JSCompartment *
 Cell::compartment() const
 {
     return arenaHeader()->compartment;
 }
 
-/*
- * Lower limit after which we limit the heap growth
- */
-const size_t GC_ALLOCATION_THRESHOLD = 30 * 1024 * 1024;
-
-/*
- * A GC is triggered once the number of newly allocated arenas is
- * GC_HEAP_GROWTH_FACTOR times the number of live arenas after the last GC
- * starting after the lower limit of GC_ALLOCATION_THRESHOLD.
- */
-const float GC_HEAP_GROWTH_FACTOR = 3.0f;
-
-/* Perform a Full GC every 20 seconds if MaybeGC is called */
-static const int64_t GC_IDLE_FULL_SPAN = 20 * 1000 * 1000;
-
 static inline JSGCTraceKind
 MapAllocToTraceKind(AllocKind thingKind)
 {
     static const JSGCTraceKind map[FINALIZE_LIMIT] = {
         JSTRACE_OBJECT,     /* FINALIZE_OBJECT0 */
         JSTRACE_OBJECT,     /* FINALIZE_OBJECT0_BACKGROUND */
         JSTRACE_OBJECT,     /* FINALIZE_OBJECT2 */
         JSTRACE_OBJECT,     /* FINALIZE_OBJECT2_BACKGROUND */
@@ -1163,23 +1164,24 @@ struct ArenaLists {
      * Return the free list to the arena so the GC finalization will not
      * run the finalizers over uninitialized bytes from free things.
      */
     void purge() {
         for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
             FreeSpan *headSpan = &freeLists[i];
             if (!headSpan->isEmpty()) {
                 ArenaHeader *aheader = headSpan->arenaHeader();
-                JS_ASSERT(!aheader->hasFreeThings());
                 aheader->setFirstFreeSpan(headSpan);
                 headSpan->initAsEmpty();
             }
         }
     }
 
+    inline void prepareForIncrementalGC(JSCompartment *comp);
+
     /*
      * Temporarily copy the free list heads to the arenas so the code can see
      * the proper value in ArenaHeader::freeList when accessing the latter
      * outside the GC.
      */
     void copyFreeListsToArenas() {
         for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
             copyFreeListToArena(AllocKind(i));
@@ -1304,33 +1306,16 @@ struct RootInfo {
     JSGCRootType type;
 };
 
 typedef js::HashMap<void *,
                     RootInfo,
                     js::DefaultHasher<void *>,
                     js::SystemAllocPolicy> RootedValueMap;
 
-/* If HashNumber grows, need to change WrapperHasher. */
-JS_STATIC_ASSERT(sizeof(HashNumber) == 4);
-
-struct WrapperHasher
-{
-    typedef Value Lookup;
-
-    static HashNumber hash(Value key) {
-        uint64_t bits = JSVAL_TO_IMPL(key).asBits;
-        return uint32_t(bits) ^ uint32_t(bits >> 32);
-    }
-
-    static bool match(const Value &l, const Value &k) { return l == k; }
-};
-
-typedef HashMap<Value, Value, WrapperHasher, SystemAllocPolicy> WrapperMap;
-
 } /* namespace js */
 
 extern JS_FRIEND_API(JSGCTraceKind)
 js_GetGCThingTraceKind(void *thing);
 
 extern JSBool
 js_InitGC(JSRuntime *rt, uint32_t maxbytes);
 
@@ -1372,16 +1357,19 @@ extern bool
 IsAboutToBeFinalized(const js::Value &value);
 
 extern bool
 js_IsAddressableGCThing(JSRuntime *rt, uintptr_t w, js::gc::AllocKind *thingKind, void **thing);
 
 namespace js {
 
 extern void
+MarkCompartmentActive(js::StackFrame *fp);
+
+extern void
 TraceRuntime(JSTracer *trc);
 
 extern JS_FRIEND_API(void)
 MarkContext(JSTracer *trc, JSContext *acx);
 
 /* Must be called with GC lock taken. */
 extern void
 TriggerGC(JSRuntime *rt, js::gcreason::Reason reason);
@@ -1391,35 +1379,44 @@ extern void
 TriggerCompartmentGC(JSCompartment *comp, js::gcreason::Reason reason);
 
 extern void
 MaybeGC(JSContext *cx);
 
 extern void
 ShrinkGCBuffers(JSRuntime *rt);
 
-} /* namespace js */
-
 /*
  * Kinds of js_GC invocation.
  */
 typedef enum JSGCInvocationKind {
     /* Normal invocation. */
     GC_NORMAL           = 0,
 
     /* Minimize GC triggers and release empty GC chunks right away. */
     GC_SHRINK             = 1
 } JSGCInvocationKind;
 
 /* Pass NULL for |comp| to get a full GC. */
 extern void
-js_GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, js::gcreason::Reason r);
+GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, js::gcreason::Reason reason);
+
+extern void
+GCSlice(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, js::gcreason::Reason reason);
+
+extern void
+GCDebugSlice(JSContext *cx, int64_t objCount);
+
+} /* namespace js */
 
 namespace js {
 
+void
+InitTracer(JSTracer *trc, JSRuntime *rt, JSContext *cx, JSTraceCallback callback);
+
 #ifdef JS_THREADSAFE
 
 class GCHelperThread {
     enum State {
         IDLE,
         SWEEPING,
         ALLOCATING,
         CANCEL_ALLOCATION,
@@ -1567,149 +1564,337 @@ struct GCChunkHasher {
 typedef HashSet<js::gc::Chunk *, GCChunkHasher, SystemAllocPolicy> GCChunkSet;
 
 template<class T>
 struct MarkStack {
     T *stack;
     T *tos;
     T *limit;
 
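+    /*
+     * The ballast is a buffer preallocated by init() and kept for the life
+     * of the stack. reset() returns the stack to the ballast and frees any
+     * larger buffer acquired by enlarge(), so marking can always restart
+     * without allocating.
+     */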
+    T *ballast;
+    T *ballastLimit;
+
+    MarkStack()
+      : stack(NULL),
+        tos(NULL),
+        limit(NULL),
+        ballast(NULL),
+        ballastLimit(NULL) { }
+
+    ~MarkStack() {
+        if (stack != ballast)
+            js_free(stack);
+        js_free(ballast);
+    }
+
+    bool init(size_t ballastcap) {
+        JS_ASSERT(!stack);
+
+        if (ballastcap == 0)
+            return true;
+
+        ballast = (T *)js_malloc(sizeof(T) * ballastcap);
+        if (!ballast)
+            return false;
+        ballastLimit = ballast + ballastcap;
+        stack = ballast;
+        limit = ballastLimit;
+        tos = stack;
+        return true;
+    }
+
     bool push(T item) {
-        if (tos == limit)
-            return false;
+        if (tos == limit) {
+            if (!enlarge())
+                return false;
+        }
+        JS_ASSERT(tos < limit);
         *tos++ = item;
         return true;
     }
 
     bool push(T item1, T item2, T item3) {
         T *nextTos = tos + 3;
-        if (nextTos > limit)
-            return false;
+        if (nextTos > limit) {
+            if (!enlarge())
+                return false;
+            nextTos = tos + 3;
+        }
+        JS_ASSERT(nextTos <= limit);
         tos[0] = item1;
         tos[1] = item2;
         tos[2] = item3;
         tos = nextTos;
         return true;
     }
 
     bool isEmpty() const {
         return tos == stack;
     }
 
     T pop() {
         JS_ASSERT(!isEmpty());
         return *--tos;
     }
 
-    template<size_t N>
-    MarkStack(T (&buffer)[N])
-      : stack(buffer),
-        tos(buffer),
-        limit(buffer + N) { }
+    ptrdiff_t position() const {
+        return tos - stack;
+    }
+
+    void reset() {
+        if (stack != ballast) {
+            js_free(stack);
+            stack = ballast;
+            limit = ballastLimit;
+        }
+        tos = stack;
+        JS_ASSERT(limit == ballastLimit);
+    }
+
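+    /*
+     * Grow the stack by doubling its capacity. The ballast buffer is never
+     * reallocated, since reset() must be able to fall back to it, so the
+     * first growth out of the ballast copies the live entries instead.
+     */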
+    bool enlarge() {
+        size_t tosIndex = tos - stack;
+        size_t cap = limit - stack;
+        size_t newcap = cap * 2;
+        if (newcap == 0)
+            newcap = 32;
+
+        T *newStack;
+        if (stack == ballast) {
+            newStack = (T *)js_malloc(sizeof(T) * newcap);
+            if (!newStack)
+                return false;
+            for (T *src = stack, *dst = newStack; src < tos; )
+                *dst++ = *src++;
+        } else {
+            newStack = (T *)js_realloc(stack, sizeof(T) * newcap);
+            if (!newStack)
+                return false;
+        }
+        stack = newStack;
+        tos = stack + tosIndex;
+        limit = newStack + newcap;
+        return true;
+    }
+};
+
+/*
+ * This class records how much work has been done in a given GC slice, so that
+ * we can return before pausing for too long. Some slices are allowed to run for
+ * unlimited time, and others are bounded. To reduce the number of gettimeofday
+ * calls, we only check the time every 1000 operations.
+ */
+struct SliceBudget {
+    int64_t deadline; /* in microseconds */
+    intptr_t counter;
+
+    static const intptr_t CounterReset = 1000;
+
+    static const int64_t Unlimited = 0;
+    static int64_t TimeBudget(int64_t millis);
+    static int64_t WorkBudget(int64_t work);
+
+    /* Equivalent to SliceBudget(Unlimited). */
+    SliceBudget();
+
+    /* Instantiate as SliceBudget(Time/WorkBudget(n)). */
+    SliceBudget(int64_t budget);
+
+    void reset() {
+        deadline = INT64_MAX;
+        counter = INTPTR_MAX;
+    }
+
+    void step() {
+        counter--;
+    }
+
+    bool checkOverBudget();
+
+    bool isOverBudget() {
+        if (counter > 0)
+            return false;
+        return checkOverBudget();
+    }
 };
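+
+/*
+ * Typical use, as in drainMarkStack: construct a bounded budget with, e.g.,
+ * SliceBudget(SliceBudget::TimeBudget(10)) for a 10 ms slice, call step()
+ * once per unit of marking work, and yield when isOverBudget() returns true.
+ */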
 
 static const size_t MARK_STACK_LENGTH = 32768;
 
 struct GCMarker : public JSTracer {
+  private:
     /*
      * We use a common mark stack to mark GC things of different types and use
      * the explicit tags to distinguish them when it cannot be deduced from
      * the context of push or pop operation.
-     *
-     * Currently we need only 4 tags. However that can be extended to 8 if
-     * necessary as we tag only GC things.
      */
     enum StackTag {
         ValueArrayTag,
         ObjectTag,
         TypeTag,
         XmlTag,
-        LastTag = XmlTag
+        SavedValueArrayTag,
+        LastTag = SavedValueArrayTag
     };
 
-    static const uintptr_t StackTagMask = 3;
+    static const uintptr_t StackTagMask = 7;
 
     static void staticAsserts() {
         JS_STATIC_ASSERT(StackTagMask >= uintptr_t(LastTag));
         JS_STATIC_ASSERT(StackTagMask <= gc::Cell::CellMask);
     }
 
-  private:
-    /* The color is only applied to objects, functions and xml. */
-    uint32_t color;
+  public:
+    explicit GCMarker();
+    bool init(bool lazy);
 
-  public:
-    /* Pointer to the top of the stack of arenas we are delaying marking on. */
-    js::gc::Arena *unmarkedArenaStackTop;
-    /* Count of arenas that are currently in the stack. */
-    DebugOnly<size_t> markLaterArenas;
+    void start(JSRuntime *rt, JSContext *cx);
+    void stop();
+    void reset();
 
-#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
-    js::gc::ConservativeGCStats conservativeStats;
-    Vector<void *, 0, SystemAllocPolicy> conservativeRoots;
-    const char *conservativeDumpFileName;
-    void dumpConservativeRoots();
-#endif
+    void pushObject(JSObject *obj) {
+        pushTaggedPtr(ObjectTag, obj);
+    }
 
-    MarkStack<uintptr_t> stack;
+    void pushType(types::TypeObject *type) {
+        pushTaggedPtr(TypeTag, type);
+    }
 
-  public:
-    explicit GCMarker(JSContext *cx);
-    ~GCMarker();
+    void pushXML(JSXML *xml) {
+        pushTaggedPtr(XmlTag, xml);
+    }
 
     uint32_t getMarkColor() const {
         return color;
     }
 
     /*
      * The only valid color transition during a GC is from black to gray. It is
      * wrong to switch the mark color from gray to black. The reason is that the
      * cycle collector depends on the invariant that there are no black to gray
      * edges in the GC heap. This invariant lets the CC not trace through black
      * objects. If this invariant is violated, the cycle collector may free
      * objects that are still reachable.
      */
     void setMarkColorGray() {
+        JS_ASSERT(isDrained());
         JS_ASSERT(color == gc::BLACK);
         color = gc::GRAY;
     }
 
+    inline void delayMarkingArena(gc::ArenaHeader *aheader);
     void delayMarkingChildren(const void *thing);
-
+    void markDelayedChildren(gc::ArenaHeader *aheader);
+    bool markDelayedChildren(SliceBudget &budget);
     bool hasDelayedChildren() const {
         return !!unmarkedArenaStackTop;
     }
 
-    void markDelayedChildren();
+    bool isDrained() {
+        return isMarkStackEmpty() && !unmarkedArenaStackTop;
+    }
+
+    bool drainMarkStack(SliceBudget &budget);
+
+    /*
+     * Gray marking must be done after all black marking is complete. However,
+     * we do not have write barriers on XPConnect roots. Therefore, XPConnect
+     * roots must be accumulated in the first slice of incremental GC. We
+ * accumulate these roots in the grayRoots vector and then mark them later,
+     * after black marking is complete. This accumulation can fail, but in that
+     * case we switch to non-incremental GC.
+     */
+    bool hasBufferedGrayRoots() const;
+    void startBufferingGrayRoots();
+    void endBufferingGrayRoots();
+    void markBufferedGrayRoots();
+
+    static void GrayCallback(JSTracer *trc, void **thing, JSGCTraceKind kind);
+
+    MarkStack<uintptr_t> stack;
+
+  private:
+#ifdef DEBUG
+    void checkCompartment(void *p);
+#else
+    void checkCompartment(void *p) {}
+#endif
+
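+    /*
+     * A push fails only if the mark stack cannot be enlarged (OOM). In that
+     * case we fall back to queuing the thing's arena for delayed marking.
+     */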
+    void pushTaggedPtr(StackTag tag, void *ptr) {
+        checkCompartment(ptr);
+        uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
+        JS_ASSERT(!(addr & StackTagMask));
+        if (!stack.push(addr | uintptr_t(tag)))
+            delayMarkingChildren(ptr);
+    }
+
+    void pushValueArray(JSObject *obj, void *start, void *end) {
+        checkCompartment(obj);
+
+        if (start == end)
+            return;
+
+        JS_ASSERT(start <= end);
+        uintptr_t tagged = reinterpret_cast<uintptr_t>(obj) | GCMarker::ValueArrayTag;
+        uintptr_t startAddr = reinterpret_cast<uintptr_t>(start);
+        uintptr_t endAddr = reinterpret_cast<uintptr_t>(end);
+
+        /*
+         * Push in the reverse order so obj will be on top. If we cannot push
+         * the array, we trigger delayed marking for the whole object.
+         */
+        if (!stack.push(endAddr, startAddr, tagged))
+            delayMarkingChildren(obj);
+    }
 
     bool isMarkStackEmpty() {
         return stack.isEmpty();
     }
 
-    void drainMarkStack();
+    bool restoreValueArray(JSObject *obj, void **vpp, void **endp);
+    void saveValueRanges();
+    inline void processMarkStackTop(SliceBudget &budget);
+
+    void appendGrayRoot(void *thing, JSGCTraceKind kind);
 
-    inline void processMarkStackTop();
+    /* The color is only applied to objects, functions and xml. */
+    uint32_t color;
+
+    DebugOnly<bool> started;
+
+    /* Pointer to the top of the stack of arenas we are delaying marking on. */
+    js::gc::ArenaHeader *unmarkedArenaStackTop;
+    /* Count of arenas that are currently in the stack. */
+    DebugOnly<size_t> markLaterArenas;
 
-    void pushObject(JSObject *obj) {
-        pushTaggedPtr(ObjectTag, obj);
-    }
+    struct GrayRoot {
+        void *thing;
+        JSGCTraceKind kind;
+#ifdef DEBUG
+        JSTraceNamePrinter debugPrinter;
+        const void *debugPrintArg;
+        size_t debugPrintIndex;
+#endif
 
-    void pushType(types::TypeObject *type) {
-        pushTaggedPtr(TypeTag, type);
+        GrayRoot(void *thing, JSGCTraceKind kind)
+          : thing(thing), kind(kind) {}
+    };
+
+    bool grayFailed;
+    Vector<GrayRoot, 0, SystemAllocPolicy> grayRoots;
+};
+
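+/*
+ * The barrier marker is used by the write barriers, which fire only during
+ * incremental GC, so it allocates its mark stack lazily; the full GC marker
+ * allocates its stack up front.
+ */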
+struct BarrierGCMarker : public GCMarker {
+    bool init() {
+        return GCMarker::init(true);
     }
+};
 
-    void pushXML(JSXML *xml) {
-        pushTaggedPtr(XmlTag, xml);
-    }
 
-    void pushTaggedPtr(StackTag tag, void *ptr) {
-        uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
-        JS_ASSERT(!(addr & StackTagMask));
-        if (!stack.push(addr | uintptr_t(tag)))
-            delayMarkingChildren(ptr);
+struct FullGCMarker : public GCMarker {
+    bool init() {
+        return GCMarker::init(false);
     }
 };
 
 void
 MarkStackRangeConservatively(JSTracer *trc, Value *begin, Value *end);
 
 typedef void (*IterateCompartmentCallback)(JSContext *cx, void *data, JSCompartment *compartment);
 typedef void (*IterateChunkCallback)(JSContext *cx, void *data, gc::Chunk *chunk);
@@ -1752,17 +1937,18 @@ IterateCells(JSContext *cx, JSCompartmen
 } /* namespace js */
 
 extern void
 js_FinalizeStringRT(JSRuntime *rt, JSString *str);
 
 /*
  * Macro to test if a traversal is the marking phase of the GC.
  */
-#define IS_GC_MARKING_TRACER(trc) ((trc)->callback == NULL)
+#define IS_GC_MARKING_TRACER(trc) \
+    ((trc)->callback == NULL || (trc)->callback == GCMarker::GrayCallback)
 
 namespace js {
 namespace gc {
 
 JSCompartment *
 NewCompartment(JSContext *cx, JSPrincipals *principals);
 
 /* Tries to run a GC no matter what (used for GC zeal). */
@@ -1773,30 +1959,40 @@ RunDebugGC(JSContext *cx);
 /* Overwrites stack references to GC things which have not been rooted. */
 void CheckStackRoots(JSContext *cx);
 
 inline void MaybeCheckStackRoots(JSContext *cx) { CheckStackRoots(cx); }
 #else
 inline void MaybeCheckStackRoots(JSContext *cx) {}
 #endif
 
-const int ZealPokeThreshold = 1;
-const int ZealAllocThreshold = 2;
-const int ZealVerifierThreshold = 4;
+const int ZealPokeValue = 1;
+const int ZealAllocValue = 2;
+const int ZealFrameGCValue = 3;
+const int ZealVerifierValue = 4;
+const int ZealFrameVerifierValue = 5;
 
 #ifdef JS_GC_ZEAL
 
 /* Check that write barriers have been used correctly. See jsgc.cpp. */
 void
-VerifyBarriers(JSContext *cx, bool always = false);
+VerifyBarriers(JSContext *cx);
+
+void
+MaybeVerifyBarriers(JSContext *cx, bool always = false);
 
 #else
 
 static inline void
-VerifyBarriers(JSContext *cx, bool always = false)
+VerifyBarriers(JSContext *cx)
+{
+}
+
+static inline void
+MaybeVerifyBarriers(JSContext *cx, bool always = false)
 {
 }
 
 #endif
 
 } /* namespace gc */
 
 static inline JSCompartment *
--- a/js/src/jsgcinlines.h
+++ b/js/src/jsgcinlines.h
@@ -205,17 +205,17 @@ GCPoke(JSRuntime *rt, Value oldval)
 #if 1
     rt->gcPoke = true;
 #else
     rt->gcPoke = oldval.isGCThing();
 #endif
 
 #ifdef JS_GC_ZEAL
     /* Schedule a GC to happen "soon" after a GC poke. */
-    if (rt->gcZeal() >= js::gc::ZealPokeThreshold)
+    if (rt->gcZeal() == js::gc::ZealPokeValue)
         rt->gcNextScheduled = 1;
 #endif
 }
 
 /*
  * Invoke ArenaOp and CellOp on every arena and cell in a compartment which
  * have the specified thing kind.
  */
@@ -257,24 +257,35 @@ class CellIterImpl
     const FreeSpan *span;
     uintptr_t thing;
     Cell *cell;
 
   protected:
     CellIterImpl() {
     }
 
-    void init(JSCompartment *comp, AllocKind kind) {
+    void initSpan(JSCompartment *comp, AllocKind kind) {
         JS_ASSERT(comp->arenas.isSynchronizedFreeList(kind));
         firstThingOffset = Arena::firstThingOffset(kind);
         thingSize = Arena::thingSize(kind);
-        aheader = comp->arenas.getFirstArena(kind);
         firstSpan.initAsEmpty();
         span = &firstSpan;
         thing = span->first;
+    }
+
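+    /*
+     * Iterate over the cells of a single arena: after priming the iterator
+     * with next(), aheader is cleared so that iteration does not advance
+     * into the rest of the arena list.
+     */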
+    void init(ArenaHeader *singleAheader) {
+        aheader = singleAheader;
+        initSpan(aheader->compartment, aheader->getAllocKind());
+        next();
+        aheader = NULL;
+    }
+
+    void init(JSCompartment *comp, AllocKind kind) {
+        initSpan(comp, kind);
+        aheader = comp->arenas.getFirstArena(kind);
         next();
     }
 
   public:
     bool done() const {
         return !cell;
     }
 
@@ -306,41 +317,47 @@ class CellIterImpl
             thing = aheader->arenaAddress() | firstThingOffset;
             aheader = aheader->next;
         }
         cell = reinterpret_cast<Cell *>(thing);
         thing += thingSize;
     }
 };
 
-class CellIterUnderGC : public CellIterImpl {
-
+class CellIterUnderGC : public CellIterImpl
+{
   public:
     CellIterUnderGC(JSCompartment *comp, AllocKind kind) {
         JS_ASSERT(comp->rt->gcRunning);
         init(comp, kind);
     }
+
+    CellIterUnderGC(ArenaHeader *aheader) {
+        JS_ASSERT(aheader->compartment->rt->gcRunning);
+        init(aheader);
+    }
 };
 
 /*
  * When using the iterator outside the GC the caller must ensure that no GC or
  * allocation of GC things is possible, and that the background finalization
  * for the given thing kind is either disabled or already finished.
  */
-class CellIter: public CellIterImpl
+class CellIter : public CellIterImpl
 {
     ArenaLists *lists;
     AllocKind kind;
 #ifdef DEBUG
     size_t *counter;
 #endif
   public:
     CellIter(JSContext *cx, JSCompartment *comp, AllocKind kind)
       : lists(&comp->arenas),
-        kind(kind) {
+        kind(kind)
+    {
 #ifdef JS_THREADSAFE
         JS_ASSERT(comp->arenas.doneBackgroundFinalize(kind));
 #endif
         if (lists->isSynchronizedFreeList(kind)) {
             lists = NULL;
         } else {
             JS_ASSERT(!comp->rt->gcRunning);
             lists->copyFreeListToArena(kind);
@@ -392,16 +409,19 @@ NewGCThing(JSContext *cx, js::gc::AllocK
 #endif
 
     js::gc::MaybeCheckStackRoots(cx);
 
     JSCompartment *comp = cx->compartment;
     void *t = comp->arenas.allocateFromFreeList(kind, thingSize);
     if (!t)
         t = js::gc::ArenaLists::refillFreeList(cx, kind);
+
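+    /*
+     * Anything allocated while the compartment needs barriers must come from
+     * an arena flagged allocatedDuringIncremental, so the whole arena can be
+     * marked in bulk (see the delayed marking comments in jsgc.h).
+     */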
+    JS_ASSERT_IF(t && comp->needsBarrier(),
+                 static_cast<T *>(t)->arenaHeader()->allocatedDuringIncremental);
     return static_cast<T *>(t);
 }
 
 /* Alternate form which allocates a GC thing if doing so cannot trigger a GC. */
 template <typename T>
 inline T *
 TryNewGCThing(JSContext *cx, js::gc::AllocKind kind, size_t thingSize)
 {
@@ -414,16 +434,18 @@ TryNewGCThing(JSContext *cx, js::gc::All
     JS_ASSERT(!cx->runtime->noGCOrAllocationCheck);
 
 #ifdef JS_GC_ZEAL
     if (cx->runtime->needZealousGC())
         return NULL;
 #endif
 
     void *t = cx->compartment->arenas.allocateFromFreeList(kind, thingSize);
+    JS_ASSERT_IF(t && cx->compartment->needsBarrier(),
+                 static_cast<T *>(t)->arenaHeader()->allocatedDuringIncremental);
     return static_cast<T *>(t);
 }
 
 } /* namespace gc */
 } /* namespace js */
 
 inline JSObject *
 js_NewGCObject(JSContext *cx, js::gc::AllocKind kind)
--- a/js/src/jsgcmark.cpp
+++ b/js/src/jsgcmark.cpp
@@ -98,31 +98,37 @@ MarkInternal(JSTracer *trc, T *thing)
                  thing->compartment() == rt->gcCheckCompartment ||
                  thing->compartment() == rt->atomsCompartment);
 
     /*
      * Don't mark things outside a compartment if we are in a per-compartment
      * GC.
      */
     if (!rt->gcCurrentCompartment || thing->compartment() == rt->gcCurrentCompartment) {
-        if (IS_GC_MARKING_TRACER(trc)) {
+        if (!trc->callback) {
             PushMarkStack(static_cast<GCMarker *>(trc), thing);
         } else {
             void *tmp = (void *)thing;
             trc->callback(trc, &tmp, GetGCThingTraceKind(thing));
             JS_ASSERT(tmp == thing);
         }
     }
 
 #ifdef DEBUG
     trc->debugPrinter = NULL;
     trc->debugPrintArg = NULL;
 #endif
 }
 
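+/*
+ * Roots may only be marked at the start of a GC: either during a
+ * non-incremental GC or in the MARK_ROOTS phase of an incremental one.
+ */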
+#define JS_ROOT_MARKING_ASSERT(trc)                                     \
+    JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc),                             \
+                 trc->runtime->gcIncrementalState == NO_INCREMENTAL ||  \
+                 trc->runtime->gcIncrementalState == MARK_ROOTS);
 
 template <typename T>
 static void
 MarkUnbarriered(JSTracer *trc, T *thing, const char *name)
 {
     JS_SET_TRACING_NAME(trc, name);
     MarkInternal(trc, thing);
 }
 
@@ -133,16 +139,17 @@ Mark(JSTracer *trc, HeapPtr<T> *thing, c
     JS_SET_TRACING_NAME(trc, name);
     MarkInternal(trc, thing->get());
 }
 
 template <typename T>
 static void
 MarkRoot(JSTracer *trc, T **thingp, const char *name)
 {
+    JS_ROOT_MARKING_ASSERT(trc);
     JS_SET_TRACING_NAME(trc, name);
     MarkInternal(trc, *thingp);
 }
 
 template <typename T>
 static void
 MarkRange(JSTracer *trc, size_t len, HeapPtr<T> *vec, const char *name)
 {
@@ -153,16 +160,17 @@ MarkRange(JSTracer *trc, size_t len, Hea
         }
     }
 }
 
 template <typename T>
 static void
 MarkRootRange(JSTracer *trc, size_t len, T **vec, const char *name)
 {
+    JS_ROOT_MARKING_ASSERT(trc);
     for (size_t i = 0; i < len; ++i) {
         JS_SET_TRACING_INDEX(trc, name, i);
         MarkInternal(trc, vec[i]);
     }
 }
 
 #define DeclMarkerImpl(base, type)                                                                \
 void                                                                                              \
@@ -241,16 +249,17 @@ MarkKind(JSTracer *trc, void *thing, JSG
         break;
 #endif
     }
 }
 
 void
 MarkGCThingRoot(JSTracer *trc, void *thing, const char *name)
 {
+    JS_ROOT_MARKING_ASSERT(trc);
     JS_SET_TRACING_NAME(trc, name);
     if (!thing)
         return;
     MarkKind(trc, thing, GetGCThingTraceKind(thing));
 }
 
 /*** ID Marking ***/
 
@@ -268,32 +277,34 @@ MarkId(JSTracer *trc, const HeapId &id, 
 {
     JS_SET_TRACING_NAME(trc, name);
     MarkIdInternal(trc, id);
 }
 
 void
 MarkIdRoot(JSTracer *trc, const jsid &id, const char *name)
 {
+    JS_ROOT_MARKING_ASSERT(trc);
     JS_SET_TRACING_NAME(trc, name);
     MarkIdInternal(trc, id);
 }
 
 void
 MarkIdRange(JSTracer *trc, size_t len, HeapId *vec, const char *name)
 {
     for (size_t i = 0; i < len; ++i) {
         JS_SET_TRACING_INDEX(trc, name, i);
         MarkIdInternal(trc, vec[i]);
     }
 }
 
 void
 MarkIdRootRange(JSTracer *trc, size_t len, jsid *vec, const char *name)
 {
+    JS_ROOT_MARKING_ASSERT(trc);
     for (size_t i = 0; i < len; ++i) {
         JS_SET_TRACING_INDEX(trc, name, i);
         MarkIdInternal(trc, vec[i]);
     }
 }
 
 /*** Value Marking ***/
 
@@ -311,32 +322,34 @@ MarkValue(JSTracer *trc, HeapValue *v, c
 {
     JS_SET_TRACING_NAME(trc, name);
     MarkValueInternal(trc, v->unsafeGet());
 }
 
 void
 MarkValueRoot(JSTracer *trc, Value *v, const char *name)
 {
+    JS_ROOT_MARKING_ASSERT(trc);
     JS_SET_TRACING_NAME(trc, name);
     MarkValueInternal(trc, v);
 }
 
 void
 MarkValueRange(JSTracer *trc, size_t len, HeapValue *vec, const char *name)
 {
     for (size_t i = 0; i < len; ++i) {
         JS_SET_TRACING_INDEX(trc, name, i);
         MarkValueInternal(trc, vec[i].unsafeGet());
     }
 }
 
 void
 MarkValueRootRange(JSTracer *trc, size_t len, Value *vec, const char *name)
 {
+    JS_ROOT_MARKING_ASSERT(trc);
     for (size_t i = 0; i < len; ++i) {
         JS_SET_TRACING_INDEX(trc, name, i);
         MarkValueInternal(trc, &vec[i]);
     }
 }
 
 /*** Special Marking ***/
 
@@ -369,16 +382,20 @@ void
 MarkCrossCompartmentValue(JSTracer *trc, HeapValue *v, const char *name)
 {
     if (v->isMarkable()) {
         Cell *cell = (Cell *)v->toGCThing();
         JSRuntime *rt = trc->runtime;
         if (rt->gcCurrentCompartment && cell->compartment() != rt->gcCurrentCompartment)
             return;
 
+        /* In case we're called from a write barrier. */
+        if (rt->gcIncrementalCompartment && cell->compartment() != rt->gcIncrementalCompartment)
+            return;
+
         MarkValue(trc, v, name);
     }
 }
 
 /*** Push Mark Stack ***/
 
 #define JS_COMPARTMENT_ASSERT(rt, thing)                                 \
     JS_ASSERT_IF((rt)->gcCurrentCompartment,                             \
@@ -538,17 +555,17 @@ ScanLinearString(GCMarker *gcmarker, JSL
  * at the same depth as it was on entry. This way we avoid using tags when
  * pushing ropes to the stack as ropes never leaks to other users of the
  * stack. This also assumes that a rope can only point to other ropes or
  * linear strings, it cannot refer to GC things of other types.
  */
 static void
 ScanRope(GCMarker *gcmarker, JSRope *rope)
 {
-    uintptr_t *savedTos = gcmarker->stack.tos;
+    ptrdiff_t savedPos = gcmarker->stack.position();
     for (;;) {
         JS_ASSERT(GetGCThingTraceKind(rope) == JSTRACE_STRING);
         JS_ASSERT(rope->JSString::isRope());
         JS_COMPARTMENT_ASSERT_STR(gcmarker->runtime, rope);
         JS_ASSERT(rope->isMarked());
         JSRope *next = NULL;
 
         JSString *right = rope->rightChild();
@@ -570,24 +587,24 @@ ScanRope(GCMarker *gcmarker, JSRope *rop
                  */
                 if (next && !gcmarker->stack.push(reinterpret_cast<uintptr_t>(next)))
                     gcmarker->delayMarkingChildren(next);
                 next = &left->asRope();
             }
         }
         if (next) {
             rope = next;
-        } else if (savedTos != gcmarker->stack.tos) {
-            JS_ASSERT(savedTos < gcmarker->stack.tos);
+        } else if (savedPos != gcmarker->stack.position()) {
+            JS_ASSERT(savedPos < gcmarker->stack.position());
             rope = reinterpret_cast<JSRope *>(gcmarker->stack.pop());
         } else {
             break;
         }
     }
-    JS_ASSERT(savedTos == gcmarker->stack.tos);
+    JS_ASSERT(savedPos == gcmarker->stack.position());
 }
 
 static inline void
 ScanString(GCMarker *gcmarker, JSString *str)
 {
     if (str->isLinear())
         ScanLinearString(gcmarker, &str->asLinear());
     else
@@ -603,34 +620,16 @@ PushMarkStack(GCMarker *gcmarker, JSStri
      * As string can only refer to other strings we fully scan its GC graph
      * using the explicit stack when navigating the rope tree to avoid
      * dealing with strings on the stack in drainMarkStack.
      */
     if (str->markIfUnmarked())
         ScanString(gcmarker, str);
 }
 
-static inline void
-PushValueArray(GCMarker *gcmarker, JSObject* obj, HeapValue *start, HeapValue *end)
-{
-    JS_ASSERT(start <= end);
-    uintptr_t tagged = reinterpret_cast<uintptr_t>(obj) | GCMarker::ValueArrayTag;
-    uintptr_t startAddr = reinterpret_cast<uintptr_t>(start);
-    uintptr_t endAddr = reinterpret_cast<uintptr_t>(end);
-
-    /* Push in the reverse order so obj will be on top. */
-    if (!gcmarker->stack.push(endAddr, startAddr, tagged)) {
-        /*
-         * If we cannot push the array, we trigger delay marking for the whole
-         * object.
-         */
-        gcmarker->delayMarkingChildren(obj);
-    }
-}
-
 void
 MarkChildren(JSTracer *trc, JSObject *obj)
 {
     MarkTypeObject(trc, &obj->typeFromGC(), "type");
 
     Shape *shape = obj->lastProperty();
     MarkShapeUnbarriered(trc, shape, "shape");
 
@@ -846,22 +845,173 @@ MarkChildren(JSTracer *trc, types::TypeO
 #ifdef JS_HAS_XML_SUPPORT
 static void
 MarkChildren(JSTracer *trc, JSXML *xml)
 {
     js_TraceXML(trc, xml);
 }
 #endif
 
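+/*
+ * Push every cell of an arena onto the mark stack. This is used to mark
+ * arenas in bulk, such as arenas that were allocated during an incremental
+ * GC (see jsgc.h).
+ */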
+template<typename T>
+void
+PushArenaTyped(GCMarker *gcmarker, ArenaHeader *aheader)
+{
+    for (CellIterUnderGC i(aheader); !i.done(); i.next())
+        PushMarkStack(gcmarker, i.get<T>());
+}
+
+void
+PushArena(GCMarker *gcmarker, ArenaHeader *aheader)
+{
+    switch (MapAllocToTraceKind(aheader->getAllocKind())) {
+      case JSTRACE_OBJECT:
+        PushArenaTyped<JSObject>(gcmarker, aheader);
+        break;
+
+      case JSTRACE_STRING:
+        PushArenaTyped<JSString>(gcmarker, aheader);
+        break;
+
+      case JSTRACE_SCRIPT:
+        PushArenaTyped<JSScript>(gcmarker, aheader);
+        break;
+
+      case JSTRACE_SHAPE:
+        PushArenaTyped<js::Shape>(gcmarker, aheader);
+        break;
+
+      case JSTRACE_BASE_SHAPE:
+        PushArenaTyped<js::BaseShape>(gcmarker, aheader);
+        break;
+
+      case JSTRACE_TYPE_OBJECT:
+        PushArenaTyped<js::types::TypeObject>(gcmarker, aheader);
+        break;
+
+#if JS_HAS_XML_SUPPORT
+      case JSTRACE_XML:
+        PushArenaTyped<JSXML>(gcmarker, aheader);
+        break;
+#endif
+    }
+}
+
 } /* namespace gc */
 
 using namespace js::gc;
 
+struct ValueArrayLayout
+{
+    union {
+        HeapValue *end;
+        js::Class *clasp;
+    };
+    union {
+        HeapValue *start;
+        uintptr_t index;
+    };
+    JSObject *obj;
+
+    static void staticAsserts() {
+        /* This should have the same layout as three mark stack items. */
+        JS_STATIC_ASSERT(sizeof(ValueArrayLayout) == 3 * sizeof(uintptr_t));
+    }
+};
+
+/*
+ * During incremental GC, we may return from drainMarkStack without having
+ * processed the entire stack. At that point, JS code can run and reallocate
+ * the slot arrays that are referenced from the stack. To guard against this,
+ * we replace all ValueArrayTag stack entries with SavedValueArrayTag entries,
+ * in which slot pointers are replaced with slot indexes.
+ *
+ * We also replace the slot array end pointer (which can be derived from the obj
+ * pointer) with the object's class. During JS execution, array slowification
+ * can cause the layout of slots to change. We can observe that slowification
+ * happened if the class changed; in that case, we completely rescan the array.
+ */
+void
+GCMarker::saveValueRanges()
+{
+    for (uintptr_t *p = stack.tos; p > stack.stack; ) {
+        uintptr_t tag = *--p & StackTagMask;
+        if (tag == ValueArrayTag) {
+            p -= 2;
+            ValueArrayLayout *arr = reinterpret_cast<ValueArrayLayout *>(p);
+            JSObject *obj = arr->obj;
+
+            if (obj->getClass() == &ArrayClass) {
+                HeapValue *vp = obj->getDenseArrayElements();
+                JS_ASSERT(arr->start >= vp &&
+                          arr->end == vp + obj->getDenseArrayInitializedLength());
+                arr->index = arr->start - vp;
+            } else {
+                HeapValue *vp = obj->fixedSlots();
+                unsigned nfixed = obj->numFixedSlots();
+                if (arr->start >= vp && arr->start < vp + nfixed) {
+                    JS_ASSERT(arr->end == vp + Min(nfixed, obj->slotSpan()));
+                    arr->index = arr->start - vp;
+                } else {
+                    JS_ASSERT(arr->start >= obj->slots &&
+                              arr->end == obj->slots + obj->slotSpan() - nfixed);
+                    arr->index = (arr->start - obj->slots) + nfixed;
+                }
+            }
+            arr->clasp = obj->getClass();
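+            /* ValueArrayTag is zero, so OR-ing the tag retags in place. */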
+            p[2] |= SavedValueArrayTag;
+        } else if (tag == SavedValueArrayTag) {
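+            /* Skip the two data words of an already-saved entry. */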
+            p -= 2;
+        }
+    }
+}
+
+bool
+GCMarker::restoreValueArray(JSObject *obj, void **vpp, void **endp)
+{
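+    /* Pop the slot index and class that saveValueRanges stored. */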
+    uintptr_t start = stack.pop();
+    js::Class *clasp = reinterpret_cast<js::Class *>(stack.pop());
+
+    JS_ASSERT(obj->getClass() == clasp ||
+              (clasp == &ArrayClass && obj->getClass() == &SlowArrayClass));
+
+    if (clasp == &ArrayClass) {
+        if (obj->getClass() != &ArrayClass)
+            return false;
+
+        uint32_t initlen = obj->getDenseArrayInitializedLength();
+        HeapValue *vp = obj->getDenseArrayElements();
+        if (start < initlen) {
+            *vpp = vp + start;
+            *endp = vp + initlen;
+        } else {
+            /* The object shrank, so no scanning is needed. */
+            *vpp = *endp = vp;
+        }
+    } else {
+        HeapValue *vp = obj->fixedSlots();
+        unsigned nfixed = obj->numFixedSlots();
+        unsigned nslots = obj->slotSpan();
+        if (start < nfixed) {
+            *vpp = vp + start;
+            *endp = vp + Min(nfixed, nslots);
+        } else if (start < nslots) {
+            *vpp = obj->slots + start - nfixed;
+            *endp = obj->slots + nslots - nfixed;
+        } else {
+            /* The object shrank, so no scanning is needed. */
+            *vpp = *endp = obj->slots;
+        }
+    }
+
+    JS_ASSERT(*vpp <= *endp);
+    return true;
+}
+
 inline void
-GCMarker::processMarkStackTop()
+GCMarker::processMarkStackTop(SliceBudget &budget)
 {
     /*
      * The function uses explicit goto and implements the scanning of the
      * object directly. It allows to eliminate the tail recursion and
      * significantly improve the marking performance, see bug 641025.
      */
     HeapValue *vp, *end;
     JSObject *obj;
@@ -880,107 +1030,156 @@ GCMarker::processMarkStackTop()
         JS_ASSERT((addr3 - addr2) % sizeof(Value) == 0);
         vp = reinterpret_cast<HeapValue *>(addr2);
         end = reinterpret_cast<HeapValue *>(addr3);
         goto scan_value_array;
     }
 
     if (tag == ObjectTag) {
         obj = reinterpret_cast<JSObject *>(addr);
+        JS_COMPARTMENT_ASSERT(runtime, obj);
         goto scan_obj;
     }
 
     if (tag == TypeTag) {
         ScanTypeObject(this, reinterpret_cast<types::TypeObject *>(addr));
+    } else if (tag == SavedValueArrayTag) {
+        JS_ASSERT(!(addr & Cell::CellMask));
+        obj = reinterpret_cast<JSObject *>(addr);
+        if (restoreValueArray(obj, (void **)&vp, (void **)&end))
+            goto scan_value_array;
+        else
+            goto scan_obj;
     } else {
         JS_ASSERT(tag == XmlTag);
         MarkChildren(this, reinterpret_cast<JSXML *>(addr));
     }
+    budget.step();
     return;
 
   scan_value_array:
     JS_ASSERT(vp <= end);
     while (vp != end) {
+        budget.step();
+        if (budget.isOverBudget()) {
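+            /* Out of time: save our position so the next slice resumes here. */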
+            pushValueArray(obj, vp, end);
+            return;
+        }
+
         const Value &v = *vp++;
         if (v.isString()) {
             JSString *str = v.toString();
+            JS_COMPARTMENT_ASSERT_STR(runtime, str);
             if (str->markIfUnmarked())
                 ScanString(this, str);
         } else if (v.isObject()) {
             JSObject *obj2 = &v.toObject();
+            JS_COMPARTMENT_ASSERT(runtime, obj2);
             if (obj2->markIfUnmarked(getMarkColor())) {
-                PushValueArray(this, obj, vp, end);
+                pushValueArray(obj, vp, end);
                 obj = obj2;
                 goto scan_obj;
             }
         }
     }
     return;
 
   scan_obj:
     {
+        JS_COMPARTMENT_ASSERT(runtime, obj);
+
+        budget.step();
+        if (budget.isOverBudget()) {
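+            /* Out of time: re-push the object so the next slice rescans it. */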
+            pushObject(obj);
+            return;
+        }
+
         types::TypeObject *type = obj->typeFromGC();
         PushMarkStack(this, type);
 
         Shape *shape = obj->lastProperty();
         PushMarkStack(this, shape);
 
         /* Call the trace hook if necessary. */
         Class *clasp = shape->getObjectClass();
         if (clasp->trace) {
             if (clasp == &ArrayClass) {
                 JS_ASSERT(!shape->isNative());
                 vp = obj->getDenseArrayElements();
                 end = vp + obj->getDenseArrayInitializedLength();
                 goto scan_value_array;
+            } else {
+                JS_ASSERT_IF(runtime->gcIncrementalState != NO_INCREMENTAL,
+                             clasp->flags & JSCLASS_IMPLEMENTS_BARRIERS);
             }
             clasp->trace(this, obj);
         }
 
         if (!shape->isNative())
             return;
 
         unsigned nslots = obj->slotSpan();
         vp = obj->fixedSlots();
         if (obj->slots) {
             unsigned nfixed = obj->numFixedSlots();
             if (nslots > nfixed) {
-                PushValueArray(this, obj, vp, vp + nfixed);
+                pushValueArray(obj, vp, vp + nfixed);
                 vp = obj->slots;
                 end = vp + (nslots - nfixed);
                 goto scan_value_array;
             }
         }
         JS_ASSERT(nslots <= obj->numFixedSlots());
         end = vp + nslots;
         goto scan_value_array;
     }
 }
 
-void
-GCMarker::drainMarkStack()
+bool
+GCMarker::drainMarkStack(SliceBudget &budget)
 {
+#ifdef DEBUG
     JSRuntime *rt = runtime;
-    rt->gcCheckCompartment = rt->gcCurrentCompartment;
+
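+    /* RAII, so gcCheckCompartment is cleared on every early return below. */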
+    struct AutoCheckCompartment {
+        JSRuntime *runtime;
+        AutoCheckCompartment(JSRuntime *rt) : runtime(rt) {
+            runtime->gcCheckCompartment = runtime->gcCurrentCompartment;
+        }
+        ~AutoCheckCompartment() { runtime->gcCheckCompartment = NULL; }
+    } acc(rt);
+#endif
+
+    if (budget.isOverBudget())
+        return false;
 
     for (;;) {
-        while (!stack.isEmpty())
-            processMarkStackTop();
+        while (!stack.isEmpty()) {
+            processMarkStackTop(budget);
+            if (budget.isOverBudget()) {
+                saveValueRanges();
+                return false;
+            }
+        }
+
         if (!hasDelayedChildren())
             break;
 
         /*
          * Mark children of things that caused too deep recursion during the
          * above tracing. Don't do this until we're done with everything
          * else.
          */
-        markDelayedChildren();
+        if (!markDelayedChildren(budget)) {
+            saveValueRanges();
+            return false;
+        }
     }
 
-    rt->gcCheckCompartment = NULL;
+    return true;
 }
 
 void
 TraceChildren(JSTracer *trc, void *thing, JSGCTraceKind kind)
 {
     switch (kind) {
       case JSTRACE_OBJECT:
         MarkChildren(trc, static_cast<JSObject *>(thing));
--- a/js/src/jsgcmark.h
+++ b/js/src/jsgcmark.h
@@ -141,17 +141,21 @@ MarkChildren(JSTracer *trc, JSObject *ob
 /*
  * Trace through the shape and any shapes it contains to mark
  * non-shape children. This is exposed to the JS API as
  * JS_TraceShapeCycleCollectorChildren.
  */
 void
 MarkCycleCollectorChildren(JSTracer *trc, const Shape *shape);
 
+void
+PushArena(GCMarker *gcmarker, ArenaHeader *aheader);
+
 /*** Generic ***/
+
 /*
  * The Mark() functions interface should only be used by code that must be
  * templated.  Other uses should use the more specific, type-named functions.
  */
 
 inline void
 Mark(JSTracer *trc, HeapValue *v, const char *name)
 {
--- a/js/src/jsinfer.cpp
+++ b/js/src/jsinfer.cpp
@@ -2190,17 +2190,17 @@ TypeCompartment::nukeTypes(JSContext *cx
 
     /*
      * We may or may not be under the GC. In either case don't allocate, and
      * acquire the GC lock so we can update inferenceEnabled for all contexts.
      */
 
 #ifdef JS_THREADSAFE
     AutoLockGC maybeLock;
-    if (!cx->runtime->gcMarkAndSweep)
+    if (!cx->runtime->gcRunning)
         maybeLock.lock(cx->runtime);
 #endif
 
     inferenceEnabled = false;
 
     /* Update the cached inferenceEnabled bit in all contexts. */
     for (JSCList *cl = cx->runtime->contextList.next;
          cl != &cx->runtime->contextList;
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -1142,17 +1142,17 @@ inline InterpreterFrames::~InterpreterFr
 void
 js::AssertValidPropertyCacheHit(JSContext *cx,
                                 JSObject *start, JSObject *found,
                                 PropertyCacheEntry *entry)
 {
     jsbytecode *pc;
     cx->stack.currentScript(&pc);
 
-    uint32_t sample = cx->runtime->gcNumber;
+    uint64_t sample = cx->runtime->gcNumber;
     PropertyCacheEntry savedEntry = *entry;
 
     PropertyName *name = GetNameFromBytecode(cx, pc, JSOp(*pc), js_CodeSpec[*pc]);
 
     JSObject *obj, *pobj;
     JSProperty *prop;
     JSBool ok;
 
@@ -1249,17 +1249,17 @@ TypeCheckNextBytecode(JSContext *cx, JSS
 #endif
 }
 
 JS_NEVER_INLINE bool
 js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
 {
     JSAutoResolveFlags rf(cx, RESOLVE_INFER);
 
-    gc::VerifyBarriers(cx, true);
+    gc::MaybeVerifyBarriers(cx, true);
 
     JS_ASSERT(!cx->compartment->activeAnalysis);
 
 #if JS_THREADED_INTERP
 #define CHECK_PCCOUNT_INTERRUPTS() JS_ASSERT_IF(script->pcCounters, jumpTable == interruptJumpTable)
 #else
 #define CHECK_PCCOUNT_INTERRUPTS() JS_ASSERT_IF(script->pcCounters, switchMask == -1)
 #endif
@@ -1284,17 +1284,17 @@ js::Interpret(JSContext *cx, StackFrame 
 
     register void * const *jumpTable = normalJumpTable;
 
     typedef GenericInterruptEnabler<void * const *> InterruptEnabler;
     InterruptEnabler interruptEnabler(&jumpTable, interruptJumpTable);
 
 # define DO_OP()            JS_BEGIN_MACRO                                    \
                                 CHECK_PCCOUNT_INTERRUPTS();                   \
-                                js::gc::VerifyBarriers(cx);                   \
+                                js::gc::MaybeVerifyBarriers(cx);              \
                                 JS_EXTENSION_(goto *jumpTable[op]);           \
                             JS_END_MACRO
 # define DO_NEXT_OP(n)      JS_BEGIN_MACRO                                    \
                                 TypeCheckNextBytecode(cx, script, n, regs);   \
                                 op = (JSOp) *(regs.pc += (n));                \
                                 DO_OP();                                      \
                             JS_END_MACRO
 
@@ -1561,17 +1561,17 @@ js::Interpret(JSContext *cx, StackFrame 
         JS_ASSERT(js_CodeSpec[op].length == 1);
         len = 1;
       advance_pc:
         regs.pc += len;
         op = (JSOp) *regs.pc;
 
       do_op:
         CHECK_PCCOUNT_INTERRUPTS();
-        js::gc::VerifyBarriers(cx);
+        js::gc::MaybeVerifyBarriers(cx);
         switchOp = intN(op) | switchMask;
       do_switch:
         switch (switchOp) {
 #endif
 
 #if JS_THREADED_INTERP
   interrupt:
 #else /* !JS_THREADED_INTERP */
@@ -4419,11 +4419,11 @@ END_CASE(JSOP_ARRAYPUSH)
 #ifdef JS_METHODJIT
     /*
      * This path is used when it's guaranteed the method can be finished
      * inside the JIT.
      */
   leave_on_safe_point:
 #endif
 
-    gc::VerifyBarriers(cx, true);
+    gc::MaybeVerifyBarriers(cx, true);
     return interpReturnOK;
 }
--- a/js/src/jsiter.cpp
+++ b/js/src/jsiter.cpp
@@ -84,17 +84,17 @@ using namespace js;
 using namespace js::gc;
 
 static void iterator_finalize(JSContext *cx, JSObject *obj);
 static void iterator_trace(JSTracer *trc, JSObject *obj);
 static JSObject *iterator_iterator(JSContext *cx, JSObject *obj, JSBool keysonly);
 
 Class js::IteratorClass = {
     "Iterator",
-    JSCLASS_HAS_PRIVATE |
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_CACHED_PROTO(JSProto_Iterator),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
     JS_ConvertStub,
@@ -1414,17 +1414,17 @@ generator_trace(JSTracer *trc, JSObject 
         return;
 
     JS_ASSERT(gen->liveFrame() == gen->floatingFrame());
     MarkGenerator(trc, gen);
 }
 
 Class js::GeneratorClass = {
     "Generator",
-    JSCLASS_HAS_PRIVATE,
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS,
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
     JS_ConvertStub,
     generator_finalize,
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -2758,16 +2758,23 @@ NewObject(JSContext *cx, Class *clasp, t
     HeapValue *slots;
     if (!PreallocateObjectDynamicSlots(cx, shape, &slots))
         return NULL;
 
     JSObject *obj = JSObject::create(cx, kind, shape, typeRoot, slots);
     if (!obj)
         return NULL;
 
+    /*
+     * This will prevent an already-running incremental GC from performing any
+     * more slices, and it will disable any future incremental GCs.
+     */
+    if (clasp->trace && !(clasp->flags & JSCLASS_IMPLEMENTS_BARRIERS))
+        cx->runtime->gcIncrementalEnabled = false;
+
     Probes::createObject(cx, obj);
     return obj;
 }
 
 JSObject *
 js::NewObjectWithGivenProto(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent,
                             gc::AllocKind kind)
 {
@@ -3470,27 +3477,27 @@ JSObject::TradeGuts(JSContext *cx, JSObj
         if (a->isNative())
             a->shape_->setNumFixedSlots(reserved.newafixed);
         else
             a->shape_ = reserved.newashape;
 
         a->slots = reserved.newaslots;
         a->initSlotRange(0, reserved.bvals.begin(), bcap);
         if (a->hasPrivate())
-            a->setPrivate(bpriv);
+            a->initPrivate(bpriv);
 
         if (b->isNative())
             b->shape_->setNumFixedSlots(reserved.newbfixed);
         else
             b->shape_ = reserved.newbshape;
 
         b->slots = reserved.newbslots;
         b->initSlotRange(0, reserved.avals.begin(), acap);
         if (b->hasPrivate())
-            b->setPrivate(apriv);
+            b->initPrivate(apriv);
 
         /* Make sure the destructor for reserved doesn't free the slots. */
         reserved.newaslots = NULL;
         reserved.newbslots = NULL;
     }
 
 #ifdef JSGC_GENERATIONAL
     Shape::writeBarrierPost(a->shape_, &a->shape_);
--- a/js/src/jsobj.h
+++ b/js/src/jsobj.h
@@ -949,16 +949,17 @@ struct JSObject : js::gc::Cell
 
     inline js::GlobalObject &global() const;
 
     /* Private data accessors. */
 
     inline bool hasPrivate() const;
     inline void *getPrivate() const;
     inline void setPrivate(void *data);
+    inline void initPrivate(void *data);
 
     /* Access private data for an object with a known number of fixed slots. */
     inline void *getPrivate(size_t nfixed) const;
 
     /* N.B. Infallible: NULL means 'no principal', not an error. */
     inline JSPrincipals *principals(JSContext *cx);
 
     /* Remove the type (and prototype) or parent from a new object. */
@@ -1350,16 +1351,17 @@ struct JSObject : js::gc::Cell
     static bool thisObject(JSContext *cx, const js::Value &v, js::Value *vp);
 
     bool swap(JSContext *cx, JSObject *other);
 
     inline void initArrayClass();
 
     static inline void writeBarrierPre(JSObject *obj);
     static inline void writeBarrierPost(JSObject *obj, void *addr);
+    static inline void readBarrier(JSObject *obj);
     inline void privateWriteBarrierPre(void **oldval);
     inline void privateWriteBarrierPost(void **oldval);
 
     /*
      * In addition to the generic object interface provided by JSObject,
      * specific types of objects may provide additional operations. To access,
      * these addition operations, callers should use the pattern:
      *
--- a/js/src/jsobjinlines.h
+++ b/js/src/jsobjinlines.h
@@ -114,16 +114,22 @@ JSObject::setPrivate(void *data)
 {
     void **pprivate = &privateRef(numFixedSlots());
 
     privateWriteBarrierPre(pprivate);
     *pprivate = data;
     privateWriteBarrierPost(pprivate);
 }
 
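+/*
+ * As setPrivate, but without the write barriers; callers must ensure that
+ * no barrier is needed, as during object initialization.
+ */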
+inline void
+JSObject::initPrivate(void *data)
+{
+    privateRef(numFixedSlots()) = data;
+}
+
 inline bool
 JSObject::enumerate(JSContext *cx, JSIterateOp iterop, js::Value *statep, jsid *idp)
 {
     JSNewEnumerateOp op = getOps()->enumerate;
     return (op ? op : JS_EnumerateState)(cx, this, iterop, statep, idp);
 }
 
 inline bool
@@ -597,30 +603,42 @@ JSObject::initDenseArrayElements(uintN d
 
 inline void
 JSObject::moveDenseArrayElements(uintN dstStart, uintN srcStart, uintN count)
 {
     JS_ASSERT(dstStart + count <= getDenseArrayCapacity());
     JS_ASSERT(srcStart + count <= getDenseArrayInitializedLength());
 
     /*
-     * Use a custom write barrier here since it's performance sensitive. We
-     * only want to barrier the elements that are being overwritten.
-     */
-    uintN markStart, markEnd;
-    if (dstStart > srcStart) {
-        markStart = js::Max(srcStart + count, dstStart);
-        markEnd = dstStart + count;
+     * Using memmove here would skip the write barriers, which we must not do.
+     * Consider an array containing [A, B, C] in the following situation:
+     *
+     * 1. Incremental GC marks slot 0 of array (i.e., A), then returns to JS code.
+     * 2. JS code moves slots 1..2 into slots 0..1, so it contains [B, C, C].
+     * 3. Incremental GC finishes by marking slots 1 and 2 (i.e., C).
+     *
+     * Since normal marking never happens on B, it is very important that the
+     * write barrier is invoked here on B, despite the fact that it exists in
+     * the array before and after the move.
+     */
+    if (compartment()->needsBarrier()) {
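+        /* As with memmove, copy in the order that cannot clobber unread slots. */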
+        if (dstStart < srcStart) {
+            js::HeapValue *dst = elements + dstStart;
+            js::HeapValue *src = elements + srcStart;
+            for (unsigned i = 0; i < count; i++, dst++, src++)
+                *dst = *src;
+        } else {
+            js::HeapValue *dst = elements + dstStart + count - 1;
+            js::HeapValue *src = elements + srcStart + count - 1;
+            for (unsigned i = 0; i < count; i++, dst--, src--)
+                *dst = *src;
+        }
     } else {
-        markStart = dstStart;
-        markEnd = js::Min(dstStart + count, srcStart);
+        memmove(elements + dstStart, elements + srcStart, count * sizeof(js::Value));
     }
-    prepareElementRangeForOverwrite(markStart, markEnd);
-
-    memmove(elements + dstStart, elements + srcStart, count * sizeof(js::Value));
 }
 
 inline void
 JSObject::moveDenseArrayElementsUnbarriered(uintN dstStart, uintN srcStart, uintN count)
 {
     JS_ASSERT(!compartment()->needsBarrier());
 
     JS_ASSERT(dstStart + count <= getDenseArrayCapacity());
@@ -2122,13 +2140,25 @@ JSObject::writeBarrierPre(JSObject *obj)
     if (comp->needsBarrier()) {
         JS_ASSERT(!comp->rt->gcRunning);
         MarkObjectUnbarriered(comp->barrierTracer(), obj, "write barrier");
     }
 #endif
 }
 
 inline void
+JSObject::readBarrier(JSObject *obj)
+{
+#ifdef JSGC_INCREMENTAL
+    JSCompartment *comp = obj->compartment();
+    if (comp->needsBarrier()) {
+        JS_ASSERT(!comp->rt->gcRunning);
+        MarkObjectUnbarriered(comp->barrierTracer(), obj, "read barrier");
+    }
+#endif
+}
+
+inline void
 JSObject::writeBarrierPost(JSObject *obj, void *addr)
 {
 }
 
 #endif /* jsobjinlines_h___ */
--- a/js/src/jspropertycache.cpp
+++ b/js/src/jspropertycache.cpp
@@ -277,17 +277,17 @@ PropertyCache::purge(JSContext *cx)
   { static FILE *fp;
     if (!fp)
         fp = fopen("/tmp/propcache.stats", "w");
     if (fp) {
         fputs("Property cache stats for ", fp);
 #ifdef JS_THREADSAFE
         fprintf(fp, "thread %lu, ", (unsigned long) cx->thread->id);
 #endif
-        fprintf(fp, "GC %u\n", cx->runtime->gcNumber);
+        fprintf(fp, "GC %lu\n", (unsigned long)cx->runtime->gcNumber);
 
 # define P(mem) fprintf(fp, "%11s %10lu\n", #mem, (unsigned long)mem)
         P(fills);
         P(nofills);
         P(rofills);
         P(disfills);
         P(oddfills);
         P(add2dictfills);
--- a/js/src/jsproxy.cpp
+++ b/js/src/jsproxy.cpp
@@ -1306,17 +1306,17 @@ static JSType
 proxy_TypeOf(JSContext *cx, JSObject *proxy)
 {
     JS_ASSERT(proxy->isProxy());
     return Proxy::typeOf(cx, proxy);
 }
 
 JS_FRIEND_DATA(Class) js::ObjectProxyClass = {
     "Proxy",
-    Class::NON_NATIVE | JSCLASS_HAS_RESERVED_SLOTS(4),
+    Class::NON_NATIVE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(4),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
     proxy_Convert,
     proxy_Finalize,          /* finalize    */
@@ -1362,17 +1362,17 @@ JS_FRIEND_DATA(Class) js::ObjectProxyCla
         proxy_Fix,           /* fix             */
         NULL,                /* thisObject      */
         NULL,                /* clear           */
     }
 };
 
 JS_FRIEND_DATA(Class) js::OuterWindowProxyClass = {
     "Proxy",
-    Class::NON_NATIVE | JSCLASS_HAS_RESERVED_SLOTS(4),
+    Class::NON_NATIVE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(4),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
     JS_ConvertStub,
     proxy_Finalize,          /* finalize    */
@@ -1440,17 +1440,17 @@ proxy_Construct(JSContext *cx, uintN arg
     JSObject *proxy = &JS_CALLEE(cx, vp).toObject();
     JS_ASSERT(proxy->isProxy());
     bool ok = Proxy::construct(cx, proxy, argc, JS_ARGV(cx, vp), vp);
     return ok;
 }
 
 JS_FRIEND_DATA(Class) js::FunctionProxyClass = {
     "Proxy",
-    Class::NON_NATIVE | JSCLASS_HAS_RESERVED_SLOTS(6),
+    Class::NON_NATIVE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(6),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
     JS_ConvertStub,
     NULL,                    /* finalize */
--- a/js/src/jstypedarray.cpp
+++ b/js/src/jstypedarray.cpp
@@ -2175,16 +2175,17 @@ Class ArrayBuffer::slowClass = {
     JS_ResolveStub,
     JS_ConvertStub,
     JS_FinalizeStub
 };
 
 Class js::ArrayBufferClass = {
     "ArrayBuffer",
     JSCLASS_HAS_PRIVATE |
+    JSCLASS_IMPLEMENTS_BARRIERS |
     Class::NON_NATIVE |
     JSCLASS_HAS_RESERVED_SLOTS(ARRAYBUFFER_RESERVED_SLOTS) |
     JSCLASS_HAS_CACHED_PROTO(JSProto_ArrayBuffer),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
@@ -2293,17 +2294,17 @@ JSFunctionSpec _typedArray::jsfuncs[] = 
     JS_ConvertStub,                                                            \
     JS_FinalizeStub                                                            \
 }
 
 #define IMPL_TYPED_ARRAY_FAST_CLASS(_typedArray)                               \
 {                                                                              \
     #_typedArray,                                                              \
     JSCLASS_HAS_RESERVED_SLOTS(TypedArray::FIELD_MAX) |                        \
-    JSCLASS_HAS_PRIVATE |                                                      \
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |                        \
     JSCLASS_FOR_OF_ITERATION |                                                 \
     Class::NON_NATIVE,                                                         \
     JS_PropertyStub,         /* addProperty */                                 \
     JS_PropertyStub,         /* delProperty */                                 \
     JS_PropertyStub,         /* getProperty */                                 \
     JS_StrictPropertyStub,   /* setProperty */                                 \
     JS_EnumerateStub,                                                          \
     JS_ResolveStub,                                                            \
--- a/js/src/jsweakmap.cpp
+++ b/js/src/jsweakmap.cpp
@@ -57,28 +57,28 @@
 using namespace js;
 
 namespace js {
 
 bool
 WeakMapBase::markAllIteratively(JSTracer *tracer)
 {
     bool markedAny = false;
-    JSRuntime *rt = tracer->context->runtime;
+    JSRuntime *rt = tracer->runtime;
     for (WeakMapBase *m = rt->gcWeakMapList; m; m = m->next) {
         if (m->markIteratively(tracer))
             markedAny = true;
     }
     return markedAny;
 }
 
 void
 WeakMapBase::sweepAll(JSTracer *tracer)
 {
-    JSRuntime *rt = tracer->context->runtime;
+    JSRuntime *rt = tracer->runtime;
     for (WeakMapBase *m = rt->gcWeakMapList; m; m = m->next)
         m->sweep(tracer);
 }
 
 void
 WeakMapBase::traceAllMappings(WeakMapTracer *tracer)
 {
     JSRuntime *rt = tracer->runtime;
@@ -309,34 +309,42 @@ WeakMap_mark(JSTracer *trc, JSObject *ob
 {
     if (ObjectValueMap *map = GetObjectMap(obj))
         map->trace(trc);
 }
 
 static void
 WeakMap_finalize(JSContext *cx, JSObject *obj)
 {
-    ObjectValueMap *map = GetObjectMap(obj);
-    cx->delete_(map);
+    if (ObjectValueMap *map = GetObjectMap(obj)) {
+        map->check();
+#ifdef DEBUG
+        map->~ObjectValueMap();
+        memset(map, 0xdc, sizeof(ObjectValueMap));
+        cx->free_(map);
+#else
+        cx->delete_(map);
+#endif
+    }
 }
 
 static JSBool
 WeakMap_construct(JSContext *cx, uintN argc, Value *vp)
 {
     JSObject *obj = NewBuiltinClassInstance(cx, &WeakMapClass);
     if (!obj)
         return false;
 
     vp->setObject(*obj);
     return true;
 }
 
 Class js::WeakMapClass = {
     "WeakMap",
-    JSCLASS_HAS_PRIVATE |
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_CACHED_PROTO(JSProto_WeakMap),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
     JS_ConvertStub,
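
The DEBUG branch in WeakMap_finalize above replaces a plain delete_ with destroy, poison, free, so that touching a swept map fails loudly instead of silently reading stale entries. The same pattern as a reusable helper, sketched (PoisonDelete is hypothetical, not part of this patch):

    template <typename T>
    static void
    PoisonDelete(JSContext *cx, T *p)
    {
    #ifdef DEBUG
        p->~T();                     /* run the destructor by hand */
        memset(p, 0xdc, sizeof(T));  /* stamp a recognizable dead pattern */
        cx->free_(p);                /* release the raw storage */
    #else
        cx->delete_(p);
    #endif
    }
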
--- a/js/src/jsweakmap.h
+++ b/js/src/jsweakmap.h
@@ -122,17 +122,17 @@ class WeakMapBase {
             // many keys as possible have been marked, and add ourselves to the list of
             // known-live WeakMaps to be scanned in the iterative marking phase, by
             // markAllIteratively.
             JS_ASSERT(!tracer->eagerlyTraceWeakMaps);
 
             // Add ourselves to the list if we are not already in the list. We can already
             // be in the list if the weak map is marked more than once due to delayed marking.
             if (next == WeakMapNotInList) {
-                JSRuntime *rt = tracer->context->runtime;
+                JSRuntime *rt = tracer->runtime;
                 next = rt->gcWeakMapList;
                 rt->gcWeakMapList = this;
             }
         } else {
             // If we're not actually doing garbage collection, the keys won't be marked
             // nicely as needed by the true ephemeral marking algorithm --- custom tracers
             // such as the cycle collector must use their own means for cycle detection.
             // So here we do a conservative approximation: pretend all keys are live.
@@ -151,16 +151,18 @@ class WeakMapBase {
 
     // Remove entries whose keys are dead from all weak maps marked as live in this
     // garbage collection.
     static void sweepAll(JSTracer *tracer);
 
     // Trace all delayed weak map bindings. Used by the cycle collector.
     static void traceAllMappings(WeakMapTracer *tracer);
 
+    void check() { JS_ASSERT(next == WeakMapNotInList); }
+
     // Remove everything from the live weak map list.
     static void resetWeakMapList(JSRuntime *rt);
 
   protected:
     // Instance member functions called by the above. Instantiations of WeakMap override
     // these with definitions appropriate for their Key and Value types.
     virtual void nonMarkingTrace(JSTracer *tracer) = 0;
     virtual bool markIteratively(JSTracer *tracer) = 0;
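
The list linkage above (gcWeakMapList, WeakMapNotInList) exists to drive ephemeron marking to a fixpoint: marking one map's values can make keys in another map live, so the collector must rescan until a full pass marks nothing new. The shape of the driving loop, sketched (the real loop lives in the GC proper):

    /* after the ordinary mark phase: */
    while (WeakMapBase::markAllIteratively(trc)) {
        /* a pass marked something; newly marked values may have made
           keys in other weak maps live, so scan the registered maps again */
    }
    /* later, during sweeping: */
    WeakMapBase::sweepAll(trc);
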
--- a/js/src/jsxml.cpp
+++ b/js/src/jsxml.cpp
@@ -5364,17 +5364,17 @@ js_ConcatenateXML(JSContext *cx, JSObjec
     vp->setObject(*listobj);
 out:
     js_LeaveLocalRootScopeWithResult(cx, *vp);
     return ok;
 }
 
 JS_FRIEND_DATA(Class) js::XMLClass = {
     js_XML_str,
-    JSCLASS_HAS_PRIVATE |
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_CACHED_PROTO(JSProto_XML),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
     xml_convert,
@@ -7917,17 +7917,17 @@ xmlfilter_finalize(JSContext *cx, JSObje
     if (!filter)
         return;
 
     cx->delete_(filter);
 }
 
 Class js_XMLFilterClass = {
     "XMLFilter",
-    JSCLASS_HAS_PRIVATE | JSCLASS_IS_ANONYMOUS,
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_IS_ANONYMOUS,
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
     JS_ConvertStub,
     xmlfilter_finalize,
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -3919,17 +3919,17 @@ mjit::Compiler::emitStubCall(void *ptr, 
     JaegerSpew(JSpew_Insns, " ---- END STUB CALL ---- \n");
     return cl;
 }
 
 void
 mjit::Compiler::interruptCheckHelper()
 {
     Jump jump;
-    if (cx->runtime->gcZeal() >= js::gc::ZealVerifierThreshold) {
+    if (cx->runtime->gcZeal() == js::gc::ZealVerifierValue) {
         /* For barrier verification, always take the interrupt so we can verify. */
         jump = masm.jump();
     } else {
         void *interrupt = (void*) &cx->runtime->interrupt;
 #if defined(JS_CPU_X86) || defined(JS_CPU_ARM) || defined(JS_CPU_MIPS)
         jump = masm.branch32(Assembler::NotEqual, AbsoluteAddress(interrupt), Imm32(0));
 #else
         /* Handle processors that can't load from absolute addresses. */
@@ -6887,17 +6887,19 @@ mjit::Compiler::jsop_regexp()
     JSObject *obj = script->getRegExp(GET_UINT32_INDEX(PC));
     RegExpStatics *res = globalObj ? globalObj->getRegExpStatics() : NULL;
 
     if (!globalObj ||
         &obj->global() != globalObj ||
         !cx->typeInferenceEnabled() ||
         analysis->localsAliasStack() ||
         types::TypeSet::HasObjectFlags(cx, globalObj->getType(cx),
-                                       types::OBJECT_FLAG_REGEXP_FLAGS_SET)) {
+                                       types::OBJECT_FLAG_REGEXP_FLAGS_SET) ||
+        cx->runtime->gcIncrementalState == gc::MARK)
+    {
         prepareStubCall(Uses(0));
         masm.move(ImmPtr(obj), Registers::ArgReg1);
         INLINE_STUBCALL(stubs::RegExp, REJOIN_FALLTHROUGH);
         frame.pushSynced(JSVAL_TYPE_OBJECT);
         return true;
     }
 
     RegExpObject *reobj = &obj->asRegExp();
@@ -6941,20 +6943,21 @@ mjit::Compiler::jsop_regexp()
                     frame.push(ObjectValue(*obj));
                     return true;
                 }
             }
         }
     }
 
     /*
-     * Force creation of the RegExpShared in the script's RegExpObject
-     * so that we grab it in the getNewObject template copy. Note that
-     * JIT code is discarded on every GC, which permits us to burn in
-     * the pointer to the RegExpShared.
+     * Force creation of the RegExpShared in the script's RegExpObject so that
+     * we grab it in the getNewObject template copy. Note that JIT code is
+     * discarded on every GC, which permits us to burn in the pointer to the
+     * RegExpShared. We don't do this during an incremental GC, since we
+     * don't discard JIT code after every marking slice.
      */
     if (!reobj->getShared(cx))
         return false;
 
     RegisterID result = frame.allocReg();
     Jump emptyFreeList = masm.getNewObject(cx, result, obj);
 
     stubcc.linkExit(emptyFreeList, Uses(0));
--- a/js/src/methodjit/Compiler.h
+++ b/js/src/methodjit/Compiler.h
@@ -479,17 +479,17 @@ private:
     Label argsCheckFallthrough;
     Jump argsCheckJump;
 #endif
     bool debugMode_;
     bool inlining_;
     bool hasGlobalReallocation;
     bool oomInVector;       // True if we have OOM'd appending to a vector. 
     bool overflowICSpace;   // True if we added a constant pool in a reserved space.
-    uint32_t gcNumber;
+    uint64_t gcNumber;
     enum { NoApplyTricks, LazyArgsObj } applyTricks;
     PCLengthEntry *pcLengths;
 
     Compiler *thisFromCtor() { return this; }
 
     friend class CompilerAllocPolicy;
   public:
     Compiler(JSContext *cx, JSScript *outerScript, unsigned chunkIndex, bool isConstructing);
--- a/js/src/methodjit/MethodJIT.h
+++ b/js/src/methodjit/MethodJIT.h
@@ -397,17 +397,17 @@ struct RecompilationMonitor
      * If either inline frame expansion or recompilation occurs, then ICs and
      * stubs should not depend on the frame or JITs being intact. The two are
      * separated for logging.
      */
     unsigned recompilations;
     unsigned frameExpansions;
 
     /* If a GC occurs it may discard jit code on the stack. */
-    unsigned gcNumber;
+    uint64_t gcNumber;
 
     RecompilationMonitor(JSContext *cx)
         : cx(cx),
           recompilations(cx->compartment->types.recompilations),
           frameExpansions(cx->compartment->types.frameExpansions),
           gcNumber(cx->runtime->gcNumber)
     {}
 
--- a/js/src/methodjit/PolyIC.cpp
+++ b/js/src/methodjit/PolyIC.cpp
@@ -97,17 +97,17 @@ class PICLinker : public LinkerHelper
 class PICStubCompiler : public BaseCompiler
 {
   protected:
     const char *type;
     VMFrame &f;
     JSScript *script;
     ic::PICInfo &pic;
     void *stub;
-    uint32_t gcNumber;
+    uint64_t gcNumber;
 
   public:
     bool canCallHook;
 
     PICStubCompiler(const char *type, VMFrame &f, JSScript *script, ic::PICInfo &pic, void *stub)
       : BaseCompiler(f.cx), type(type), f(f), script(script), pic(pic), stub(stub),
         gcNumber(f.cx->runtime->gcNumber), canCallHook(pic.canCallHook)
     { }
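
Widening gcNumber to uint64_t matters because these fields implement a guard: record the counter before an operation that can GC, and treat any change as "JIT code and ICs may be gone". Incremental slices bump the counter far more often than full GCs did, so a narrower counter could wrap and make a stale pointer look fresh. The pattern, sketched:

    uint64_t before = f.cx->runtime->gcNumber;
    /* ... call something that may run a GC or a GC slice ... */
    if (f.cx->runtime->gcNumber != before)
        return Lookup_Uncacheable;  /* illustrative: bail out and regenerate */
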
--- a/js/src/methodjit/StubCalls.cpp
+++ b/js/src/methodjit/StubCalls.cpp
@@ -873,17 +873,17 @@ stubs::DebuggerStatement(VMFrame &f, jsb
             break;
         }
     }
 }
 
 void JS_FASTCALL
 stubs::Interrupt(VMFrame &f, jsbytecode *pc)
 {
-    gc::VerifyBarriers(f.cx);
+    gc::MaybeVerifyBarriers(f.cx);
 
     if (!js_HandleExecutionInterrupt(f.cx))
         THROW();
 }
 
 void JS_FASTCALL
 stubs::RecompileForInline(VMFrame &f)
 {
--- a/js/src/shell/js.cpp
+++ b/js/src/shell/js.cpp
@@ -1281,16 +1281,17 @@ GC(JSContext *cx, uintN argc, jsval *vp)
 static const struct ParamPair {
     const char      *name;
     JSGCParamKey    param;
 } paramMap[] = {
     {"maxBytes",            JSGC_MAX_BYTES },
     {"maxMallocBytes",      JSGC_MAX_MALLOC_BYTES},
     {"gcBytes",             JSGC_BYTES},
     {"gcNumber",            JSGC_NUMBER},
+    {"sliceTimeBudget",     JSGC_SLICE_TIME_BUDGET}
 };
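
With sliceTimeBudget exposed here (and main() below switching the shell to JSGC_MODE_INCREMENTAL), an embedding can opt in with two calls. A minimal sketch using only parameter names from this patch; the 10 ms figure merely mirrors the new gc_incremental_slice_ms pref default:

    JS_SetGCParameter(rt, JSGC_MODE, JSGC_MODE_INCREMENTAL);
    JS_SetGCParameter(rt, JSGC_SLICE_TIME_BUDGET, 10);  /* ~10 ms per slice */
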
 
 static JSBool
 GCParameter(JSContext *cx, uintN argc, jsval *vp)
 {
     JSString *str;
     if (argc == 0) {
         str = JS_ValueToString(cx, JSVAL_VOID);
@@ -1422,16 +1423,45 @@ ScheduleGC(JSContext *cx, uintN argc, js
         return JS_FALSE;
     if (argc == 2)
         compartment = js_ValueToBoolean(vp[3]);
 
     JS_ScheduleGC(cx, count, compartment);
     *vp = JSVAL_VOID;
     return JS_TRUE;
 }
+
+static JSBool
+VerifyBarriers(JSContext *cx, uintN argc, jsval *vp)
+{
+    gc::VerifyBarriers(cx);
+    *vp = JSVAL_VOID;
+    return JS_TRUE;
+}
+
+static JSBool
+GCSlice(JSContext *cx, uintN argc, jsval *vp)
+{
+    uint32_t budget;
+
+    if (argc != 1) {
+        JS_ReportErrorNumber(cx, my_GetErrorMessage, NULL,
+                             (argc < 1)
+                             ? JSSMSG_NOT_ENOUGH_ARGS
+                             : JSSMSG_TOO_MANY_ARGS,
+                             "gcslice");
+        return JS_FALSE;
+    }
+    if (!JS_ValueToECMAUint32(cx, vp[2], &budget))
+        return JS_FALSE;
+
+    GCDebugSlice(cx, budget);
+    *vp = JSVAL_VOID;
+    return JS_TRUE;
+}
 #endif /* JS_GC_ZEAL */
 
 typedef struct JSCountHeapNode JSCountHeapNode;
 
 struct JSCountHeapNode {
     void                *thing;
     JSGCTraceKind       kind;
     JSCountHeapNode     *next;
@@ -1468,17 +1498,17 @@ CountHeapNotify(JSTracer *trc, void **th
     if (entry->key)
         return;
     entry->key = thing;
 
     node = countTracer->recycleList;
     if (node) {
         countTracer->recycleList = node->next;
     } else {
-        node = (JSCountHeapNode *) JS_malloc(trc->context, sizeof *node);
+        node = (JSCountHeapNode *) js_malloc(sizeof *node);
         if (!node) {
             countTracer->ok = JS_FALSE;
             return;
         }
     }
     node->thing = thing;
     node->kind = kind;
     node->next = countTracer->traceList;
@@ -1570,17 +1600,17 @@ CountHeap(JSContext *cx, uintN argc, jsv
             counter++;
         countTracer.traceList = node->next;
         node->next = countTracer.recycleList;
         countTracer.recycleList = node;
         JS_TraceChildren(&countTracer.base, node->thing, node->kind);
     }
     while ((node = countTracer.recycleList) != NULL) {
         countTracer.recycleList = node->next;
-        JS_free(cx, node);
+        js_free(node);
     }
     JS_DHashTableFinish(&countTracer.visited);
 
     return countTracer.ok && JS_NewNumberValue(cx, (jsdouble) counter, vp);
 }
 
 static jsrefcount finalizeCount = 0;
 
@@ -3996,16 +4026,18 @@ static JSFunctionSpec shell_functions[] 
     JS_FN("gc",             ::GC,           0,0),
     JS_FN("gcparam",        GCParameter,    2,0),
     JS_FN("countHeap",      CountHeap,      0,0),
     JS_FN("makeFinalizeObserver", MakeFinalizeObserver, 0,0),
     JS_FN("finalizeCount",  FinalizeCount,  0,0),
 #ifdef JS_GC_ZEAL
     JS_FN("gczeal",         GCZeal,         2,0),
     JS_FN("schedulegc",     ScheduleGC,     1,0),
+    JS_FN("verifybarriers", VerifyBarriers, 0,0),
+    JS_FN("gcslice",        GCSlice,        1,0),
 #endif
     JS_FN("internalConst",  InternalConst,  1,0),
     JS_FN("setDebug",       SetDebug,       1,0),
     JS_FN("setDebuggerHandler", SetDebuggerHandler, 1,0),
     JS_FN("setThrowHook",   SetThrowHook,   1,0),
     JS_FN("trap",           Trap,           3,0),
     JS_FN("untrap",         Untrap,         2,0),
     JS_FN("line2pc",        LineToPC,       0,0),
@@ -4109,16 +4141,18 @@ static const char *const shell_help_mess
 "finalizeCount()\n"
 "  return the current value of the finalization counter that is incremented\n"
 "  each time an object returned by the makeFinalizeObserver is finalized",
 #ifdef JS_GC_ZEAL
 "gczeal(level, [freq], [compartmentGC?])\n"
 "                         How zealous the garbage collector should be",
 "schedulegc(num, [compartmentGC?])\n"
 "                         Schedule a GC to happen after num allocations",
+"verifybarriers()         Start or end a run of the write barrier verifier",
+"gcslice(n)               Run an incremental GC slice that marks ~n objects",
 #endif
 "internalConst(name)\n"
 "  Query an internal constant for the engine. See InternalConst source for the\n"
 "  list of constant names",
 "setDebug(debug)          Set debug mode",
 "setDebuggerHandler(f)    Set handler for debugger keyword to f",
 "setThrowHook(f)          Set throw hook to f",
 "trap([fun, [pc,]] exp)   Trap bytecode execution",
@@ -5452,17 +5486,17 @@ main(int argc, char **argv, char **envp)
 
     if (!InitWatchdog(rt))
         return 1;
 
     cx = NewContext(rt);
     if (!cx)
         return 1;
 
-    JS_SetGCParameter(rt, JSGC_MODE, JSGC_MODE_COMPARTMENT);
+    JS_SetGCParameter(rt, JSGC_MODE, JSGC_MODE_INCREMENTAL);
     JS_SetGCParameterForThread(cx, JSGC_MAX_CODE_CACHE_BYTES, 16 * 1024 * 1024);
 
     /* Must be done before creating the global object */
     if (op.getBoolOption('D'))
         JS_ToggleOptions(cx, JSOPTION_PCCOUNT);
 
     result = Shell(cx, &op, envp);
 
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -1318,17 +1318,19 @@ Debugger::finalize(JSContext *cx, JSObje
         JS_ASSERT(cx->runtime->gcCurrentCompartment == dbg->object->compartment());
         for (GlobalObjectSet::Enum e(dbg->debuggees); !e.empty(); e.popFront())
             dbg->removeDebuggeeGlobal(cx, e.front(), NULL, &e);
     }
     cx->delete_(dbg);
 }
 
 Class Debugger::jsclass = {
-    "Debugger", JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUG_COUNT),
+    "Debugger",
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
+    JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUG_COUNT),
     JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
     JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Debugger::finalize,
     NULL,                 /* reserved0   */
     NULL,                 /* checkAccess */
     NULL,                 /* call        */
     NULL,                 /* construct   */
     NULL,                 /* xdrObject   */
     NULL,                 /* hasInstance */
@@ -1849,17 +1851,19 @@ DebuggerScript_trace(JSTracer *trc, JSOb
         /* This comes from a private pointer, so no barrier needed. */
         if (JSScript *script = GetScriptReferent(obj))
             MarkScriptUnbarriered(trc, script, "Debugger.Script referent");
     }
 }
 
 Class DebuggerScript_class = {
-    "Script", JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGSCRIPT_COUNT),
+    "Script",
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
+    JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGSCRIPT_COUNT),
     JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
     JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, NULL,
     NULL,                 /* reserved0   */
     NULL,                 /* checkAccess */
     NULL,                 /* call        */
     NULL,                 /* construct   */
     NULL,                 /* xdrObject   */
     NULL,                 /* hasInstance */
@@ -2951,17 +2955,19 @@ DebuggerObject_trace(JSTracer *trc, JSOb
          * is okay.
          */
         if (JSObject *referent = (JSObject *) obj->getPrivate())
             MarkObjectUnbarriered(trc, referent, "Debugger.Object referent");
     }
 }
 
 Class DebuggerObject_class = {
-    "Object", JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGOBJECT_COUNT),
+    "Object",
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
+    JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGOBJECT_COUNT),
     JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
     JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, NULL,
     NULL,                 /* reserved0   */
     NULL,                 /* checkAccess */
     NULL,                 /* call        */
     NULL,                 /* construct   */
     NULL,                 /* xdrObject   */
     NULL,                 /* hasInstance */
@@ -3593,17 +3599,19 @@ DebuggerEnv_trace(JSTracer *trc, JSObjec
          * is okay.
          */
         if (Env *referent = (JSObject *) obj->getPrivate())
             MarkObjectUnbarriered(trc, referent, "Debugger.Environment referent");
     }
 }
 
 Class DebuggerEnv_class = {
-    "Environment", JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGENV_COUNT),
+    "Environment",
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
+    JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGENV_COUNT),
     JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
     JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, NULL,
     NULL,                 /* reserved0   */
     NULL,                 /* checkAccess */
     NULL,                 /* call        */
     NULL,                 /* construct   */
     NULL,                 /* xdrObject   */
     NULL,                 /* hasInstance */
--- a/js/src/vm/RegExpObject-inl.h
+++ b/js/src/vm/RegExpObject-inl.h
@@ -76,16 +76,24 @@ inline RegExpShared *
 RegExpObject::getShared(JSContext *cx)
 {
     if (RegExpShared *shared = maybeShared())
         return shared;
     return createShared(cx);
 }
 
 inline void
+RegExpObject::setShared(JSContext *cx, RegExpShared *shared)
+{
+    if (shared)
+        shared->prepareForUse(cx);
+    JSObject::setPrivate(shared);
+}
+
+inline void
 RegExpObject::setLastIndex(const Value &v)
 {
     setSlot(LAST_INDEX_SLOT, v);
 }
 
 inline void
 RegExpObject::setLastIndex(double d)
 {
@@ -143,11 +151,17 @@ inline RegExpShared *
 RegExpToShared(JSContext *cx, JSObject &obj)
 {
     JS_ASSERT(ObjectClassIs(obj, ESClass_RegExp, cx));
     if (obj.isRegExp())
         return obj.asRegExp().getShared(cx);
     return Proxy::regexp_toShared(cx, &obj);
 }
 
+inline void
+RegExpShared::prepareForUse(JSContext *cx)
+{
+    gcNumberWhenUsed = cx->runtime->gcNumber;
+}
+
 } /* namespace js */
 
 #endif
--- a/js/src/vm/RegExpObject.cpp
+++ b/js/src/vm/RegExpObject.cpp
@@ -57,58 +57,58 @@ JS_STATIC_ASSERT(MultilineFlag == JSREG_
 JS_STATIC_ASSERT(StickyFlag == JSREG_STICKY);
 
 /* RegExpObjectBuilder */
 
 RegExpObjectBuilder::RegExpObjectBuilder(JSContext *cx, RegExpObject *reobj)
   : cx(cx), reobj_(reobj)
 {
     if (reobj_)
-        reobj_->setPrivate(NULL);
+        reobj_->setShared(cx, NULL);
 }
 
 bool
 RegExpObjectBuilder::getOrCreate()
 {
     if (reobj_)
         return true;
 
     JSObject *obj = NewBuiltinClassInstance(cx, &RegExpClass);
     if (!obj)
         return false;
-    obj->setPrivate(NULL);
+    obj->initPrivate(NULL);
 
     reobj_ = &obj->asRegExp();
     return true;
 }
 
 bool
 RegExpObjectBuilder::getOrCreateClone(RegExpObject *proto)
 {
     JS_ASSERT(!reobj_);
 
     JSObject *clone = NewObjectWithGivenProto(cx, &RegExpClass, proto, proto->getParent());
     if (!clone)
         return false;
-    clone->setPrivate(NULL);
+    clone->initPrivate(NULL);
 
     reobj_ = &clone->asRegExp();
     return true;
 }
 
 RegExpObject *
 RegExpObjectBuilder::build(JSAtom *source, RegExpShared &shared)
 {
     if (!getOrCreate())
         return NULL;
 
     if (!reobj_->init(cx, source, shared.getFlags()))
         return NULL;
 
-    reobj_->setPrivate(&shared);
+    reobj_->setShared(cx, &shared);
     return reobj_;
 }
 
 RegExpObject *
 RegExpObjectBuilder::build(JSAtom *source, RegExpFlag flags)
 {
     if (!getOrCreate())
         return NULL;
@@ -325,23 +325,28 @@ RegExpCode::execute(JSContext *cx, const
     return RegExpRunStatus_Success;
 }
 
 /* RegExpObject */
 
 static void
 regexp_trace(JSTracer *trc, JSObject *obj)
 {
-    if (trc->runtime->gcRunning)
+    /*
+     * We have to check both conditions, since:
+     *   1. During TraceRuntime, gcRunning is set.
+     *   2. When a write barrier executes, IS_GC_MARKING_TRACER is true.
+     */
+    if (trc->runtime->gcRunning && IS_GC_MARKING_TRACER(trc))
         obj->setPrivate(NULL);
 }
 
 Class js::RegExpClass = {
     js_RegExp_str,
-    JSCLASS_HAS_PRIVATE |
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_RESERVED_SLOTS(RegExpObject::RESERVED_SLOTS) |
     JSCLASS_HAS_CACHED_PROTO(JSProto_RegExp),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,        /* enumerate */
     JS_ResolveStub,
@@ -355,18 +360,18 @@ Class js::RegExpClass = {
     js_XDRRegExpObject,
 #else
     NULL,
 #endif
     NULL,                    /* hasInstance */
     regexp_trace
 };
 
-RegExpShared::RegExpShared(RegExpFlag flags)
-  : parenCount(0), flags(flags), activeUseCount(0)
+RegExpShared::RegExpShared(JSRuntime *rt, RegExpFlag flags)
+  : parenCount(0), flags(flags), activeUseCount(0), gcNumberWhenUsed(rt->gcNumber)
 {}
 
 RegExpObject *
 RegExpObject::create(JSContext *cx, RegExpStatics *res, const jschar *chars, size_t length,
                      RegExpFlag flags, TokenStream *tokenStream)
 {
     RegExpFlag staticsFlags = res->getFlags();
     return createNoStatics(cx, chars, length, RegExpFlag(flags | staticsFlags), tokenStream);
@@ -397,17 +402,17 @@ RegExpObject::createNoStatics(JSContext 
 RegExpShared *
 RegExpObject::createShared(JSContext *cx)
 {
     JS_ASSERT(!maybeShared());
     RegExpShared *shared = cx->compartment->regExps.get(cx, getSource(), getFlags());
     if (!shared)
         return NULL;
 
-    setPrivate(shared);
+    setShared(cx, shared);
     return shared;
 }
 
 Shape *
 RegExpObject::assignInitialShape(JSContext *cx)
 {
     JS_ASSERT(isRegExp());
     JS_ASSERT(nativeEmpty());
@@ -611,38 +616,39 @@ RegExpCompartment::init(JSContext *cx)
         js_ReportOutOfMemory(cx);
         return false;
     }
 
     return true;
 }
 
 void
-RegExpCompartment::purge()
+RegExpCompartment::sweep(JSRuntime *rt)
 {
     for (Map::Enum e(map_); !e.empty(); e.popFront()) {
+        /* See the comment on RegExpShared lifetime in RegExpObject.h. */
         RegExpShared *shared = e.front().value;
-        if (shared->activeUseCount == 0) {
+        if (shared->activeUseCount == 0 && shared->gcNumberWhenUsed < rt->gcStartNumber) {
             Foreground::delete_(shared);
             e.removeFront();
         }
     }
 }
 
 inline RegExpShared *
 RegExpCompartment::get(JSContext *cx, JSAtom *keyAtom, JSAtom *source, RegExpFlag flags, Type type)
 {
-    DebugOnly<size_t> gcNumberBefore = cx->runtime->gcNumber;
+    DebugOnly<uint64_t> gcNumberBefore = cx->runtime->gcNumber;
 
     Key key(keyAtom, flags, type);
     Map::AddPtr p = map_.lookupForAdd(key);
     if (p)
         return p->value;
 
-    RegExpShared *shared = cx->runtime->new_<RegExpShared>(flags);
+    RegExpShared *shared = cx->runtime->new_<RegExpShared>(cx->runtime, flags);
     if (!shared || !shared->compile(cx, source))
         goto error;
 
     /*
      * The compilation path only mallocs so cannot GC. Thus, it is safe to add
      * the regexp directly.
      */
     JS_ASSERT(cx->runtime->gcNumber == gcNumberBefore);
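
RegExpCompartment::sweep above (renamed from purge) keeps a shared regexp alive if either guard fires. The predicate, restated as a sketch (CanDestroyRegExpShared is illustrative; the fields are private to RegExpCompartment in the real code):

    static bool
    CanDestroyRegExpShared(const RegExpShared &shared, const JSRuntime *rt)
    {
        return shared.activeUseCount == 0 &&                /* no Guard on the C++ stack */
               shared.gcNumberWhenUsed < rt->gcStartNumber; /* not installed mid-GC */
    }

gcStartNumber is the gcNumber recorded when the current collection began, so a RegExpShared installed into a RegExpObject after marking started survives one extra GC rather than risking the dangling pointer described in RegExpObject.h below.
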
--- a/js/src/vm/RegExpObject.h
+++ b/js/src/vm/RegExpObject.h
@@ -164,16 +164,17 @@ class RegExpObject : public JSObject
     bool ignoreCase() const { return getSlot(IGNORE_CASE_FLAG_SLOT).toBoolean(); }
     bool global() const     { return getSlot(GLOBAL_FLAG_SLOT).toBoolean(); }
     bool multiline() const  { return getSlot(MULTILINE_FLAG_SLOT).toBoolean(); }
     bool sticky() const     { return getSlot(STICKY_FLAG_SLOT).toBoolean(); }
 
     inline RegExpShared &shared() const;
     inline RegExpShared *maybeShared();
     inline RegExpShared *getShared(JSContext *cx);
+    inline void setShared(JSContext *cx, RegExpShared *shared);
 
   private:
     friend class RegExpObjectBuilder;
 
     /*
      * Compute the initial shape to associate with fresh RegExp objects,
      * encoding their initial properties. Return the shape after
      * changing this regular expression object's last property to it.
@@ -185,16 +186,19 @@ class RegExpObject : public JSObject
     /*
      * Precondition: the syntax for |source| has already been validated.
      * Side effect: sets the private field.
      */
     RegExpShared *createShared(JSContext *cx);
 
     RegExpObject() MOZ_DELETE;
     RegExpObject &operator=(const RegExpObject &reo) MOZ_DELETE;
+
+    /* Call setShared in preference to setPrivate. */
+    void setPrivate(void *priv) MOZ_DELETE;
 };
 
 class RegExpObjectBuilder
 {
     JSContext       *cx;
     RegExpObject    *reobj_;
 
     bool getOrCreate();
@@ -288,29 +292,49 @@ class RegExpCode
 
     RegExpRunStatus
     execute(JSContext *cx, const jschar *chars, size_t length, size_t start,
             int *output, size_t outputCount);
 };
 
 }  /* namespace detail */
 
-/* The compiled representation of a regexp. */
+/*
+ * A RegExpShared is the compiled representation of a regexp. A RegExpShared is
+ * pointed to by potentially multiple RegExpObjects. Additionally, C++ code may
+ * have pointers to RegExpShareds on the stack. The RegExpShareds are tracked in
+ * a RegExpCompartment hashtable, and most are destroyed on every GC.
+ *
+ * During a GC, the trace hook for RegExpObject clears any pointers to
+ * RegExpShareds so that there will be no dangling pointers when they are
+ * deleted. However, some RegExpShareds are not deleted:
+ *
+ *   1. Any RegExpShared with pointers from the C++ stack is not deleted.
+ *   2. Any RegExpShared that was installed in a RegExpObject during an
+ *      incremental GC is not deleted. This is because the RegExpObject may have
+ *      been traced through before the new RegExpShared was installed, in which
+ *      case deleting the RegExpShared would turn the RegExpObject's reference
+ *      into a dangling pointer.
+ *
+ * The activeUseCount and gcNumberWhenUsed fields are used to track these two
+ * conditions.
+ */
 class RegExpShared
 {
     friend class RegExpCompartment;
 
     detail::RegExpCode code;
     uintN              parenCount;
     RegExpFlag         flags;
-    size_t             activeUseCount;
+    size_t             activeUseCount;   /* See comment above. */
+    uint64_t           gcNumberWhenUsed; /* See comment above. */
 
     bool compile(JSContext *cx, JSAtom *source);
 
-    RegExpShared(RegExpFlag flags);
+    RegExpShared(JSRuntime *rt, RegExpFlag flags);
     JS_DECLARE_ALLOCATION_FRIENDS_FOR_PRIVATE_CONSTRUCTOR;
 
   public:
     /*
      * Extend the lifetime of a given RegExpShared to at least the lifetime of
      * the Guard object. See Regular Expression comment at the top.
      */
     class Guard {
@@ -333,16 +357,19 @@ class RegExpShared
                 re_->activeUseCount--;
             }
         }
         bool initialized() const { return !!re_; }
         RegExpShared *operator->() { JS_ASSERT(initialized()); return re_; }
         RegExpShared &operator*() { JS_ASSERT(initialized()); return *re_; }
     };
 
+    /* Called when a RegExpShared is installed into a RegExpObject. */
+    inline void prepareForUse(JSContext *cx);
+
     /* Primary interface: run this regular expression on the given string. */
 
     RegExpRunStatus
     execute(JSContext *cx, const jschar *chars, size_t length, size_t *lastIndex,
             MatchPairs **output);
 
     /* Accessors */
 
@@ -383,17 +410,17 @@ class RegExpCompartment
 
     RegExpShared *get(JSContext *cx, JSAtom *key, JSAtom *source, RegExpFlag flags, Type type);
 
   public:
     RegExpCompartment(JSRuntime *rt);
     ~RegExpCompartment();
 
     bool init(JSContext *cx);
-    void purge();
+    void sweep(JSRuntime *rt);
 
     /* Return a regexp corresponding to the given (source, flags) pair. */
     RegExpShared *get(JSContext *cx, JSAtom *source, RegExpFlag flags);
 
     /* Like 'get', but compile 'maybeOpt' (if non-null). */
     RegExpShared *get(JSContext *cx, JSAtom *source, JSString *maybeOpt);
 
     /*
--- a/js/src/vm/RegExpStatics.cpp
+++ b/js/src/vm/RegExpStatics.cpp
@@ -66,17 +66,17 @@ resc_trace(JSTracer *trc, JSObject *obj)
     void *pdata = obj->getPrivate();
     JS_ASSERT(pdata);
     RegExpStatics *res = static_cast<RegExpStatics *>(pdata);
     res->mark(trc);
 }
 
 Class js::RegExpStaticsClass = {
     "RegExpStatics",
-    JSCLASS_HAS_PRIVATE,
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS,
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
     JS_ConvertStub,
     resc_finalize,
--- a/js/src/vm/Stack.cpp
+++ b/js/src/vm/Stack.cpp
@@ -527,16 +527,25 @@ StackSpace::mark(JSTracer *trc)
             pc = fp->prevpc(&site);
             JS_ASSERT_IF(fp->prev(), !site);
         }
         gc::MarkValueRootRange(trc, seg->slotsBegin(), slotsEnd, "vm_stack");
         nextSegEnd = (Value *)seg;
     }
 }
 
+void
+StackSpace::markActiveCompartments()
+{
+    for (StackSegment *seg = seg_; seg; seg = seg->prevInMemory()) {
+        for (StackFrame *fp = seg->maybefp(); (Value *)fp > (Value *)seg; fp = fp->prev())
+            MarkCompartmentActive(fp);
+    }
+}
+
 JS_FRIEND_API(bool)
 StackSpace::ensureSpaceSlow(JSContext *cx, MaybeReportError report, Value *from, ptrdiff_t nvals,
                             JSCompartment *dest) const
 {
     assertInvariants();
 
     /* See CX_COMPARTMENT comment. */
     if (dest == (JSCompartment *)CX_COMPARTMENT)
--- a/js/src/vm/Stack.h
+++ b/js/src/vm/Stack.h
@@ -1550,16 +1550,19 @@ class StackSpace
      */
     inline Value *getStackLimit(JSContext *cx, MaybeReportError report);
     bool tryBumpLimit(JSContext *cx, Value *from, uintN nvals, Value **limit);
 
     /* Called during GC: mark segments, frames, and slots under firstUnused. */
     void mark(JSTracer *trc);
     void markFrameSlots(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc);
 
+    /* Called during GC: sets the active flag on compartments with active frames. */
+    void markActiveCompartments();
+
     /* We only report the committed size;  uncommitted size is uninteresting. */
     JS_FRIEND_API(size_t) sizeOfCommitted();
 };
 
 /*****************************************************************************/
 
 class ContextStack
 {
--- a/js/xpconnect/idl/nsIXPConnect.idl
+++ b/js/xpconnect/idl/nsIXPConnect.idl
@@ -393,17 +393,17 @@ interface nsIXPCFunctionThisTranslator :
 
 enum nsGCType {
     nsGCNormal,
     nsGCShrinking,
     nsGCIncremental
 };
 %}
 
-[uuid(686bb1d0-4711-11e1-b86c-0800200c9a66)]
+[uuid(e92bf5e0-494c-11e1-b86c-0800200c9a66)]
 interface nsIXPConnect : nsISupports
 {
 %{ C++
   NS_DEFINE_STATIC_CID_ACCESSOR(NS_XPCONNECT_CID)
 %}
 
     /**
      * Initializes classes on a global object that has already been created.
@@ -730,16 +730,22 @@ interface nsIXPConnect : nsISupports
     /**
      * Trigger a JS garbage collection.
      * Use a js::gcreason::Reason from jsfriendapi.h for the kind.
      * Use the nsGCType enum for the kind.
      */
     void GarbageCollect(in PRUint32 reason, in PRUint32 kind);
 
     /**
+     * Signals a good place to do an incremental GC slice, because the
+     * browser is drawing a frame.
+     */
+    void NotifyDidPaint();
+
+    /**
      * Define quick stubs on the given object, @a proto.
      *
      * @param cx
      *     A context.  Requires request.
      * @param proto
      *     The (newly created) prototype object for a DOM class.  The JS half
      *     of an XPCWrappedNativeProto.
      * @param flags
--- a/js/xpconnect/src/XPCInlines.h
+++ b/js/xpconnect/src/XPCInlines.h
@@ -599,17 +599,18 @@ inline
 void XPCWrappedNativeTearOff::SetJSObject(JSObject*  JSObj)
 {
         mJSObject = JSObj;
 }
 
 inline
 XPCWrappedNativeTearOff::~XPCWrappedNativeTearOff()
 {
-    NS_ASSERTION(!(GetInterface()||GetNative()||GetJSObjectPreserveColor()), "tearoff not empty in dtor");
+    NS_ASSERTION(!(GetInterface()||GetNative()||GetJSObjectPreserveColor()),
+                 "tearoff not empty in dtor");
 }
 
 /***************************************************************************/
 
 inline JSBool
 XPCWrappedNative::HasInterfaceNoQI(const nsIID& iid)
 {
     return nsnull != GetSet()->FindInterfaceWithIID(iid);
--- a/js/xpconnect/src/XPCJSRuntime.cpp
+++ b/js/xpconnect/src/XPCJSRuntime.cpp
@@ -906,16 +906,18 @@ JSBool XPCJSRuntime::GCCallback(JSContex
 #ifdef XPC_TRACK_DEFERRED_RELEASES
             printf("XPC - Begin deferred Release of %d nsISupports pointers\n",
                    self->mNativesToReleaseArray.Length());
 #endif
             DoDeferredRelease(self->mNativesToReleaseArray);
 #ifdef XPC_TRACK_DEFERRED_RELEASES
             printf("XPC - End deferred Releases\n");
 #endif
+
+            self->GetXPConnect()->ClearGCBeforeCC();
             break;
         }
         default:
             break;
     }
 
     nsTArray<JSGCCallback> callbacks(self->extraGCCallbacks);
     for (PRUint32 i = 0; i < callbacks.Length(); ++i) {
@@ -1885,16 +1887,28 @@ AccumulateTelemetryCallback(int id, uint
         Telemetry::Accumulate(Telemetry::GC_MS, sample);
         break;
       case JS_TELEMETRY_GC_MARK_MS:
         Telemetry::Accumulate(Telemetry::GC_MARK_MS, sample);
         break;
       case JS_TELEMETRY_GC_SWEEP_MS:
         Telemetry::Accumulate(Telemetry::GC_SWEEP_MS, sample);
         break;
+      case JS_TELEMETRY_GC_SLICE_MS:
+        Telemetry::Accumulate(Telemetry::GC_SLICE_MS, sample);
+        break;
+      case JS_TELEMETRY_GC_MMU_50:
+        Telemetry::Accumulate(Telemetry::GC_MMU_50, sample);
+        break;
+      case JS_TELEMETRY_GC_RESET:
+        Telemetry::Accumulate(Telemetry::GC_RESET, sample);
+        break;
+      case JS_TELEMETRY_GC_INCREMENTAL_DISABLED:
+        Telemetry::Accumulate(Telemetry::GC_INCREMENTAL_DISABLED, sample);
+        break;
     }
 }
 
 bool XPCJSRuntime::gNewDOMBindingsEnabled;
 
 bool PreserveWrapper(JSContext *cx, JSObject *obj)
 {
     JS_ASSERT(IS_WRAPPER_CLASS(js::GetObjectClass(obj)));
--- a/js/xpconnect/src/nsXPConnect.cpp
+++ b/js/xpconnect/src/nsXPConnect.cpp
@@ -401,34 +401,34 @@ nsXPConnect::Collect(PRUint32 reason, PR
     // will already be marked by the JS GC and will thus be colored black
     // themselves. Any C++ objects they hold will have a missing (untraversed)
     // edge from the JS object to the C++ object and so it will be marked black
     // too. This decreases the number of objects that the cycle collector has to
     // deal with.
     // To improve debugging, if DEBUG_CC is defined all JS objects are
     // traversed.
 
-    mNeedGCBeforeCC = false;
-
     XPCCallContext ccx(NATIVE_CALLER);
     if (!ccx.IsValid())
         return;
 
     JSContext *cx = ccx.GetJSContext();
 
     // We want to scan the current thread for GC roots only if it was in a
     // request prior to the Collect call to avoid false positives during the
     // cycle collection. So to compensate for JS_BeginRequest in
     // XPCCallContext::Init we disable the conservative scanner if that call
     // has started the request on this thread.
     js::AutoSkipConservativeScan ascs(cx);
     MOZ_ASSERT(reason < js::gcreason::NUM_REASONS);
     js::gcreason::Reason gcreason = (js::gcreason::Reason)reason;
     if (kind == nsGCShrinking) {
         js::ShrinkingGC(cx, gcreason);
+    } else if (kind == nsGCIncremental) {
+        js::IncrementalGC(cx, gcreason);
     } else {
         MOZ_ASSERT(kind == nsGCNormal);
         js::GCForReason(cx, gcreason);
     }
 }
 
 NS_IMETHODIMP
 nsXPConnect::GarbageCollect(PRUint32 reason, PRUint32 kind)
@@ -2820,16 +2820,33 @@ nsXPConnect::GetTelemetryValue(JSContext
     v = DOUBLE_TO_JSVAL(i);
     if (!JS_DefineProperty(cx, obj, "customIter", v, NULL, NULL, attrs))
         return NS_ERROR_OUT_OF_MEMORY;
 
     *rval = OBJECT_TO_JSVAL(obj);
     return NS_OK;
 }
 
+NS_IMETHODIMP
+nsXPConnect::NotifyDidPaint()
+{
+    JSRuntime *rt = mRuntime->GetJSRuntime();
+    if (!js::WantGCSlice(rt))
+        return NS_OK;
+
+    XPCCallContext ccx(NATIVE_CALLER);
+    if (!ccx.IsValid())
+        return UnexpectedFailure(NS_ERROR_FAILURE);
+
+    JSContext *cx = ccx.GetJSContext();
+
+    js::NotifyDidPaint(cx);
+    return NS_OK;
+}
+
 /* These are here to be callable from a debugger */
 JS_BEGIN_EXTERN_C
 JS_EXPORT_API(void) DumpJSStack()
 {
     nsresult rv;
     nsCOMPtr<nsIXPConnect> xpc(do_GetService(nsIXPConnect::GetCID(), &rv));
     if (NS_SUCCEEDED(rv) && xpc)
         xpc->DebugDumpJSStack(true, true, false);
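
NotifyDidPaint gives the scheduler a natural slice point: just after a frame is drawn, a few milliseconds of GC work are least likely to be noticed. In outline, using only the friend API this patch adds (OnFramePainted is a hypothetical caller):

    static void
    OnFramePainted(JSRuntime *rt, JSContext *cx)
    {
        if (!js::WantGCSlice(rt))   /* no incremental GC in progress */
            return;
        js::NotifyDidPaint(cx);     /* run one time-budgeted slice */
    }
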
--- a/js/xpconnect/src/xpcprivate.h
+++ b/js/xpconnect/src/xpcprivate.h
@@ -313,17 +313,18 @@ typedef nsDataHashtable<xpc::PtrAndPrinc
         result = (char*) nsMemory::Clone(src,                                 \
                                          sizeof(char)*(strlen(src)+1));       \
     else                                                                      \
         result = nsnull;                                                      \
     *dest = result;                                                           \
     return (result || !src) ? NS_OK : NS_ERROR_OUT_OF_MEMORY
 
 
-#define WRAPPER_SLOTS (JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(1))
+#define WRAPPER_SLOTS (JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | \
+                       JSCLASS_HAS_RESERVED_SLOTS(1))
 
 #define INVALID_OBJECT ((JSObject *)1)
 
 /***************************************************************************/
 // Auto locking support class...
 
 // We PROMISE to never screw this up.
 #ifdef _MSC_VER
@@ -515,16 +516,17 @@ public:
     // Called by module code on dll shutdown.
     static void ReleaseXPConnectSingleton();
 
     virtual ~nsXPConnect();
 
     JSBool IsShuttingDown() const {return mShuttingDown;}
 
     void EnsureGCBeforeCC() { mNeedGCBeforeCC = true; }
+    void ClearGCBeforeCC() { mNeedGCBeforeCC = false; }
 
     nsresult GetInfoForIID(const nsIID * aIID, nsIInterfaceInfo** info);
     nsresult GetInfoForName(const char * name, nsIInterfaceInfo** info);
 
     // nsCycleCollectionParticipant
     NS_IMETHOD Root(void *p);
     NS_IMETHOD Unlink(void *p);
     NS_IMETHOD Unroot(void *p);
--- a/js/xpconnect/src/xpcpublic.h
+++ b/js/xpconnect/src/xpcpublic.h
@@ -70,17 +70,18 @@ xpc_CreateGlobalObject(JSContext *cx, JS
 
 nsresult
 xpc_CreateMTGlobalObject(JSContext *cx, JSClass *clasp,
                          nsISupports *ptr, JSObject **global,
                          JSCompartment **compartment);
 
 #define XPCONNECT_GLOBAL_FLAGS                                                \
     JSCLASS_XPCONNECT_GLOBAL | JSCLASS_HAS_PRIVATE |                          \
-    JSCLASS_PRIVATE_IS_NSISUPPORTS | JSCLASS_GLOBAL_FLAGS_WITH_SLOTS(1)
+    JSCLASS_PRIVATE_IS_NSISUPPORTS | JSCLASS_IMPLEMENTS_BARRIERS |            \
+    JSCLASS_GLOBAL_FLAGS_WITH_SLOTS(1)
 
 void
 TraceXPCGlobal(JSTracer *trc, JSObject *obj);
 
 // XXX where should this live?
 NS_EXPORT_(void)
 xpc_LocalizeContext(JSContext *cx);
 
@@ -177,18 +178,22 @@ xpc_GCThingIsGrayCCThing(void *thing);
 extern void
 xpc_UnmarkGrayObjectRecursive(JSObject* obj);
 
 // Remove the gray color from the given JSObject and any other objects that can
 // be reached through it.
 inline void
 xpc_UnmarkGrayObject(JSObject *obj)
 {
-    if (obj && xpc_IsGrayGCThing(obj))
-        xpc_UnmarkGrayObjectRecursive(obj);
+    if (obj) {
+        if (xpc_IsGrayGCThing(obj))
+            xpc_UnmarkGrayObjectRecursive(obj);
+        else if (js::IsIncrementalBarrierNeededOnObject(obj))
+            js::IncrementalReferenceBarrier(obj);
+    }
 }
 
 // If aVariant is an XPCVariant, this marks the object to be in aGeneration.
 // This also unmarks the gray JSObject.
 extern void
 xpc_MarkInCCGeneration(nsISupports* aVariant, PRUint32 aGeneration);
 
 // Unmarks aWrappedJS's JSObject.
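
The extra branch in xpc_UnmarkGrayObject above enforces the read-barrier invariant for C++ callers: when an object is handed across the XPConnect boundary while an incremental mark is in progress, it must be pushed through the barrier or the marker may never see it. Any similar hand-off should follow the same shape, sketched (TakeReferenceForCPlusPlus is hypothetical):

    static void
    TakeReferenceForCPlusPlus(JSObject *obj)
    {
        if (obj && js::IsIncrementalBarrierNeededOnObject(obj))
            js::IncrementalReferenceBarrier(obj);
        /* ... store obj somewhere the GC cannot trace ... */
    }
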
--- a/layout/base/nsPresShell.cpp
+++ b/layout/base/nsPresShell.cpp
@@ -5423,16 +5423,34 @@ PresShell::ProcessSynthMouseMoveEvent(bo
     shell->DispatchSynthMouseMove(&event, !aFromScroll);
   }
 
   if (!aFromScroll) {
     mSynthMouseMoveEvent.Forget();
   }
 }
 
+class nsAutoNotifyDidPaint
+{
+public:
+  nsAutoNotifyDidPaint(bool aWillSendDidPaint)
+    : mWillSendDidPaint(aWillSendDidPaint)
+  {
+  }
+  ~nsAutoNotifyDidPaint()
+  {
+    if (!mWillSendDidPaint && nsContentUtils::XPConnect()) {
+      nsContentUtils::XPConnect()->NotifyDidPaint();
+    }
+  }
+
+private:
+  bool mWillSendDidPaint;
+};
+
 void
 PresShell::Paint(nsIView*           aViewToPaint,
                  nsIWidget*         aWidgetToPaint,
                  const nsRegion&    aDirtyRegion,
                  const nsIntRegion& aIntDirtyRegion,
                  bool               aWillSendDidPaint)
 {
 #ifdef NS_FUNCTION_TIMER
@@ -5446,16 +5464,18 @@ PresShell::Paint(nsIView*           aVie
                            NSCoordToFloat(bounds__.YMost()));
 #endif
 
   SAMPLE_LABEL("Paint", "PresShell::Paint");
   NS_ASSERTION(!mIsDestroying, "painting a destroyed PresShell");
   NS_ASSERTION(aViewToPaint, "null view");
   NS_ASSERTION(aWidgetToPaint, "Can't paint without a widget");
 
+  nsAutoNotifyDidPaint notifyDidPaint(aWillSendDidPaint);
+
   nsPresContext* presContext = GetPresContext();
   AUTO_LAYOUT_PHASE_ENTRY_POINT(presContext, Paint);
 
   nsIFrame* frame = aViewToPaint->GetFrame();
 
   bool isRetainingManager;
   LayerManager* layerManager =
     aWidgetToPaint->GetLayerManager(&isRetainingManager);
@@ -7216,16 +7236,20 @@ PresShell::DidPaint()
   NS_ASSERTION(mPresContext->IsRoot(), "Should only call DidPaint on root presshells");
 
   nsRootPresContext* rootPresContext = mPresContext->GetRootPresContext();
   // This should only be called on root presshells, but maybe if a document
   // tree is torn down we might not be a root presshell...
   if (rootPresContext == mPresContext) {
     rootPresContext->UpdatePluginGeometry();
   }
+
+  if (nsContentUtils::XPConnect()) {
+    nsContentUtils::XPConnect()->NotifyDidPaint();
+  }
 }
 
 bool
 PresShell::IsVisible()
 {
   if (!mViewManager)
     return false;
 
--- a/modules/libpref/src/init/all.js
+++ b/modules/libpref/src/init/all.js
@@ -651,16 +651,18 @@ pref("javascript.options.jit_hardening",
 pref("javascript.options.typeinference", true);
 // This preference limits the memory usage of javascript.
 // If you want to change these values for your device,
 // please find Bug 417052 comment 17 and Bug 456721
 // Comment 32 and Bug 613551.
 pref("javascript.options.mem.high_water_mark", 128);
 pref("javascript.options.mem.max", -1);
 pref("javascript.options.mem.gc_per_compartment", true);
+pref("javascript.options.mem.gc_incremental", true);
+pref("javascript.options.mem.gc_incremental_slice_ms", 10);
 pref("javascript.options.mem.log", false);
 pref("javascript.options.gc_on_memory_pressure", true);
 
 // advanced prefs
 pref("advanced.mailftp",                    false);
 pref("image.animation_mode",                "normal");
 
 // Same-origin policy for file URIs, "false" is traditional
--- a/toolkit/components/telemetry/TelemetryHistograms.h
+++ b/toolkit/components/telemetry/TelemetryHistograms.h
@@ -76,16 +76,20 @@ HISTOGRAM(FORGET_SKIPPABLE_MAX, 1, 10000
 /**
  * GC telemetry
  */
 HISTOGRAM(GC_REASON, 1, 20, 20, LINEAR, "Reason (enum value) for initiating a GC")
 HISTOGRAM_BOOLEAN(GC_IS_COMPARTMENTAL, "Is it a compartmental GC?")
 HISTOGRAM(GC_MS, 1, 10000, 50, EXPONENTIAL, "Time spent running JS GC (ms)")
 HISTOGRAM(GC_MARK_MS, 1, 10000, 50, EXPONENTIAL, "Time spent running JS GC mark phase (ms)")
 HISTOGRAM(GC_SWEEP_MS, 1, 10000, 50, EXPONENTIAL, "Time spent running JS GC sweep phase (ms)")
+HISTOGRAM(GC_SLICE_MS, 1, 10000, 50, EXPONENTIAL, "Time spent running a JS GC slice (ms)")
+HISTOGRAM(GC_MMU_50, 1, 100, 20, LINEAR, "Minimum percentage of time spent outside GC over any 50ms window")
+HISTOGRAM_BOOLEAN(GC_RESET, "Was an incremental GC canceled?")
+HISTOGRAM_BOOLEAN(GC_INCREMENTAL_DISABLED, "Is incremental GC permanently disabled?")
 
 HISTOGRAM(TELEMETRY_PING, 1, 3000, 10, EXPONENTIAL, "Time taken to submit telemetry info (ms)")
 HISTOGRAM_BOOLEAN(TELEMETRY_SUCCESS,  "Successful telemetry submission")
 HISTOGRAM(MEMORY_JS_COMPARTMENTS_SYSTEM, 1, 1000, 50, EXPONENTIAL, "Total JavaScript compartments used for add-ons and internals.")
 HISTOGRAM(MEMORY_JS_COMPARTMENTS_USER, 1, 1000, 50, EXPONENTIAL, "Total JavaScript compartments used for web pages")
 HISTOGRAM(MEMORY_JS_GC_HEAP, 1024, 512 * 1024, 50, EXPONENTIAL, "Memory used by the garbage-collected JavaScript heap (KB)")
 HISTOGRAM(MEMORY_RESIDENT, 32 * 1024, 1024 * 1024, 50, EXPONENTIAL, "Resident memory size (KB)")
 HISTOGRAM(MEMORY_STORAGE_SQLITE, 1024, 512 * 1024, 50, EXPONENTIAL, "Memory used by SQLite (KB)")
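
GC_MMU_50 above reports minimum mutator utilisation: slide a 50 ms window across the run and take the worst-case fraction of that window not spent in GC. As a formula, for window length w = 50 ms:

    MMU(w) = min over all t of (w - gcTime[t, t + w)) / w

A perfectly pause-free collector scores near 100; a single 50 ms pause anywhere in the run drives it to 0.
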
--- a/toolkit/content/aboutSupport.js
+++ b/toolkit/content/aboutSupport.js
@@ -111,16 +111,17 @@ window.onload = function () {
   } catch (e) {
   }
   document.getElementById("version-box").textContent = version;
 
   // Update the other sections.
   populatePreferencesSection();
   populateExtensionsSection();
   populateGraphicsSection();
+  populateJavaScriptSection();
 }
 
 function populateExtensionsSection() {
   AddonManager.getAddonsByTypes(["extension"], function(extensions) {
     extensions.sort(function(a,b) {
       if (a.isActive != b.isActive)
         return b.isActive ? 1 : -1;
       let lc = a.name.localeCompare(b.name);
@@ -377,16 +378,23 @@ function populateGraphicsSection() {
   appendChildren(graphics_tbody, [
     createParentElement("tr", [
       createHeader(bundle.GetStringFromName("acceleratedWindows")),
       createElement("td", msg),
     ])
   ]);
 }
 
+function populateJavaScriptSection() {
+  let enabled = window.QueryInterface(Ci.nsIInterfaceRequestor)
+        .getInterface(Ci.nsIDOMWindowUtils)
+        .isIncrementalGCEnabled();
+  document.getElementById("javascript-incremental-gc").textContent = enabled ? "1" : "0";
+}
+
 function getPrefValue(aName) {
   let value = "";
   let type = Services.prefs.getPrefType(aName);
   switch (type) {
     case Ci.nsIPrefBranch.PREF_STRING:
       value = Services.prefs.getComplexValue(aName, Ci.nsISupportsString).data;
       break;
     case Ci.nsIPrefBranch.PREF_BOOL:
--- a/toolkit/content/aboutSupport.xhtml
+++ b/toolkit/content/aboutSupport.xhtml
@@ -238,13 +238,31 @@
 
         <tbody id="graphics-info-properties">
         </tbody>
 
         <tbody id="graphics-failures-tbody">
         </tbody>
       </table>
 
+      <!-- - - - - - - - - - - - - - - - - - - - - -->
+      <h2 class="major-section">
+        &aboutSupport.jsTitle;
+      </h2>
+
+      <table>
+        <tbody>
+          <tr>
+            <th class="column">
+              &aboutSupport.jsIncrementalGC;
+            </th>
+
+            <td id="javascript-incremental-gc">
+            </td>
+          </tr>
+        </tbody>
+      </table>
+
     </div>
 
   </body>
 
 </html>
--- a/toolkit/locales/en-US/chrome/global/aboutSupport.dtd
+++ b/toolkit/locales/en-US/chrome/global/aboutSupport.dtd
@@ -39,12 +39,15 @@ variant of aboutSupport.showDir.label. -
 <!ENTITY aboutSupport.showWin.label "Show Folder">
 
 <!ENTITY aboutSupport.modifiedKeyPrefsTitle "Important Modified Preferences">
 <!ENTITY aboutSupport.modifiedPrefsName "Name">
 <!ENTITY aboutSupport.modifiedPrefsValue "Value">
 
 <!ENTITY aboutSupport.graphicsTitle "Graphics">
 
+<!ENTITY aboutSupport.jsTitle "JavaScript">
+<!ENTITY aboutSupport.jsIncrementalGC "Incremental GC">
+
 <!ENTITY aboutSupport.installationHistoryTitle "Installation History">
 <!ENTITY aboutSupport.updateHistoryTitle "Update History">
 
 <!ENTITY aboutSupport.copyToClipboard.label "Copy all to clipboard">