Merge last PGO-green changeset of mozilla-inbound to mozilla-central
author: Ed Morley <bmo@edmorley.co.uk>
date: Sat, 18 Feb 2012 11:16:37 +0000
changeset 87152: 20478b6732123ec2ace4d3568c4ed8221472c0b2
parent 87138: 550779e6bab444b48dac408c6f361ab503744472 (current diff)
parent 87151: 19d7edbf60bc9816ec1c094298d8dfa81872824e (diff)
child 87153: eb82a1f486718666b18b78433ca223bb8fcb5585
child 87163: d285af1007a9ce472a47284bec3d24d67d361142
push id: 22083
push user: bmo@edmorley.co.uk
push date: Sat, 18 Feb 2012 11:19:19 +0000
milestone: 13.0a1
--- a/browser/base/content/tabbrowser.xml
+++ b/browser/base/content/tabbrowser.xml
@@ -3546,22 +3546,24 @@
 
           this.tabbrowser.swapBrowsersAndCloseOther(newTab, draggedTab);
 
           // We need to select the tab after we've done
           // swapBrowsersAndCloseOther, so that the updateCurrentBrowser
           // it triggers will correctly update our URL bar.
           this.tabbrowser.selectedTab = newTab;
         } else {
-          let url = browserDragAndDrop.drop(event, { });
+          // Pass true to disallow dropping javascript: or data: urls
+          let url;
+          try {
+            url = browserDragAndDrop.drop(event, { }, true);
+          } catch (ex) {}
 
           // valid urls don't contain spaces ' '; if we have a space it isn't a valid url.
-          // Also disallow dropping javascript: or data: urls--bail out
-          if (!url || !url.length || url.indexOf(" ", 0) != -1 ||
-              /^\s*(javascript|data):/.test(url))
+          if (!url || url.indexOf(" ") != -1)
             return;
 
           let bgLoad = Services.prefs.getBoolPref("browser.tabs.loadInBackground");
 
           if (event.shiftKey)
             bgLoad = !bgLoad;
 
           let tab = this._getDragTargetTab(event);
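
The hunk above moves scheme filtering into browserDragAndDrop.drop(): passing true as the third argument makes the helper reject javascript: and data: URLs (throwing, hence the try/catch), leaving only the whitespace check inline. A minimal sketch of the remaining caller-side check, with a hypothetical helper name:

    // Sketch: caller-side validation for a dropped URL. Scheme filtering
    // (javascript:/data:) is assumed to happen inside the drop helper,
    // which throws on disallowed URLs.
    function isPlausibleDroppedURL(url) {
      // Valid URLs never contain a literal space; anything with one is
      // not loadable as-is.
      return !!url && url.indexOf(" ") == -1;
    }
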
--- a/browser/base/content/test/Makefile.in
+++ b/browser/base/content/test/Makefile.in
@@ -263,16 +263,17 @@ endif
                  test_bug628179.html \
                  browser_wyciwyg_urlbarCopying.js \
                  test_wyciwyg_copying.html \
                  authenticate.sjs \
                  browser_minimize.js \
                  browser_aboutSyncProgress.js \
                  browser_middleMouse_inherit.js \
                  redirect_bug623155.sjs \
+                 browser_tabDrop.js \
                  $(NULL)
 
 ifneq (cocoa,$(MOZ_WIDGET_TOOLKIT))
 _BROWSER_FILES += \
 		browser_bug462289.js \
 		$(NULL)
 else
 _BROWSER_FILES += \
new file mode 100644
--- /dev/null
+++ b/browser/base/content/test/browser_tabDrop.js
@@ -0,0 +1,71 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+function test() {
+  waitForExplicitFinish();
+
+  let newTab = gBrowser.selectedTab = gBrowser.addTab("about:blank", {skipAnimation: true});
+  registerCleanupFunction(function () {
+    gBrowser.removeTab(newTab);
+  });
+
+  let scriptLoader = Cc["@mozilla.org/moz/jssubscript-loader;1"].
+                     getService(Ci.mozIJSSubScriptLoader);
+  let chromeUtils = {};
+  scriptLoader.loadSubScript("chrome://mochikit/content/tests/SimpleTest/ChromeUtils.js", chromeUtils);
+
+  let tabContainer = gBrowser.tabContainer;
+  var receivedDropCount = 0;
+  function dropListener() {
+    receivedDropCount++;
+    if (receivedDropCount == triggeredDropCount) {
+      is(openedTabs, validDropCount, "correct number of tabs were opened");
+      executeSoon(finish);
+    }
+  }
+  tabContainer.addEventListener("drop", dropListener, false);
+  registerCleanupFunction(function () {
+    tabContainer.removeEventListener("drop", dropListener, false);
+  });
+
+  var openedTabs = 0;
+  function tabOpenListener(e) {
+    openedTabs++;
+    let tab = e.target;
+    executeSoon(function () {
+      gBrowser.removeTab(tab);
+    });
+  }
+
+  tabContainer.addEventListener("TabOpen", tabOpenListener, false);
+  registerCleanupFunction(function () {
+    tabContainer.removeEventListener("TabOpen", tabOpenListener, false);
+  });
+
+  var triggeredDropCount = 0;
+  var validDropCount = 0;
+  function drop(text, valid) {
+    triggeredDropCount++;
+    if (valid)
+      validDropCount++;
+    executeSoon(function () {
+      // A drop type of "link" onto an existing tab would normally trigger a
+      // load in that same tab, but tabbrowser code in _getDragTargetTab treats
+      // drops on the outer edges of a tab differently (loading a new tab
+      // instead). The events created by synthesizeDrop have all of their
+      // coordinates set to 0 (screenX/screenY), so they're treated as drops
+      // on the outer edge of the tab, thus they open new tabs.
+      chromeUtils.synthesizeDrop(newTab, newTab, [[{type: "text/plain", data: text}]], "link", window, EventUtils);
+    });
+  }
+
+  // Begin and end with valid drops to make sure we wait for all drops before
+  // ending the test
+  drop("mochi.test/first", true);
+  drop("javascript:'bad'");
+  drop("jAvascript:'bad'");
+  drop("space bad");
+  drop("mochi.test/second", true);
+  drop("data:text/html,bad");
+  drop("mochi.test/third", true);
+}
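
The test drives each case through ChromeUtils.synthesizeDrop(), loaded above from the SimpleTest helpers. For reference, the call shape it uses (dragData is an array of items, each an array of flavour objects):

    // Sketch: one synthesized plain-text drop onto a tab, as in drop() above.
    let dragData = [[{ type: "text/plain", data: "mochi.test/example" }]];
    chromeUtils.synthesizeDrop(newTab /* source */, newTab /* target */,
                               dragData, "link", window, EventUtils);
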
--- a/content/media/test/Makefile.in
+++ b/content/media/test/Makefile.in
@@ -104,17 +104,16 @@ include $(topsrcdir)/config/rules.mk
 		test_bug448534.html \
 		test_bug463162.xhtml \
 		test_bug465498.html \
 		test_bug493187.html \
 		test_bug495145.html \
 		test_bug495300.html \
 		test_bug686942.html \
 		test_can_play_type.html \
-		test_closing_connections.html \
 		test_constants.html \
 		test_controls.html \
 		test_currentTime.html \
 		test_decode_error.html \
 		test_decoder_disable.html \
 		test_defaultMuted.html \
 		test_delay_load.html \
 		test_error_on_404.html \
@@ -171,16 +170,18 @@ endif
 # Bug 492821:
 #   test_videoDocumentTitle.html
 # Bug 493692:
 #   test_preload_suspend.html
 # Bug 567954 and Bug 574586:
 #   test_mixed_principals.html
 # Disabled since we don't play Wave files standalone, for now
 #		test_audioDocumentTitle.html
+# Bug 634564:
+#		test_closing_connections.html \
 
 # sample files
 _TEST_FILES += \
 		320x240.ogv \
 		448636.ogv \
 		audio-overhang.ogg \
 		audio-gaps.ogg \
 		beta-phrasebook.ogg \
--- a/docshell/base/nsDocShell.cpp
+++ b/docshell/base/nsDocShell.cpp
@@ -4114,19 +4114,17 @@ nsDocShell::DisplayLoadError(nsresult aE
             error.get(),
             strs, formatStrCount, getter_Copies(str));
         NS_ENSURE_SUCCESS(rv, rv);
         messageStr.Assign(str.get());
     }
 
     // Display the error as a page or an alert prompt
     NS_ENSURE_FALSE(messageStr.IsEmpty(), NS_ERROR_FAILURE);
-    // Note: For now, display an alert instead of an error page if we have no
-    // URI object. Missing URI objects are handled badly by session history.
-    if (mUseErrorPages && aURI) {
+    if (mUseErrorPages) {
         // Display an error page
         LoadErrorPage(aURI, aURL, errorPage.get(), error.get(),
                       messageStr.get(), cssClass.get(), aFailedChannel);
     } 
     else
     {
        // The prompter requires that our private window has a document (or it
         // asserts). Satisfy that assertion now since GetDocument will force
@@ -4185,16 +4183,20 @@ nsDocShell::LoadErrorPage(nsIURI *aURI, 
     if (aURI)
     {
         nsresult rv = aURI->GetSpec(url);
         rv |= aURI->GetOriginCharset(charset);
         NS_ENSURE_SUCCESS(rv, rv);
     }
     else if (aURL)
     {
+        // We need a URI object to store a session history entry, so make up a URI
+        nsresult rv = NS_NewURI(getter_AddRefs(mFailedURI), "about:blank");
+        NS_ENSURE_SUCCESS(rv, rv);
+
         CopyUTF16toUTF8(aURL, url);
     }
     else
     {
         return NS_ERROR_INVALID_POINTER;
     }
 
     // Create a URL to pass all the error information through to the page.
--- a/dom/base/nsDOMWindowUtils.cpp
+++ b/dom/base/nsDOMWindowUtils.cpp
@@ -2087,16 +2087,23 @@ nsDOMWindowUtils::GetFileReferences(cons
   }
 
   *aRefCnt = *aDBRefCnt = *aSliceRefCnt = -1;
   *aResult = false;
   return NS_OK;
 }
 
 NS_IMETHODIMP
+nsDOMWindowUtils::IsIncrementalGCEnabled(JSContext* cx, bool* aResult)
+{
+  *aResult = js::IsIncrementalGCEnabled(JS_GetRuntime(cx));
+  return NS_OK;
+}
+
+NS_IMETHODIMP
 nsDOMWindowUtils::StartPCCountProfiling(JSContext* cx)
 {
   js::StartPCCountProfiling(cx);
   return NS_OK;
 }
 
 NS_IMETHODIMP
 nsDOMWindowUtils::StopPCCountProfiling(JSContext* cx)
--- a/dom/base/nsJSEnvironment.cpp
+++ b/dom/base/nsJSEnvironment.cpp
@@ -130,16 +130,19 @@ static PRLogModuleInfo* gJSDiagnostics;
 #define NS_GC_DELAY                 4000 // ms
 
 #define NS_SHRINK_GC_BUFFERS_DELAY  4000 // ms
 
 // The amount of time we wait from the first request to GC to actually
 // doing the first GC.
 #define NS_FIRST_GC_DELAY           10000 // ms
 
+// Maximum amount of time that should elapse between incremental GC slices
+#define NS_INTERSLICE_GC_DELAY      100 // ms
+
 // The amount of time we wait between a request to CC (after GC ran)
 // and doing the actual CC.
 #define NS_CC_DELAY                 5000 // ms
 
 #define NS_CC_SKIPPABLE_DELAY       250 // ms
 
 #define NS_CC_FORCED                (5 * 60 * PR_USEC_PER_SEC) // 5 min
 
@@ -149,16 +152,19 @@ static PRLogModuleInfo* gJSDiagnostics;
 
 static nsITimer *sGCTimer;
 static nsITimer *sShrinkGCBuffersTimer;
 static nsITimer *sCCTimer;
 
 static PRTime sLastCCEndTime;
 
 static bool sGCHasRun;
+static bool sCCLockedOut;
+
+static js::GCSliceCallback sPrevGCSliceCallback;
 
 // The number of currently pending document loads. This count isn't
 // guaranteed to always reflect reality and can't easily as we don't
 // have an easy place to know when a load ends or is interrupted in
 // all cases. This counter also gets reset if we end up GC'ing while
 // we're waiting for a slow page to load. IOW, this count may be 0
 // even when there are pending loads.
 static PRUint32 sPendingLoadCount;
@@ -3269,16 +3275,21 @@ nsJSContext::ShrinkGCBuffersNow()
 void
 nsJSContext::CycleCollectNow(nsICycleCollectorListener *aListener,
                              PRInt32 aExtraForgetSkippableCalls)
 {
   if (!NS_IsMainThread()) {
     return;
   }
 
+  if (sCCLockedOut) {
+    // We're in the middle of an incremental GC; finish it first
+    nsJSContext::GarbageCollectNow(js::gcreason::CC_FORCED, nsGCNormal);
+  }
+
   SAMPLE_LABEL("GC", "CycleCollectNow");
   NS_TIME_FUNCTION_MIN(1.0);
 
   KillCCTimer();
 
   PRTime start = PR_Now();
 
   PRUint32 suspected = nsCycleCollector_suspectedCount();
@@ -3352,17 +3363,17 @@ nsJSContext::CycleCollectNow(nsICycleCol
 
 // static
 void
 GCTimerFired(nsITimer *aTimer, void *aClosure)
 {
   NS_RELEASE(sGCTimer);
 
   uintptr_t reason = reinterpret_cast<uintptr_t>(aClosure);
-  nsJSContext::GarbageCollectNow(static_cast<js::gcreason::Reason>(reason), nsGCNormal);
+  nsJSContext::GarbageCollectNow(static_cast<js::gcreason::Reason>(reason), nsGCIncremental);
 }
 
 void
 ShrinkGCBuffersTimerFired(nsITimer *aTimer, void *aClosure)
 {
   NS_RELEASE(sShrinkGCBuffersTimer);
 
   nsJSContext::ShrinkGCBuffersNow();
@@ -3370,16 +3381,19 @@ ShrinkGCBuffersTimerFired(nsITimer *aTim
 
 // static
 void
 CCTimerFired(nsITimer *aTimer, void *aClosure)
 {
   if (sDidShutdown) {
     return;
   }
+  if (sCCLockedOut) {
+    return;
+  }
   ++sCCTimerFireCount;
   if (sCCTimerFireCount < (NS_CC_DELAY / NS_CC_SKIPPABLE_DELAY)) {
     PRUint32 suspected = nsCycleCollector_suspectedCount();
     if ((sPreviousSuspectedCount + 100) > suspected) {
      // Just a few new suspected objects, return early.
       return;
     }
     
@@ -3438,36 +3452,38 @@ nsJSContext::LoadEnd()
 
   // It's probably a good idea to GC soon since we have finished loading.
   sLoadingInProgress = false;
   PokeGC(js::gcreason::LOAD_END);
 }
 
 // static
 void
-nsJSContext::PokeGC(js::gcreason::Reason aReason)
+nsJSContext::PokeGC(js::gcreason::Reason aReason, int aDelay)
 {
   if (sGCTimer) {
     // There's already a timer for GC'ing, just return
     return;
   }
 
   CallCreateInstance("@mozilla.org/timer;1", &sGCTimer);
 
   if (!sGCTimer) {
     // Failed to create timer (probably because we're in XPCOM shutdown)
     return;
   }
 
   static bool first = true;
 
   sGCTimer->InitWithFuncCallback(GCTimerFired, reinterpret_cast<void *>(aReason),
-                                 first
-                                 ? NS_FIRST_GC_DELAY
-                                 : NS_GC_DELAY,
+                                 aDelay
+                                 ? aDelay
+                                 : (first
+                                    ? NS_FIRST_GC_DELAY
+                                    : NS_GC_DELAY),
                                  nsITimer::TYPE_ONE_SHOT);
 
   first = false;
 }
 
 // static
 void
 nsJSContext::PokeShrinkGCBuffers()
@@ -3544,71 +3560,92 @@ nsJSContext::KillCCTimer()
 
 void
 nsJSContext::GC(js::gcreason::Reason aReason)
 {
   PokeGC(aReason);
 }
 
 static void
-DOMGCFinishedCallback(JSRuntime *rt, JSCompartment *comp, const char *status)
+DOMGCSliceCallback(JSRuntime *aRt, js::GCProgress aProgress, const js::GCDescription &aDesc)
 {
   NS_ASSERTION(NS_IsMainThread(), "GCs must run on the main thread");
 
-  if (sPostGCEventsToConsole) {
+  if (aDesc.logMessage && sPostGCEventsToConsole) {
     PRTime now = PR_Now();
     PRTime delta = 0;
     if (sFirstCollectionTime) {
       delta = now - sFirstCollectionTime;
     } else {
       sFirstCollectionTime = now;
     }
 
     NS_NAMED_LITERAL_STRING(kFmt, "GC(T+%.1f) %s");
     nsString msg;
     msg.Adopt(nsTextFormatter::smprintf(kFmt.get(),
-                                        double(delta) / PR_USEC_PER_SEC, status));
+                                        double(delta) / PR_USEC_PER_SEC,
+                                        aDesc.logMessage));
     nsCOMPtr<nsIConsoleService> cs = do_GetService(NS_CONSOLESERVICE_CONTRACTID);
     if (cs) {
       cs->LogStringMessage(msg.get());
     }
   }
 
-  sCCollectedWaitingForGC = 0;
-  sCleanupSinceLastGC = false;
-
-  if (sGCTimer) {
-    // If we were waiting for a GC to happen, kill the timer.
+  // Prevent cycle collections during incremental GC.
+  if (aProgress == js::GC_CYCLE_BEGIN) {
+    sCCLockedOut = true;
+  } else if (aProgress == js::GC_CYCLE_END) {
+    sCCLockedOut = false;
+  }
+
+  // The GC has more work to do, so schedule another GC slice.
+  if (aProgress == js::GC_SLICE_END) {
     nsJSContext::KillGCTimer();
-
-    // If this is a compartment GC, restart it. We still want
-    // a full GC to happen. Compartment GCs usually happen as a
-    // result of last-ditch or MaybeGC. In both cases its
-    // probably a time of heavy activity and we want to delay
-    // the full GC, but we do want it to happen eventually.
-    if (comp) {
-      nsJSContext::PokeGC(js::gcreason::POST_COMPARTMENT);
-
-      // We poked the GC, so we can kill any pending CC here.
-      nsJSContext::KillCCTimer();
+    nsJSContext::KillCCTimer();
+
+    nsJSContext::PokeGC(js::gcreason::INTER_SLICE_GC, NS_INTERSLICE_GC_DELAY);
+  }
+
+  if (aProgress == js::GC_CYCLE_END) {
+    sCCollectedWaitingForGC = 0;
+    sCleanupSinceLastGC = false;
+
+    if (sGCTimer) {
+      // If we were waiting for a GC to happen, kill the timer.
+      nsJSContext::KillGCTimer();
+
+      // If this is a compartment GC, restart it. We still want
+      // a full GC to happen. Compartment GCs usually happen as a
+      // result of last-ditch or MaybeGC. In both cases it's
+      // probably a time of heavy activity and we want to delay
+      // the full GC, but we do want it to happen eventually.
+      if (aDesc.isCompartment) {
+        nsJSContext::PokeGC(js::gcreason::POST_COMPARTMENT);
+
+        // We poked the GC, so we can kill any pending CC here.
+        nsJSContext::KillCCTimer();
+      }
+    } else {
+      // If this was a full GC, poke the CC to run soon.
+      if (!aDesc.isCompartment) {
+        sGCHasRun = true;
+        nsJSContext::MaybePokeCC();
+      }
     }
-  } else {
-    // If this was a full GC, poke the CC to run soon.
-    if (!comp) {
-      sGCHasRun = true;
-      nsJSContext::MaybePokeCC();
+
+    // If we didn't end up scheduling a GC, make sure that we release GC buffers
+    // soon after canceling previous shrinking attempt.
+    nsJSContext::KillShrinkGCBuffersTimer();
+    if (!sGCTimer) {
+      nsJSContext::PokeShrinkGCBuffers();
     }
   }
 
-  // If we didn't end up scheduling a GC, make sure that we release GC buffers
-  // soon after canceling previous shrinking attempt 
-  nsJSContext::KillShrinkGCBuffersTimer();
-  if (!sGCTimer) {
-    nsJSContext::PokeShrinkGCBuffers();
-  }
+  if (sPrevGCSliceCallback)
+    (*sPrevGCSliceCallback)(aRt, aProgress, aDesc);
 }
 
 // Script object management - note duplicate implementation
 // in nsJSRuntime below...
 nsresult
 nsJSContext::HoldScriptObject(void* aScriptObject)
 {
     NS_ASSERTION(sIsInitialized, "runtime not initialized");
@@ -3692,16 +3729,17 @@ nsJSRuntime::ParseVersion(const nsString
 
 //static
 void
 nsJSRuntime::Startup()
 {
   // initialize all our statics, so that we can restart XPCOM
   sGCTimer = sCCTimer = nsnull;
   sGCHasRun = false;
+  sCCLockedOut = false;
   sLastCCEndTime = 0;
   sPendingLoadCount = 0;
   sLoadingInProgress = false;
   sCCollectedWaitingForGC = 0;
   sPostGCEventsToConsole = false;
   gNameSpaceManager = nsnull;
   sRuntimeService = nsnull;
   sRuntime = nsnull;
@@ -3763,20 +3801,37 @@ SetMemoryMaxPrefChangedCallback(const ch
   PRUint32 max = (pref <= 0 || pref >= 0x1000) ? -1 : (PRUint32)pref * 1024 * 1024;
   JS_SetGCParameter(nsJSRuntime::sRuntime, JSGC_MAX_BYTES, max);
   return 0;
 }
 
 static int
 SetMemoryGCModePrefChangedCallback(const char* aPrefName, void* aClosure)
 {
-  bool enableCompartmentGC = Preferences::GetBool(aPrefName);
-  JS_SetGCParameter(nsJSRuntime::sRuntime, JSGC_MODE, enableCompartmentGC
-                                                      ? JSGC_MODE_COMPARTMENT
-                                                      : JSGC_MODE_GLOBAL);
+  PRBool enableCompartmentGC = Preferences::GetBool("javascript.options.mem.gc_per_compartment");
+  PRBool enableIncrementalGC = Preferences::GetBool("javascript.options.mem.gc_incremental");
+  JSGCMode mode;
+  if (enableIncrementalGC) {
+    mode = JSGC_MODE_INCREMENTAL;
+  } else if (enableCompartmentGC) {
+    mode = JSGC_MODE_COMPARTMENT;
+  } else {
+    mode = JSGC_MODE_GLOBAL;
+  }
+  JS_SetGCParameter(nsJSRuntime::sRuntime, JSGC_MODE, mode);
+  return 0;
+}
+
+static int
+SetMemoryGCSliceTimePrefChangedCallback(const char* aPrefName, void* aClosure)
+{
+  PRInt32 pref = Preferences::GetInt(aPrefName, -1);
+  // handle overflow and negative pref values
+  if (pref > 0 && pref < 100000)
+    JS_SetGCParameter(nsJSRuntime::sRuntime, JSGC_SLICE_TIME_BUDGET, pref);
   return 0;
 }
 
 static JSPrincipals *
 ObjectPrincipalFinder(JSContext *cx, JSObject *obj)
 {
   if (!sSecurityManager)
     return nsnull;
@@ -3853,17 +3908,17 @@ nsJSRuntime::Init()
   NS_ENSURE_SUCCESS(rv, rv);
 
   rv = sRuntimeService->GetRuntime(&sRuntime);
   NS_ENSURE_SUCCESS(rv, rv);
 
   // Let's make sure that our main thread is the same as the xpcom main thread.
   NS_ASSERTION(NS_IsMainThread(), "bad");
 
-  ::JS_SetGCFinishedCallback(sRuntime, DOMGCFinishedCallback);
+  sPrevGCSliceCallback = js::SetGCSliceCallback(sRuntime, DOMGCSliceCallback);
 
   JSSecurityCallbacks *callbacks = JS_GetRuntimeSecurityCallbacks(sRuntime);
   NS_ASSERTION(callbacks, "SecMan should have set security callbacks!");
 
   callbacks->findObjectPrincipals = ObjectPrincipalFinder;
 
   // Set up the structured clone callbacks.
   static JSStructuredCloneCallbacks cloneCallbacks = {
@@ -3898,16 +3953,26 @@ nsJSRuntime::Init()
   SetMemoryMaxPrefChangedCallback("javascript.options.mem.max",
                                   nsnull);
 
   Preferences::RegisterCallback(SetMemoryGCModePrefChangedCallback,
                                 "javascript.options.mem.gc_per_compartment");
   SetMemoryGCModePrefChangedCallback("javascript.options.mem.gc_per_compartment",
                                      nsnull);
 
+  Preferences::RegisterCallback(SetMemoryGCModePrefChangedCallback,
+                                "javascript.options.mem.gc_incremental");
+  SetMemoryGCModePrefChangedCallback("javascript.options.mem.gc_incremental",
+                                     nsnull);
+
+  Preferences::RegisterCallback(SetMemoryGCSliceTimePrefChangedCallback,
+                                "javascript.options.mem.gc_incremental_slice_ms");
+  SetMemoryGCSliceTimePrefChangedCallback("javascript.options.mem.gc_incremental_slice_ms",
+                                          nsnull);
+
   nsCOMPtr<nsIObserverService> obs = mozilla::services::GetObserverService();
   if (!obs)
     return NS_ERROR_FAILURE;
 
   Preferences::AddBoolVarCache(&sGCOnMemoryPressure,
                                "javascript.options.gc_on_memory_pressure",
                                true);
 
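
The pref callbacks registered above make incremental GC tunable at runtime. A minimal sketch of flipping the new prefs from privileged (chrome) JavaScript, assuming Services.jsm is available:

    // Sketch: toggle the new incremental-GC prefs from chrome JS.
    Components.utils.import("resource://gre/modules/Services.jsm");

    // Selects JSGC_MODE_INCREMENTAL via SetMemoryGCModePrefChangedCallback.
    Services.prefs.setBoolPref("javascript.options.mem.gc_incremental", true);

    // Per-slice time budget in ms; the callback ignores values outside (0, 100000).
    Services.prefs.setIntPref("javascript.options.mem.gc_incremental_slice_ms", 10);
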
--- a/dom/base/nsJSEnvironment.h
+++ b/dom/base/nsJSEnvironment.h
@@ -183,17 +183,17 @@ public:
 
   static void GarbageCollectNow(js::gcreason::Reason reason, PRUint32 gckind = nsGCNormal);
   static void ShrinkGCBuffersNow();
   // If aExtraForgetSkippableCalls is -1, forgetSkippable won't be
   // called even if the previous collection was GC.
   static void CycleCollectNow(nsICycleCollectorListener *aListener = nsnull,
                               PRInt32 aExtraForgetSkippableCalls = 0);
 
-  static void PokeGC(js::gcreason::Reason aReason);
+  static void PokeGC(js::gcreason::Reason aReason, int aDelay = 0);
   static void KillGCTimer();
 
   static void PokeShrinkGCBuffers();
   static void KillShrinkGCBuffersTimer();
 
   static void MaybePokeCC();
   static void KillCCTimer();
 
--- a/dom/interfaces/base/nsIDOMWindowUtils.idl
+++ b/dom/interfaces/base/nsIDOMWindowUtils.idl
@@ -65,17 +65,17 @@ interface nsIDOMEvent;
 interface nsITransferable;
 interface nsIQueryContentEventResult;
 interface nsIDOMWindow;
 interface nsIDOMBlob;
 interface nsIDOMFile;
 interface nsIFile;
 interface nsIDOMTouch;
 
-[scriptable, uuid(ab6e9c71-8aa1-40bb-8bf9-65e16429055f)]
+[scriptable, uuid(73b48170-55d5-11e1-b86c-0800200c9a66)]
 interface nsIDOMWindowUtils : nsISupports {
 
   /**
    * Image animation mode of the window. When this attribute's value
    * is changed, the implementation should set all images in the window
    * to the given value. That is, when set to kDontAnimMode, all images
    * will stop animating. The attribute's value must be one of the
    * animationMode values from imgIContainer.
@@ -988,16 +988,22 @@ interface nsIDOMWindowUtils : nsISupport
    *
    */
   boolean getFileReferences(in AString aDatabaseName, in long long aId,
                             [optional] out long aRefCnt,
                             [optional] out long aDBRefCnt,
                             [optional] out long aSliceRefCnt);
 
   /**
+   * Return whether incremental GC is enabled (it may be disabled, e.g. by a binary add-on).
+   */
+  [implicit_jscontext]
+  boolean isIncrementalGCEnabled();
+
+  /**
    * Begin opcode-level profiling of all JavaScript execution in the window's
    * runtime.
    */
   [implicit_jscontext]
   void startPCCountProfiling();
 
   /**
    * Stop opcode-level profiling of JavaScript execution in the runtime, and
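
Because the new attribute is scriptable, chrome code can check whether incremental GC is in effect, for example when deciding which GC behaviour a test should expect. A usage sketch from a privileged window:

    // Sketch: query the new method via nsIDOMWindowUtils from chrome JS.
    var utils = window.QueryInterface(Components.interfaces.nsIInterfaceRequestor)
                      .getInterface(Components.interfaces.nsIDOMWindowUtils);
    if (!utils.isIncrementalGCEnabled())
      dump("Incremental GC is disabled (e.g. by a binary add-on)\n");
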
--- a/dom/plugins/base/nsJSNPRuntime.cpp
+++ b/dom/plugins/base/nsJSNPRuntime.cpp
@@ -174,17 +174,17 @@ NPObjWrapper_Construct(JSContext *cx, ui
 
 static JSBool
 CreateNPObjectMember(NPP npp, JSContext *cx, JSObject *obj, NPObject *npobj,
                      jsid id, NPVariant* getPropertyResult, jsval *vp);
 
 static JSClass sNPObjectJSWrapperClass =
   {
     NPRUNTIME_JSCLASS_NAME,
-    JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE | JSCLASS_NEW_ENUMERATE,
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_NEW_RESOLVE | JSCLASS_NEW_ENUMERATE,
     NPObjWrapper_AddProperty, NPObjWrapper_DelProperty,
     NPObjWrapper_GetProperty, NPObjWrapper_SetProperty,
     (JSEnumerateOp)NPObjWrapper_newEnumerate,
     (JSResolveOp)NPObjWrapper_NewResolve, NPObjWrapper_Convert,
     NPObjWrapper_Finalize, nsnull, nsnull, NPObjWrapper_Call,
     NPObjWrapper_Construct, nsnull, nsnull
   };
 
--- a/dom/plugins/base/nsPluginInstanceOwner.cpp
+++ b/dom/plugins/base/nsPluginInstanceOwner.cpp
@@ -3678,42 +3678,52 @@ void nsPluginInstanceOwner::SetFrame(nsO
         // called, so OnDestroyImage() can't yet have been called.  So we need
         // to do ourselves what OnDestroyImage() would have done.
         NS_RELEASE_THIS();
       }
 #endif
       container->SetCurrentImage(nsnull);
     }
 
-    // If we had an old frame and we're not going to have a new one then
-    // we should unregister for some things.
+#if defined(XP_MACOSX) && !defined(NP_NO_QUICKDRAW)
     if (!aFrame) {
-      // Unregister scroll position listeners
+      // At this point we had a frame but it is going away and we're not getting a new one.
+      // Unregister for scroll position listening, which is only required for Carbon
+      // event model plugins on Mac OS X. It's OK to unregister when we didn't register,
+      // so don't be strict about unregistering. Better to unregister when we didn't have to
+      // than to not unregister when we should.
       for (nsIFrame* f = mObjectFrame; f; f = nsLayoutUtils::GetCrossDocParentFrame(f)) {
         nsIScrollableFrame* sf = do_QueryFrame(f);
         if (sf) {
           sf->RemoveScrollPositionListener(this);
         }
       }
     }
+#endif
 
     // Make sure the old frame isn't holding a reference to us.
     mObjectFrame->SetInstanceOwner(nsnull);
   } else {
+    // Scroll position listening is only required for Carbon event model plugins on Mac OS X.
+    // Note that we probably have a crash bug in the way we register/unregister, bug 723190.
+    // Bug 723190 is mitigated by limiting registration to Carbon event model plugins.
+#if defined(XP_MACOSX) && !defined(NP_NO_QUICKDRAW)
     if (aFrame) {
-      // We didn't have an object frame before but we do now!
-      // We need to register a scroll position listener on every scrollable
-      // frame up to the top
-      for (nsIFrame* f = aFrame; f; f = nsLayoutUtils::GetCrossDocParentFrame(f)) {
-        nsIScrollableFrame* sf = do_QueryFrame(f);
-        if (sf) {
-          sf->AddScrollPositionListener(this);
+      // We didn't have an object frame before but we do now. We need to register a scroll
+      // position listener on every scrollable frame up to the top.
+      if (GetEventModel() == NPEventModelCarbon) {
+        for (nsIFrame* f = aFrame; f; f = nsLayoutUtils::GetCrossDocParentFrame(f)) {
+          nsIScrollableFrame* sf = do_QueryFrame(f);
+          if (sf) {
+            sf->AddScrollPositionListener(this);
+          }
         }
       }
     }
+#endif
   }
 
   // Swap in the new frame (or no frame)
   mObjectFrame = aFrame;
 
   // Set up a new frame
   if (mObjectFrame) {
     mObjectFrame->SetInstanceOwner(this);
--- a/dom/src/events/nsJSEventListener.cpp
+++ b/dom/src/events/nsJSEventListener.cpp
@@ -228,16 +228,18 @@ nsJSEventListener::HandleEvent(nsIDOMEve
   JSContext* cx = nsnull;
   nsCOMPtr<nsIJSContextStack> stack =
     do_GetService("@mozilla.org/js/xpc/ContextStack;1");
   NS_ASSERTION(stack && NS_SUCCEEDED(stack->Peek(&cx)) && cx &&
                GetScriptContextFromJSContext(cx) == mContext,
                "JSEventListener has wrong script context?");
 #endif
   nsCOMPtr<nsIVariant> vrv;
+  xpc_UnmarkGrayObject(mScopeObject);
+  xpc_UnmarkGrayObject(mHandler);
   rv = mContext->CallEventHandler(mTarget, mScopeObject, mHandler, iargv,
                                   getter_AddRefs(vrv));
 
   if (NS_SUCCEEDED(rv)) {
     PRUint16 dataType = nsIDataType::VTYPE_VOID;
     if (vrv)
       vrv->GetDataType(&dataType);
 
--- a/dom/workers/ListenerManager.cpp
+++ b/dom/workers/ListenerManager.cpp
@@ -102,16 +102,19 @@ struct Listener : PRCList
     listener->mPhase = aPhase;
     listener->mWantsUntrusted = aWantsUntrusted;
     return listener;
   }
 
   static void
   Remove(JSContext* aCx, Listener* aListener)
   {
+    if (js::IsIncrementalBarrierNeeded(aCx))
+      js::IncrementalValueBarrier(aListener->mListenerVal);
+
     PR_REMOVE_LINK(aListener);
     JS_free(aCx, aListener);
   }
 
   jsval mListenerVal;
   ListenerManager::Phase mPhase;
   bool mWantsUntrusted;
 };
--- a/dom/workers/Worker.cpp
+++ b/dom/workers/Worker.cpp
@@ -295,17 +295,17 @@ private:
     }
 
     return worker->PostMessage(aCx, message);
   }
 };
 
 JSClass Worker::sClass = {
   "Worker",
-  JSCLASS_HAS_PRIVATE,
+  JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS,
   JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
   JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Finalize, NULL, NULL, NULL,
   NULL, NULL, NULL, Trace, NULL
 };
 
 JSPropertySpec Worker::sProperties[] = {
   { sEventStrings[STRING_onerror], STRING_onerror, PROPERTY_FLAGS,
     GetEventListener, SetEventListener },
@@ -410,17 +410,17 @@ private:
     if (worker) {
       worker->TraceInstance(aTrc);
     }
   }
 };
 
 JSClass ChromeWorker::sClass = {
   "ChromeWorker",
-  JSCLASS_HAS_PRIVATE,
+  JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS,
   JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
   JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Finalize, NULL, NULL, NULL,
   NULL, NULL, NULL, Trace, NULL
 };
 
 WorkerPrivate*
 Worker::GetInstancePrivate(JSContext* aCx, JSObject* aObj,
                            const char* aFunctionName)
--- a/dom/workers/WorkerScope.cpp
+++ b/dom/workers/WorkerScope.cpp
@@ -794,17 +794,17 @@ private:
     }
 
     return scope->mWorker->PostMessageToParent(aCx, message);
   }
 };
 
 JSClass DedicatedWorkerGlobalScope::sClass = {
   "DedicatedWorkerGlobalScope",
-  JSCLASS_GLOBAL_FLAGS | JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE,
+  JSCLASS_GLOBAL_FLAGS | JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_NEW_RESOLVE,
   JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
   JS_EnumerateStub, reinterpret_cast<JSResolveOp>(Resolve), JS_ConvertStub,
   Finalize, NULL, NULL, NULL, NULL, NULL, NULL, Trace, NULL
 };
 
 JSPropertySpec DedicatedWorkerGlobalScope::sProperties[] = {
   { sEventStrings[STRING_onmessage], STRING_onmessage, PROPERTY_FLAGS,
     GetEventListener, SetEventListener },
--- a/dom/workers/XMLHttpRequest.cpp
+++ b/dom/workers/XMLHttpRequest.cpp
@@ -215,17 +215,17 @@ private:
     }
 
     return priv->SetEventListenerOnEventTarget(aCx, name + 2, aVp);
   }
 };
 
 JSClass XMLHttpRequestUpload::sClass = {
   "XMLHttpRequestUpload",
-  JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(SLOT_COUNT),
+  JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(SLOT_COUNT),
   JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
   JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Finalize,
   NULL, NULL, NULL, NULL, NULL, NULL, Trace, NULL
 };
 
 JSPropertySpec XMLHttpRequestUpload::sProperties[] = {
   { sEventStrings[STRING_onabort], STRING_onabort, PROPERTY_FLAGS,
     GetEventListener, SetEventListener },
@@ -764,17 +764,17 @@ private:
     }
 
     return priv->OverrideMimeType(aCx, mimeType);
   }
 };
 
 JSClass XMLHttpRequest::sClass = {
   "XMLHttpRequest",
-  JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(SLOT_COUNT),
+  JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(SLOT_COUNT),
   JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
   JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Finalize,
   NULL, NULL, NULL, NULL, NULL, NULL, Trace, NULL
 };
 
 JSPropertySpec XMLHttpRequest::sProperties[] = {
 
 #define GENERIC_READONLY_PROPERTY(_name) \
--- a/js/jsd/jsd_xpc.cpp
+++ b/js/jsd/jsd_xpc.cpp
@@ -102,18 +102,18 @@
 #define NS_CATMAN_CTRID   "@mozilla.org/categorymanager;1"
 #define NS_JSRT_CTRID     "@mozilla.org/js/xpc/RuntimeService;1"
 
 #define AUTOREG_CATEGORY  "xpcom-autoregistration"
 #define APPSTART_CATEGORY "app-startup"
 #define JSD_AUTOREG_ENTRY "JSDebugger Startup Observer"
 #define JSD_STARTUP_ENTRY "JSDebugger Startup Observer"
 
-static JSBool
-jsds_GCCallbackProc (JSContext *cx, JSGCStatus status);
+static void
+jsds_GCSliceCallbackProc (JSRuntime *rt, js::GCProgress progress, const js::GCDescription &desc);
 
 /*******************************************************************************
  * global vars
  ******************************************************************************/
 
 const char implementationString[] = "Mozilla JavaScript Debugger Service";
 
 const char jsdServiceCtrID[] = "@mozilla.org/js/jsd/debugger-service;1";
@@ -123,19 +123,19 @@ const char jsdASObserverCtrID[] = "servi
 #ifdef DEBUG_verbose
 PRUint32 gScriptCount   = 0;
 PRUint32 gValueCount    = 0;
 PRUint32 gPropertyCount = 0;
 PRUint32 gContextCount  = 0;
 PRUint32 gFrameCount  = 0;
 #endif
 
-static jsdService   *gJsds       = 0;
-static JSGCCallback  gLastGCProc = jsds_GCCallbackProc;
-static JSGCStatus    gGCStatus   = JSGC_END;
+static jsdService          *gJsds               = 0;
+static js::GCSliceCallback gPrevGCSliceCallback = jsds_GCSliceCallbackProc;
+static bool                gGCRunning           = false;
 
 static struct DeadScript {
     PRCList     links;
     JSDContext *jsdc;
     jsdIScript *script;
 } *gDeadScripts = nsnull;
 
 enum PatternType {
@@ -455,21 +455,18 @@ jsds_FilterHook (JSDContext *jsdc, JSDTh
     
 }
 
 /*******************************************************************************
  * c callbacks
  *******************************************************************************/
 
 static void
-jsds_NotifyPendingDeadScripts (JSContext *cx)
+jsds_NotifyPendingDeadScripts (JSRuntime *rt)
 {
-#ifdef CAUTIOUS_SCRIPTHOOK
-    JSRuntime *rt = JS_GetRuntime(cx);
-#endif
     jsdService *jsds = gJsds;
 
     nsCOMPtr<jsdIScriptHook> hook;
     if (jsds) {
         NS_ADDREF(jsds);
         jsds->GetScriptHook (getter_AddRefs(hook));
         jsds->DoPause(nsnull, true);
     }
@@ -506,41 +503,33 @@ jsds_NotifyPendingDeadScripts (JSContext
     }
 
     if (jsds) {
         jsds->DoUnPause(nsnull, true);
         NS_RELEASE(jsds);
     }
 }
 
-static JSBool
-jsds_GCCallbackProc (JSContext *cx, JSGCStatus status)
+static void
+jsds_GCSliceCallbackProc (JSRuntime *rt, js::GCProgress progress, const js::GCDescription &desc)
 {
-#ifdef DEBUG_verbose
-    printf ("new gc status is %i\n", status);
-#endif
-    if (status == JSGC_END) {
-        /* just to guard against reentering. */
-        gGCStatus = JSGC_BEGIN;
+    if (progress == js::GC_CYCLE_END || progress == js::GC_SLICE_END) {
+        NS_ASSERTION(gGCRunning, "GC slice callback was missed");
+
         while (gDeadScripts)
-            jsds_NotifyPendingDeadScripts (cx);
+            jsds_NotifyPendingDeadScripts (rt);
+
+        gGCRunning = false;
+    } else {
+        NS_ASSERTION(!gGCRunning, "should not re-enter GC");
+        gGCRunning = true;
     }
 
-    gGCStatus = status;
-    if (gLastGCProc && !gLastGCProc (cx, status)) {
-        /*
-         * If gLastGCProc returns false, then the GC will abort without making
-         * another callback with status=JSGC_END, so set the status to JSGC_END
-         * here.
-         */
-        gGCStatus = JSGC_END;
-        return JS_FALSE;
-    }
-    
-    return JS_TRUE;
+    if (gPrevGCSliceCallback)
+        (*gPrevGCSliceCallback)(rt, progress, desc);
 }
 
 static uintN
 jsds_ErrorHookProc (JSDContext *jsdc, JSContext *cx, const char *message,
                     JSErrorReport *report, void *callerdata)
 {
     static bool running = false;
 
@@ -746,17 +735,17 @@ jsds_ScriptHookProc (JSDContext* jsdc, J
          * to remove the reference held in the JSDScript private data. */
         nsCOMPtr<jsdIScript> jsdis = 
             static_cast<jsdIScript *>(JSD_GetScriptPrivate(jsdscript));
         if (!jsdis)
             return;
 
         jsdis->Invalidate();
 
-        if (gGCStatus == JSGC_END) {
+        if (!gGCRunning) {
             nsCOMPtr<jsdIScriptHook> hook;
             gJsds->GetScriptHook(getter_AddRefs(hook));
             if (!hook)
                 return;
 
             /* if GC *isn't* running, we can tell the user about the script
              * delete now. */
 #ifdef CAUTIOUS_SCRIPTHOOK
@@ -2575,19 +2564,19 @@ jsdService::DeactivateDebugger ()
 NS_IMETHODIMP
 jsdService::ActivateDebugger (JSRuntime *rt)
 {
     if (mOn)
         return (rt == mRuntime) ? NS_OK : NS_ERROR_ALREADY_INITIALIZED;
 
     mRuntime = rt;
 
-    if (gLastGCProc == jsds_GCCallbackProc)
+    if (gPrevGCSliceCallback == jsds_GCSliceCallbackProc)
         /* condition indicates that the callback proc has not been set yet */
-        gLastGCProc = JS_SetGCCallbackRT (rt, jsds_GCCallbackProc);
+        gPrevGCSliceCallback = js::SetGCSliceCallback (rt, jsds_GCSliceCallbackProc);
 
     mCx = JSD_DebuggerOnForUser (rt, NULL, NULL);
     if (!mCx)
         return NS_ERROR_FAILURE;
 
     JSContext *cx   = JSD_GetDefaultJSContext (mCx);
     JSObject  *glob = JS_GetGlobalObject (cx);
 
@@ -2647,29 +2636,24 @@ jsdService::Off (void)
 {
     if (!mOn)
         return NS_OK;
     
     if (!mCx || !mRuntime)
         return NS_ERROR_NOT_INITIALIZED;
     
     if (gDeadScripts) {
-        if (gGCStatus != JSGC_END)
+        if (gGCRunning)
             return NS_ERROR_NOT_AVAILABLE;
 
         JSContext *cx = JSD_GetDefaultJSContext(mCx);
         while (gDeadScripts)
-            jsds_NotifyPendingDeadScripts (cx);
+            jsds_NotifyPendingDeadScripts (JS_GetRuntime(cx));
     }
 
-    /*
-    if (gLastGCProc != jsds_GCCallbackProc)
-        JS_SetGCCallbackRT (mRuntime, gLastGCProc);
-    */
-
     DeactivateDebugger();
 
 #ifdef DEBUG
     printf ("+++ JavaScript debugging hooks removed.\n");
 #endif
 
     nsresult rv;
     nsCOMPtr<nsIXPConnect> xpc = do_GetService(nsIXPConnect::GetCID(), &rv);
@@ -3369,17 +3353,17 @@ jsdService::~jsdService()
     mBreakpointHook = nsnull;
     mDebugHook = nsnull;
     mDebuggerHook = nsnull;
     mInterruptHook = nsnull;
     mScriptHook = nsnull;
     mThrowHook = nsnull;
     mTopLevelHook = nsnull;
     mFunctionHook = nsnull;
-    gGCStatus = JSGC_END;
+    gGCRunning = false;
     Off();
     gJsds = nsnull;
 }
 
 jsdService *
 jsdService::GetService ()
 {
     if (!gJsds)
--- a/js/src/Makefile.in
+++ b/js/src/Makefile.in
@@ -114,17 +114,16 @@ CPPSRCS		= \
 		jsdbgapi.cpp \
 		jsdhash.cpp \
 		jsdtoa.cpp \
 		jsexn.cpp \
 		jsfriendapi.cpp \
 		jsfun.cpp \
 		jsgc.cpp \
 		jsgcmark.cpp \
-		jsgcstats.cpp \
 		jscrashreport.cpp \
 		jshash.cpp \
 		jsinfer.cpp \
 		jsinterp.cpp \
 		jsiter.cpp \
 		jslog2.cpp \
 		jsmath.cpp \
 		jsnativestack.cpp \
@@ -188,17 +187,16 @@ INSTALLED_HEADERS = \
 		jsclass.h \
 		jsclist.h \
 		jscompat.h \
 		jsdbgapi.h \
 		jsdhash.h \
 		jsfriendapi.h \
 		jsgc.h \
 		jscell.h \
-		jsgcstats.h \
 		jshash.h \
 		jslock.h \
 		json.h \
 		jsproxy.h \
 		jsprf.h \
 		jsproto.tbl \
 		jsprvtd.h \
 		jspubtd.h \
--- a/js/src/builtin/MapObject.cpp
+++ b/js/src/builtin/MapObject.cpp
@@ -141,17 +141,17 @@ HashableValue::equals(const HashableValu
     return b;
 }
 
 
 /*** Map *****************************************************************************************/
 
 Class MapObject::class_ = {
     "Map",
-    JSCLASS_HAS_PRIVATE |
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_CACHED_PROTO(JSProto_Map),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
     JS_ConvertStub,
@@ -292,17 +292,17 @@ js_InitMapClass(JSContext *cx, JSObject 
     return MapObject::initClass(cx, obj);
 }
 
 
 /*** Set *****************************************************************************************/
 
 Class SetObject::class_ = {
     "Set",
-    JSCLASS_HAS_PRIVATE |
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_CACHED_PROTO(JSProto_Set),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
     JS_ConvertStub,
--- a/js/src/ctypes/CTypes.cpp
+++ b/js/src/ctypes/CTypes.cpp
@@ -250,34 +250,34 @@ static JSClass sCDataProtoClass = {
   0,
   JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
   JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, JS_FinalizeStub,
   JSCLASS_NO_OPTIONAL_MEMBERS
 };
 
 static JSClass sCTypeClass = {
   "CType",
-  JSCLASS_HAS_RESERVED_SLOTS(CTYPE_SLOTS),
+  JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(CTYPE_SLOTS),
   JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
   JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, CType::Finalize,
   NULL, NULL, CType::ConstructData, CType::ConstructData, NULL,
   CType::HasInstance, CType::Trace, NULL
 };
 
 static JSClass sCDataClass = {
   "CData",
   JSCLASS_HAS_RESERVED_SLOTS(CDATA_SLOTS),
   JS_PropertyStub, JS_PropertyStub, ArrayType::Getter, ArrayType::Setter,
   JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, CData::Finalize,
   NULL, NULL, FunctionType::Call, FunctionType::Call, NULL, NULL, NULL, NULL
 };
 
 static JSClass sCClosureClass = {
   "CClosure",
-  JSCLASS_HAS_RESERVED_SLOTS(CCLOSURE_SLOTS),
+  JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(CCLOSURE_SLOTS),
   JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
   JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, CClosure::Finalize,
   NULL, NULL, NULL, NULL, NULL, NULL, CClosure::Trace, NULL
 };
 
 #define CTYPESFN_FLAGS \
   (JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT)
 
--- a/js/src/frontend/Parser.cpp
+++ b/js/src/frontend/Parser.cpp
@@ -252,17 +252,17 @@ Parser::newFunctionBox(JSObject *obj, Pa
     return funbox;
 }
 
 void
 Parser::trace(JSTracer *trc)
 {
     ObjectBox *objbox = traceListHead;
     while (objbox) {
-        MarkObjectRoot(trc, objbox->object, "parser.object");
+        MarkObjectRoot(trc, &objbox->object, "parser.object");
         if (objbox->isFunctionBox)
             static_cast<FunctionBox *>(objbox)->bindings.trace(trc);
         objbox = objbox->traceLink;
     }
 
     for (TreeContext *tc = this->tc; tc; tc = tc->parent)
         tc->trace(trc);
 }
--- a/js/src/gc/Barrier-inl.h
+++ b/js/src/gc/Barrier-inl.h
@@ -261,11 +261,36 @@ HeapId::operator=(const HeapId &v)
 {
     pre();
     JS_ASSERT(!IsPoisonedId(v.value));
     value = v.value;
     post();
     return *this;
 }
 
+inline const Value &
+ReadBarrieredValue::get() const
+{
+    if (value.isObject())
+        JSObject::readBarrier(&value.toObject());
+    else if (value.isString())
+        JSString::readBarrier(value.toString());
+    else
+        JS_ASSERT(!value.isMarkable());
+
+    return value;
+}
+
+inline
+ReadBarrieredValue::operator const Value &() const
+{
+    return get();
+}
+
+inline JSObject &
+ReadBarrieredValue::toObject() const
+{
+    return get().toObject();
+}
+
 } /* namespace js */
 
 #endif /* jsgc_barrier_inl_h___ */
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -401,16 +401,17 @@ class HeapId
 
     inline HeapId &operator=(jsid id);
     inline HeapId &operator=(const HeapId &v);
 
     bool operator==(jsid id) const { return value == id; }
     bool operator!=(jsid id) const { return value != id; }
 
     jsid get() const { return value; }
+    jsid *unsafeGet() { return &value; }
     operator jsid() const { return value; }
 
   private:
     inline void pre();
     inline void post();
 
     HeapId(const HeapId &v);
 };
@@ -451,11 +452,25 @@ class ReadBarriered
     void set(T *v) { value = v; }
 
     operator bool() { return !!value; }
 
     template<class U>
     operator MarkablePtr<U>() const { return MarkablePtr<U>(value); }
 };
 
+class ReadBarrieredValue
+{
+    Value value;
+
+  public:
+    ReadBarrieredValue() : value(UndefinedValue()) {}
+    ReadBarrieredValue(const Value &value) : value(value) {}
+
+    inline const Value &get() const;
+    inline operator const Value &() const;
+
+    inline JSObject &toObject() const;
+};
+
 }
 
 #endif /* jsgc_barrier_h___ */
--- a/js/src/gc/Statistics.cpp
+++ b/js/src/gc/Statistics.cpp
@@ -33,19 +33,20 @@
  * decision by deleting the provisions above and replace them with the notice
  * and other provisions required by the GPL or the LGPL. If you do not delete
  * the provisions above, a recipient may use your version of this file under
  * the terms of any one of the MPL, the GPL or the LGPL.
  *
  * ***** END LICENSE BLOCK ***** */
 
 #include <stdio.h>
-#include <ctype.h>
+#include <stdarg.h>
 
 #include "jscntxt.h"
+#include "jscompartment.h"
 #include "jscrashformat.h"
 #include "jscrashreport.h"
 #include "jsprf.h"
 #include "jsprobes.h"
 #include "jsutil.h"
 #include "prmjtime.h"
 
 #include "gc/Statistics.h"
@@ -64,88 +65,124 @@ ExplainReason(gcreason::Reason reason)
 
         default:
           JS_NOT_REACHED("bad GC reason");
           return "?";
 #undef SWITCH_REASON
     }
 }
 
-Statistics::ColumnInfo::ColumnInfo(const char *title, double t, double total)
-  : title(title)
+void
+Statistics::fmt(const char *f, ...)
 {
-    JS_snprintf(str, sizeof(str), "%.1f", t);
-    JS_snprintf(totalStr, sizeof(totalStr), "%.1f", total);
-    width = 6;
+    va_list va;
+    size_t off = strlen(buffer);
+
+    va_start(va, f);
+    JS_vsnprintf(buffer + off, BUFFER_SIZE - off, f, va);
+    va_end(va);
 }
 
-Statistics::ColumnInfo::ColumnInfo(const char *title, double t)
-  : title(title)
+void
+Statistics::fmtIfNonzero(const char *name, double t)
 {
-    JS_snprintf(str, sizeof(str), "%.1f", t);
-    strcpy(totalStr, "n/a");
-    width = 6;
+    if (t) {
+        if (needComma)
+            fmt(", ");
+        fmt("%s: %.1f", name, t);
+        needComma = true;
+    }
 }
 
-Statistics::ColumnInfo::ColumnInfo(const char *title, unsigned int data)
-  : title(title)
+void
+Statistics::formatPhases(int64_t *times)
 {
-    JS_snprintf(str, sizeof(str), "%d", data);
-    strcpy(totalStr, "n/a");
-    width = 4;
+    needComma = false;
+    fmtIfNonzero("mark", t(times[PHASE_MARK]));
+    fmtIfNonzero("mark-roots", t(times[PHASE_MARK_ROOTS]));
+    fmtIfNonzero("mark-delayed", t(times[PHASE_MARK_DELAYED]));
+    fmtIfNonzero("mark-other", t(times[PHASE_MARK_OTHER]));
+    fmtIfNonzero("sweep", t(times[PHASE_SWEEP]));
+    fmtIfNonzero("sweep-obj", t(times[PHASE_SWEEP_OBJECT]));
+    fmtIfNonzero("sweep-string", t(times[PHASE_SWEEP_STRING]));
+    fmtIfNonzero("sweep-script", t(times[PHASE_SWEEP_SCRIPT]));
+    fmtIfNonzero("sweep-shape", t(times[PHASE_SWEEP_SHAPE]));
+    fmtIfNonzero("discard-code", t(times[PHASE_DISCARD_CODE]));
+    fmtIfNonzero("discard-analysis", t(times[PHASE_DISCARD_ANALYSIS]));
+    fmtIfNonzero("xpconnect", t(times[PHASE_XPCONNECT]));
+    fmtIfNonzero("deallocate", t(times[PHASE_DESTROY]));
 }
 
-Statistics::ColumnInfo::ColumnInfo(const char *title, const char *data)
-  : title(title)
+/* Except for the first and last, slices of less than 12ms are not reported. */
+static const int64_t SLICE_MIN_REPORT_TIME = 12 * PRMJ_USEC_PER_MSEC;
+
+const char *
+Statistics::formatData()
 {
-    JS_ASSERT(strlen(data) < sizeof(str));
-    strcpy(str, data);
-    strcpy(totalStr, "n/a ");
-    width = 0;
-}
+    buffer[0] = 0x00;
+
+    int64_t total = 0, longest = 0;
 
-static const int NUM_COLUMNS = 17;
+    for (SliceData *slice = slices.begin(); slice != slices.end(); slice++) {
+        total += slice->duration();
+        if (slice->duration() > longest)
+            longest = slice->duration();
+    }
 
-void
-Statistics::makeTable(ColumnInfo *cols)
-{
-    int i = 0;
+    double mmu20 = computeMMU(20 * PRMJ_USEC_PER_MSEC);
+    double mmu50 = computeMMU(50 * PRMJ_USEC_PER_MSEC);
 
-    cols[i++] = ColumnInfo("Type", compartment ? "Comp" : "Glob");
+    fmt("TotalTime: %.1fms, Type: %s", t(total), compartment ? "compartment" : "global");
+    fmt(", MMU(20ms): %d%%, MMU(50ms): %d%%", int(mmu20 * 100), int(mmu50 * 100));
+
+    if (slices.length() > 1)
+        fmt(", MaxPause: %.1f", t(longest));
+    else
+        fmt(", Reason: %s", ExplainReason(slices[0].reason));
 
-    cols[i++] = ColumnInfo("Total", t(PHASE_GC), total(PHASE_GC));
-    cols[i++] = ColumnInfo("Wait", beginDelay(PHASE_MARK, PHASE_GC));
-    cols[i++] = ColumnInfo("Mark", t(PHASE_MARK), total(PHASE_MARK));
-    cols[i++] = ColumnInfo("Sweep", t(PHASE_SWEEP), total(PHASE_SWEEP));
-    cols[i++] = ColumnInfo("FinObj", t(PHASE_SWEEP_OBJECT), total(PHASE_SWEEP_OBJECT));
-    cols[i++] = ColumnInfo("FinStr", t(PHASE_SWEEP_STRING), total(PHASE_SWEEP_STRING));
-    cols[i++] = ColumnInfo("FinScr", t(PHASE_SWEEP_SCRIPT), total(PHASE_SWEEP_SCRIPT));
-    cols[i++] = ColumnInfo("FinShp", t(PHASE_SWEEP_SHAPE), total(PHASE_SWEEP_SHAPE));
-    cols[i++] = ColumnInfo("DisCod", t(PHASE_DISCARD_CODE), total(PHASE_DISCARD_CODE));
-    cols[i++] = ColumnInfo("DisAnl", t(PHASE_DISCARD_ANALYSIS), total(PHASE_DISCARD_ANALYSIS));
-    cols[i++] = ColumnInfo("XPCnct", t(PHASE_XPCONNECT), total(PHASE_XPCONNECT));
-    cols[i++] = ColumnInfo("Destry", t(PHASE_DESTROY), total(PHASE_DESTROY));
-    cols[i++] = ColumnInfo("End", endDelay(PHASE_GC, PHASE_DESTROY));
+    if (wasReset)
+        fmt(", ***RESET***");
+
+    fmt(", +chunks: %d, -chunks: %d\n", counts[STAT_NEW_CHUNK], counts[STAT_DESTROY_CHUNK]);
+
+    if (slices.length() > 1) {
+        for (size_t i = 0; i < slices.length(); i++) {
+            int64_t width = slices[i].duration();
+            if (i != 0 && i != slices.length() - 1 && width < SLICE_MIN_REPORT_TIME)
+                continue;
 
-    cols[i++] = ColumnInfo("+Chu", counts[STAT_NEW_CHUNK]);
-    cols[i++] = ColumnInfo("-Chu", counts[STAT_DESTROY_CHUNK]);
+            fmt("    Slice %d @ %.1fms (Pause: %.1f, Reason: %s): ",
+                i,
+                t(slices[i].end - slices[0].start),
+                t(width),
+                ExplainReason(slices[i].reason));
+            formatPhases(slices[i].phaseTimes);
+            fmt("\n");
+        }
 
-    cols[i++] = ColumnInfo("Reason", ExplainReason(triggerReason));
+        fmt("    Totals: ");
+    }
 
-    JS_ASSERT(i == NUM_COLUMNS);
+    formatPhases(phaseTimes);
+    fmt("\n");
+
+    return buffer;
 }
 
 Statistics::Statistics(JSRuntime *rt)
   : runtime(rt),
-    triggerReason(gcreason::NO_REASON)
+    startupTime(PRMJ_Now()),
+    fp(NULL),
+    fullFormat(false),
+    compartment(NULL),
+    wasReset(false),
+    needComma(false)
 {
+    PodArrayZero(phaseTotals);
     PodArrayZero(counts);
-    PodArrayZero(totals);
-
-    startupTime = PRMJ_Now();
 
     char *env = getenv("MOZ_GCTIMER");
     if (!env || strcmp(env, "none") == 0) {
         fp = NULL;
         return;
     }
 
     if (strcmp(env, "stdout") == 0) {
@@ -154,184 +191,199 @@ Statistics::Statistics(JSRuntime *rt)
     } else if (strcmp(env, "stderr") == 0) {
         fullFormat = false;
         fp = stderr;
     } else {
         fullFormat = true;
 
         fp = fopen(env, "a");
         JS_ASSERT(fp);
-
-        fprintf(fp, "     AppTime");
-
-        ColumnInfo cols[NUM_COLUMNS];
-        makeTable(cols);
-        for (int i = 0; i < NUM_COLUMNS; i++)
-            fprintf(fp, ", %*s", cols[i].width, cols[i].title);
-        fprintf(fp, "\n");
     }
 }
 
 Statistics::~Statistics()
 {
     if (fp) {
         if (fullFormat) {
-            fprintf(fp, "------>TOTAL");
-
-            ColumnInfo cols[NUM_COLUMNS];
-            makeTable(cols);
-            for (int i = 0; i < NUM_COLUMNS && cols[i].totalStr[0]; i++)
-                fprintf(fp, ", %*s", cols[i].width, cols[i].totalStr);
-            fprintf(fp, "\n");
+            buffer[0] = 0x00;
+            formatPhases(phaseTotals);
+            fprintf(fp, "TOTALS\n%s\n\n-------\n", buffer);
         }
 
         if (fp != stdout && fp != stderr)
             fclose(fp);
     }
 }
 
-struct GCCrashData
-{
-    int isRegen;
-    int isCompartment;
-};
-
-void
-Statistics::beginGC(JSCompartment *comp, gcreason::Reason reason)
+double
+Statistics::t(int64_t t)
 {
-    compartment = comp;
-
-    PodArrayZero(phaseStarts);
-    PodArrayZero(phaseEnds);
-    PodArrayZero(phaseTimes);
-
-    triggerReason = reason;
-
-    beginPhase(PHASE_GC);
-    Probes::GCStart();
-
-    GCCrashData crashData;
-    crashData.isCompartment = !!compartment;
-    crash::SaveCrashData(crash::JS_CRASH_TAG_GC, &crashData, sizeof(crashData));
-}
-
-double
-Statistics::t(Phase phase)
-{
-    return double(phaseTimes[phase]) / PRMJ_USEC_PER_MSEC;
+    return double(t) / PRMJ_USEC_PER_MSEC;
 }
 
-double
-Statistics::total(Phase phase)
-{
-    return double(totals[phase]) / PRMJ_USEC_PER_MSEC;
-}
-
-double
-Statistics::beginDelay(Phase phase1, Phase phase2)
-{
-    return double(phaseStarts[phase1] - phaseStarts[phase2]) / PRMJ_USEC_PER_MSEC;
-}
-
-double
-Statistics::endDelay(Phase phase1, Phase phase2)
-{
-    return double(phaseEnds[phase1] - phaseEnds[phase2]) / PRMJ_USEC_PER_MSEC;
-}
-
-void
-Statistics::statsToString(char *buffer, size_t size)
+int64_t
+Statistics::gcDuration()
 {
-    JS_ASSERT(size);
-    buffer[0] = 0x00;
-
-    ColumnInfo cols[NUM_COLUMNS];
-    makeTable(cols);
-
-    size_t pos = 0;
-    for (int i = 0; i < NUM_COLUMNS; i++) {
-        int len = strlen(cols[i].title) + 1 + strlen(cols[i].str);
-        if (i > 0)
-            len += 2;
-        if (pos + len >= size)
-            break;
-        if (i > 0)
-            strcat(buffer, ", ");
-        strcat(buffer, cols[i].title);
-        strcat(buffer, ":");
-        strcat(buffer, cols[i].str);
-        pos += len;
-    }
+    return slices.back().end - slices[0].start;
 }
 
 void
 Statistics::printStats()
 {
     if (fullFormat) {
-        fprintf(fp, "%12.0f", double(phaseStarts[PHASE_GC] - startupTime) / PRMJ_USEC_PER_MSEC);
-
-        ColumnInfo cols[NUM_COLUMNS];
-        makeTable(cols);
-        for (int i = 0; i < NUM_COLUMNS; i++)
-            fprintf(fp, ", %*s", cols[i].width, cols[i].str);
-        fprintf(fp, "\n");
+        fprintf(fp, "GC(T+%.3fs) %s\n",
+                t(slices[0].start - startupTime) / 1000.0,
+                formatData());
     } else {
         fprintf(fp, "%f %f %f\n",
-                t(PHASE_GC), t(PHASE_MARK), t(PHASE_SWEEP));
+                t(gcDuration()),
+                t(phaseTimes[PHASE_MARK]),
+                t(phaseTimes[PHASE_SWEEP]));
     }
     fflush(fp);
 }
 
 void
+Statistics::beginGC()
+{
+    PodArrayZero(phaseStarts);
+    PodArrayZero(phaseTimes);
+
+    slices.clearAndFree();
+    wasReset = false;
+
+    Probes::GCStart();
+}
+
+void
 Statistics::endGC()
 {
     Probes::GCEnd();
-    endPhase(PHASE_GC);
     crash::SnapshotGCStack();
 
     for (int i = 0; i < PHASE_LIMIT; i++)
-        totals[i] += phaseTimes[i];
+        phaseTotals[i] += phaseTimes[i];
 
     if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback) {
-        (*cb)(JS_TELEMETRY_GC_REASON, triggerReason);
         (*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, compartment ? 1 : 0);
-        (*cb)(JS_TELEMETRY_GC_MS, t(PHASE_GC));
-        (*cb)(JS_TELEMETRY_GC_MARK_MS, t(PHASE_MARK));
-        (*cb)(JS_TELEMETRY_GC_SWEEP_MS, t(PHASE_SWEEP));
-    }
+        (*cb)(JS_TELEMETRY_GC_MS, t(gcDuration()));
+        (*cb)(JS_TELEMETRY_GC_MARK_MS, t(phaseTimes[PHASE_MARK]));
+        (*cb)(JS_TELEMETRY_GC_SWEEP_MS, t(phaseTimes[PHASE_SWEEP]));
+        (*cb)(JS_TELEMETRY_GC_RESET, wasReset);
+        (*cb)(JS_TELEMETRY_GC_INCREMENTAL_DISABLED, !runtime->gcIncrementalEnabled);
 
-    if (JSGCFinishedCallback cb = runtime->gcFinishedCallback) {
-        char buffer[1024];
-        statsToString(buffer, sizeof(buffer));
-        (*cb)(runtime, compartment, buffer);
+        double mmu50 = computeMMU(50 * PRMJ_USEC_PER_MSEC);
+        (*cb)(JS_TELEMETRY_GC_MMU_50, mmu50 * 100);
     }
 
     if (fp)
         printStats();
 
     PodArrayZero(counts);
 }
 
 void
+Statistics::beginSlice(JSCompartment *comp, gcreason::Reason reason)
+{
+    compartment = comp;
+
+    bool first = runtime->gcIncrementalState == gc::NO_INCREMENTAL;
+    if (first)
+        beginGC();
+
+    SliceData data(reason, PRMJ_Now());
+    (void) slices.append(data); /* Ignore any OOMs here. */
+
+    if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback)
+        (*cb)(JS_TELEMETRY_GC_REASON, reason);
+
+    if (GCSliceCallback cb = runtime->gcSliceCallback) {
+        GCDescription desc(NULL, !!compartment);
+        (*cb)(runtime, first ? GC_CYCLE_BEGIN : GC_SLICE_BEGIN, desc);
+    }
+}
+
+void
+Statistics::endSlice()
+{
+    slices.back().end = PRMJ_Now();
+
+    if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback)
+        (*cb)(JS_TELEMETRY_GC_SLICE_MS, t(slices.back().end - slices.back().start));
+
+    bool last = runtime->gcIncrementalState == gc::NO_INCREMENTAL;
+    if (last)
+        endGC();
+
+    if (GCSliceCallback cb = runtime->gcSliceCallback) {
+        if (last)
+            (*cb)(runtime, GC_CYCLE_END, GCDescription(formatData(), !!compartment));
+        else
+            (*cb)(runtime, GC_SLICE_END, GCDescription(NULL, !!compartment));
+    }
+}
+
+void
 Statistics::beginPhase(Phase phase)
 {
     phaseStarts[phase] = PRMJ_Now();
 
     if (phase == gcstats::PHASE_MARK)
         Probes::GCStartMarkPhase();
     else if (phase == gcstats::PHASE_SWEEP)
         Probes::GCStartSweepPhase();
 }
 
 void
 Statistics::endPhase(Phase phase)
 {
-    phaseEnds[phase] = PRMJ_Now();
-    phaseTimes[phase] += phaseEnds[phase] - phaseStarts[phase];
+    int64_t now = PRMJ_Now();
+    int64_t t = now - phaseStarts[phase];
+    slices.back().phaseTimes[phase] += t;
+    phaseTimes[phase] += t;
 
     if (phase == gcstats::PHASE_MARK)
         Probes::GCEndMarkPhase();
     else if (phase == gcstats::PHASE_SWEEP)
         Probes::GCEndSweepPhase();
 }
 
+/*
+ * MMU (minimum mutator utilization) is a measure of how much garbage collection
+ * affects the responsiveness of the system. MMU measurements are given with
+ * respect to a certain window size. If we report MMU(50ms) = 80%, that means
+ * that for any 50ms window of time, at least 80% of the window is devoted to
+ * the mutator. In other words, the GC runs for at most 20% of the window, or
+ * 10ms. The GC can run multiple slices during the 50ms window as long as the
+ * total time it spends is at most 10ms.
+ */
+double
+Statistics::computeMMU(int64_t window)
+{
+    JS_ASSERT(!slices.empty());
+
+    int64_t gc = slices[0].end - slices[0].start;
+    int64_t gcMax = gc;
+
+    if (gc >= window)
+        return 0.0;
+
+    int startIndex = 0;
+    for (size_t endIndex = 1; endIndex < slices.length(); endIndex++) {
+        gc += slices[endIndex].end - slices[endIndex].start;
+
+        while (slices[endIndex].end - slices[startIndex].end >= window) {
+            gc -= slices[startIndex].end - slices[startIndex].start;
+            startIndex++;
+        }
+
+        int64_t cur = gc;
+        if (slices[endIndex].end - slices[startIndex].start > window)
+            cur -= (slices[endIndex].end - slices[startIndex].start - window);
+        if (cur > gcMax)
+            gcMax = cur;
+    }
+
+    return double(window - gcMax) / window;
+}
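
To make the window arithmetic above concrete, here is a minimal standalone sketch of the same sliding-window computation over hypothetical slice data (plain start/end pairs in microseconds; std::vector stands in for the js::Vector used above):

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    struct Slice { int64_t start, end; };  // times in microseconds

    // Same algorithm as Statistics::computeMMU, over plain data.
    static double ComputeMMU(const std::vector<Slice> &slices, int64_t window)
    {
        int64_t gc = slices[0].end - slices[0].start;  // GC time inside the window
        int64_t gcMax = gc;
        if (gc >= window)
            return 0.0;

        size_t startIndex = 0;
        for (size_t endIndex = 1; endIndex < slices.size(); endIndex++) {
            gc += slices[endIndex].end - slices[endIndex].start;
            // Advance the window past slices that have fallen out of it.
            while (slices[endIndex].end - slices[startIndex].end >= window) {
                gc -= slices[startIndex].end - slices[startIndex].start;
                startIndex++;
            }
            int64_t cur = gc;
            if (slices[endIndex].end - slices[startIndex].start > window)
                cur -= slices[endIndex].end - slices[startIndex].start - window;
            if (cur > gcMax)
                gcMax = cur;
        }
        return double(window - gcMax) / window;
    }

    int main()
    {
        // Two 10ms slices starting 40ms apart: a 50ms window sees 20ms of GC.
        std::vector<Slice> slices = { {0, 10000}, {40000, 50000} };
        printf("MMU(50ms) = %.2f\n", ComputeMMU(slices, 50000));  // prints 0.60
        return 0;
    }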
+
 } /* namespace gcstats */
 } /* namespace js */
--- a/js/src/gc/Statistics.h
+++ b/js/src/gc/Statistics.h
@@ -47,18 +47,20 @@
 #include "jsutil.h"
 
 struct JSCompartment;
 
 namespace js {
 namespace gcstats {
 
 enum Phase {
-    PHASE_GC,
     PHASE_MARK,
+    PHASE_MARK_ROOTS,
+    PHASE_MARK_DELAYED,
+    PHASE_MARK_OTHER,
     PHASE_SWEEP,
     PHASE_SWEEP_OBJECT,
     PHASE_SWEEP_STRING,
     PHASE_SWEEP_SCRIPT,
     PHASE_SWEEP_SHAPE,
     PHASE_DISCARD_CODE,
     PHASE_DISCARD_ANALYSIS,
     PHASE_XPCONNECT,
@@ -69,76 +71,96 @@ enum Phase {
 
 enum Stat {
     STAT_NEW_CHUNK,
     STAT_DESTROY_CHUNK,
 
     STAT_LIMIT
 };
 
+static const size_t BUFFER_SIZE = 8192;
+
 struct Statistics {
     Statistics(JSRuntime *rt);
     ~Statistics();
 
-    void beginGC(JSCompartment *comp, gcreason::Reason reason);
-    void endGC();
-
     void beginPhase(Phase phase);
     void endPhase(Phase phase);
 
+    void beginSlice(JSCompartment *comp, gcreason::Reason reason);
+    void endSlice();
+
+    void reset() { wasReset = true; }
+
     void count(Stat s) {
         JS_ASSERT(s < STAT_LIMIT);
         counts[s]++;
     }
 
   private:
     JSRuntime *runtime;
 
-    uint64_t startupTime;
+    int64_t startupTime;
 
     FILE *fp;
     bool fullFormat;
 
-    gcreason::Reason triggerReason;
     JSCompartment *compartment;
+    bool wasReset;
+
+    struct SliceData {
+        SliceData(gcreason::Reason reason, int64_t start)
+          : reason(reason), start(start)
+        {
+            PodArrayZero(phaseTimes);
+        }
+
+        gcreason::Reason reason;
+        int64_t start, end;
+        int64_t phaseTimes[PHASE_LIMIT];
 
-    uint64_t phaseStarts[PHASE_LIMIT];
-    uint64_t phaseEnds[PHASE_LIMIT];
-    uint64_t phaseTimes[PHASE_LIMIT];
-    uint64_t totals[PHASE_LIMIT];
+        int64_t duration() const { return end - start; }
+    };
+
+    Vector<SliceData, 8, SystemAllocPolicy> slices;
+
+    /* Most recent time when the given phase started. */
+    int64_t phaseStarts[PHASE_LIMIT];
+
+    /* Total time in a given phase for this GC. */
+    int64_t phaseTimes[PHASE_LIMIT];
+
+    /* Total time in a given phase over all GCs. */
+    int64_t phaseTotals[PHASE_LIMIT];
+
+    /* Number of events of this type for this GC. */
     unsigned int counts[STAT_LIMIT];
 
-    double t(Phase phase);
-    double total(Phase phase);
-    double beginDelay(Phase phase1, Phase phase2);
-    double endDelay(Phase phase1, Phase phase2);
-    void printStats();
-    void statsToString(char *buffer, size_t size);
+    char buffer[BUFFER_SIZE];
+    bool needComma;
+
+    void beginGC();
+    void endGC();
 
-    struct ColumnInfo {
-        const char *title;
-        char str[32];
-        char totalStr[32];
-        int width;
+    int64_t gcDuration();
+    double t(int64_t t);
+    void printStats();
+    void fmt(const char *f, ...);
+    void fmtIfNonzero(const char *name, double t);
+    void formatPhases(int64_t *times);
+    const char *formatData();
 
-        ColumnInfo() {}
-        ColumnInfo(const char *title, double t, double total);
-        ColumnInfo(const char *title, double t);
-        ColumnInfo(const char *title, unsigned int data);
-        ColumnInfo(const char *title, const char *data);
-    };
-
-    void makeTable(ColumnInfo *cols);
+    double computeMMU(int64_t resolution);
 };
 
-struct AutoGC {
-    AutoGC(Statistics &stats, JSCompartment *comp, gcreason::Reason reason
-           JS_GUARD_OBJECT_NOTIFIER_PARAM)
-      : stats(stats) { JS_GUARD_OBJECT_NOTIFIER_INIT; stats.beginGC(comp, reason); }
-    ~AutoGC() { stats.endGC(); }
+struct AutoGCSlice {
+    AutoGCSlice(Statistics &stats, JSCompartment *comp, gcreason::Reason reason
+                JS_GUARD_OBJECT_NOTIFIER_PARAM)
+      : stats(stats) { JS_GUARD_OBJECT_NOTIFIER_INIT; stats.beginSlice(comp, reason); }
+    ~AutoGCSlice() { stats.endSlice(); }
 
     Statistics &stats;
     JS_DECL_USE_GUARD_OBJECT_NOTIFIER
 };
 
 struct AutoPhase {
     AutoPhase(Statistics &stats, Phase phase JS_GUARD_OBJECT_NOTIFIER_PARAM)
       : stats(stats), phase(phase) { JS_GUARD_OBJECT_NOTIFIER_INIT; stats.beginPhase(phase); }
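
The RAII wrappers above are how timing is reported from inside the collector; a hypothetical call site might look like the following sketch (the function name and slice structure here are illustrative, not part of this patch):

    void RunMarkSlice(JSContext *cx, JSCompartment *comp)  // hypothetical caller
    {
        JSRuntime *rt = cx->runtime;
        gcstats::AutoGCSlice slice(rt->gcStats, comp, gcreason::API);  // beginSlice()/endSlice()
        {
            gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_MARK);   // beginPhase()/endPhase()
            // ... drain part of the mark stack within the slice budget ...
        }
    }  // ~AutoGCSlice runs endSlice(), which also ends the GC cycle on the last slice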
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -718,41 +718,47 @@ JSRuntime::JSRuntime()
 #endif
     gcSystemAvailableChunkListHead(NULL),
     gcUserAvailableChunkListHead(NULL),
     gcKeepAtoms(0),
     gcBytes(0),
     gcMaxBytes(0),
     gcMaxMallocBytes(0),
     gcNumArenasFreeCommitted(0),
-    gcNumber(0),
-    gcIncrementalTracer(NULL),
     gcVerifyData(NULL),
     gcChunkAllocationSinceLastGC(false),
     gcNextFullGCTime(0),
     gcJitReleaseTime(0),
     gcMode(JSGC_MODE_GLOBAL),
     gcIsNeeded(0),
     gcWeakMapList(NULL),
     gcStats(thisFromCtor()),
+    gcNumber(0),
+    gcStartNumber(0),
     gcTriggerReason(gcreason::NO_REASON),
     gcTriggerCompartment(NULL),
     gcCurrentCompartment(NULL),
     gcCheckCompartment(NULL),
+    gcIncrementalState(gc::NO_INCREMENTAL),
+    gcCompartmentCreated(false),
+    gcLastMarkSlice(false),
+    gcInterFrameGC(0),
+    gcSliceBudget(SliceBudget::Unlimited),
+    gcIncrementalEnabled(true),
+    gcIncrementalCompartment(NULL),
     gcPoke(false),
-    gcMarkAndSweep(false),
     gcRunning(false),
 #ifdef JS_GC_ZEAL
     gcZeal_(0),
     gcZealFrequency(0),
     gcNextScheduled(0),
     gcDebugCompartmentGC(false),
 #endif
     gcCallback(NULL),
-    gcFinishedCallback(NULL),
+    gcSliceCallback(NULL),
     gcMallocBytes(0),
     gcBlackRootsTraceOp(NULL),
     gcBlackRootsData(NULL),
     gcGrayRootsTraceOp(NULL),
     gcGrayRootsData(NULL),
     scriptPCCounters(NULL),
     NaNValue(UndefinedValue()),
     negativeInfinityValue(UndefinedValue()),
@@ -809,16 +815,19 @@ JSRuntime::init(uint32_t maxbytes)
 
 #ifdef JS_METHODJIT_SPEW
     JMCheckLogging();
 #endif
 
     if (!js_InitGC(this, maxbytes))
         return false;
 
+    if (!gcMarker.init())
+        return false;
+
     if (!(atomsCompartment = this->new_<JSCompartment>(this)) ||
         !atomsCompartment->init(NULL) ||
         !compartments.append(atomsCompartment)) {
         Foreground::delete_(atomsCompartment);
         return false;
     }
 
     atomsCompartment->isSystemCompartment = true;
@@ -2432,23 +2441,17 @@ JS_SetExtraGCRootsTracer(JSRuntime *rt, 
     AssertNoGC(rt);
     rt->gcBlackRootsTraceOp = traceOp;
     rt->gcBlackRootsData = data;
 }
 
 JS_PUBLIC_API(void)
 JS_TracerInit(JSTracer *trc, JSContext *cx, JSTraceCallback callback)
 {
-    trc->runtime = cx->runtime;
-    trc->context = cx;
-    trc->callback = callback;
-    trc->debugPrinter = NULL;
-    trc->debugPrintArg = NULL;
-    trc->debugPrintIndex = size_t(-1);
-    trc->eagerlyTraceWeakMaps = true;
+    InitTracer(trc, cx->runtime, cx, callback);
 }
 
 JS_PUBLIC_API(void)
 JS_TraceRuntime(JSTracer *trc)
 {
     AssertNoGC(trc->runtime);
     TraceRuntime(trc);
 }
@@ -2870,18 +2873,17 @@ JS_IsGCMarkingTracer(JSTracer *trc)
 JS_PUBLIC_API(void)
 JS_CompartmentGC(JSContext *cx, JSCompartment *comp)
 {
     AssertNoGC(cx);
 
     /* We cannot GC the atoms compartment alone; use a full GC instead. */
     JS_ASSERT(comp != cx->runtime->atomsCompartment);
 
-    js::gc::VerifyBarriers(cx, true);
-    js_GC(cx, comp, GC_NORMAL, gcreason::API);
+    GC(cx, comp, GC_NORMAL, gcreason::API);
 }
 
 JS_PUBLIC_API(void)
 JS_GC(JSContext *cx)
 {
     JS_CompartmentGC(cx, NULL);
 }
 
@@ -2909,38 +2911,41 @@ JS_SetGCCallbackRT(JSRuntime *rt, JSGCCa
     rt->gcCallback = cb;
     return oldcb;
 }
 
 JS_PUBLIC_API(JSBool)
 JS_IsAboutToBeFinalized(void *thing)
 {
     gc::Cell *t = static_cast<gc::Cell *>(thing);
-    JS_ASSERT(!t->compartment()->rt->gcIncrementalTracer);
     return IsAboutToBeFinalized(t);
 }
 
 JS_PUBLIC_API(void)
 JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32_t value)
 {
     switch (key) {
       case JSGC_MAX_BYTES: {
         AutoLockGC lock(rt);
         JS_ASSERT(value >= rt->gcBytes);
         rt->gcMaxBytes = value;
         break;
       }
       case JSGC_MAX_MALLOC_BYTES:
         rt->setGCMaxMallocBytes(value);
         break;
+      case JSGC_SLICE_TIME_BUDGET:
+        rt->gcSliceBudget = SliceBudget::TimeBudget(value);
+        break;
       default:
         JS_ASSERT(key == JSGC_MODE);
         rt->gcMode = JSGCMode(value);
         JS_ASSERT(rt->gcMode == JSGC_MODE_GLOBAL ||
-                  rt->gcMode == JSGC_MODE_COMPARTMENT);
+                  rt->gcMode == JSGC_MODE_COMPARTMENT ||
+                  rt->gcMode == JSGC_MODE_INCREMENTAL);
         return;
     }
 }
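
Together with the JSGC_MODE_INCREMENTAL value added to jsapi.h below, an embedder would opt in to incremental collection roughly as follows (a sketch; the 10ms budget is an arbitrary example value):

    JS_SetGCParameter(rt, JSGC_MODE, JSGC_MODE_INCREMENTAL);  // collect in slices
    JS_SetGCParameter(rt, JSGC_SLICE_TIME_BUDGET, 10);        // cap each slice at ~10ms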
 
 JS_PUBLIC_API(uint32_t)
 JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key)
 {
     switch (key) {
@@ -2951,19 +2956,21 @@ JS_GetGCParameter(JSRuntime *rt, JSGCPar
       case JSGC_BYTES:
         return uint32_t(rt->gcBytes);
       case JSGC_MODE:
         return uint32_t(rt->gcMode);
       case JSGC_UNUSED_CHUNKS:
         return uint32_t(rt->gcChunkPool.getEmptyCount());
       case JSGC_TOTAL_CHUNKS:
         return uint32_t(rt->gcChunkSet.count() + rt->gcChunkPool.getEmptyCount());
+      case JSGC_SLICE_TIME_BUDGET:
+        return uint32_t(rt->gcSliceBudget > 0 ? rt->gcSliceBudget / PRMJ_USEC_PER_MSEC : 0);
       default:
         JS_ASSERT(key == JSGC_NUMBER);
-        return rt->gcNumber;
+        return uint32_t(rt->gcNumber);
     }
 }
 
 JS_PUBLIC_API(void)
 JS_SetGCParameterForThread(JSContext *cx, JSGCParamKey key, uint32_t value)
 {
     JS_ASSERT(key == JSGC_MAX_CODE_CACHE_BYTES);
 }
@@ -6604,17 +6611,26 @@ JS_AbortIfWrongThread(JSRuntime *rt)
         MOZ_Assert("rt->onOwnerThread()", __FILE__, __LINE__);
 #endif
 }
 
 #ifdef JS_GC_ZEAL
 JS_PUBLIC_API(void)
 JS_SetGCZeal(JSContext *cx, uint8_t zeal, uint32_t frequency, JSBool compartment)
 {
-    bool schedule = zeal >= js::gc::ZealAllocThreshold && zeal < js::gc::ZealVerifierThreshold;
+#ifdef JS_GC_ZEAL
+    const char *env = getenv("JS_GC_ZEAL");
+    if (env) {
+        zeal = atoi(env);
+        frequency = 1;
+        compartment = false;
+    }
+#endif
+
+    bool schedule = zeal >= js::gc::ZealAllocValue;
     cx->runtime->gcZeal_ = zeal;
     cx->runtime->gcZealFrequency = frequency;
     cx->runtime->gcNextScheduled = schedule ? frequency : 0;
     cx->runtime->gcDebugCompartmentGC = !!compartment;
 }
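
Per the zeal documentation in jscntxt.h below, zeal value 2 schedules a GC every `frequency` allocations. A debug embedding could request that with, for example:

    #ifdef JS_GC_ZEAL
        JS_SetGCZeal(cx, 2, 100, JS_FALSE);  // trigger a GC every 100th allocation
    #endif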
 
 JS_PUBLIC_API(void)
 JS_ScheduleGC(JSContext *cx, uint32_t count, JSBool compartment)
--- a/js/src/jsapi.h
+++ b/js/src/jsapi.h
@@ -1024,17 +1024,17 @@ class AutoEnumStateRooter : private Auto
     friend void AutoGCRooter::trace(JSTracer *trc);
 
     const Value &state() const { return stateValue; }
     Value *addr() { return &stateValue; }
 
   protected:
     void trace(JSTracer *trc);
 
-    JSObject * const obj;
+    JSObject *obj;
 
   private:
     Value stateValue;
     JS_DECL_USE_GUARD_OBJECT_NOTIFIER
 };
 
 template<class T>
 class AutoVectorRooter : protected AutoGCRooter
@@ -1423,18 +1423,21 @@ typedef enum JSContextOp {
  *                      return true.
  *   Any other value    For future compatibility the callback must do nothing
  *                      and return true in this case.
  */
 typedef JSBool
 (* JSContextCallback)(JSContext *cx, uintN contextOp);
 
 typedef enum JSGCStatus {
+    /* These callbacks happen outside the GC lock. */
     JSGC_BEGIN,
     JSGC_END,
+
+    /* These callbacks happen within the GC lock. */
     JSGC_MARK_END,
     JSGC_FINALIZE_END
 } JSGCStatus;
 
 typedef JSBool
 (* JSGCCallback)(JSContext *cx, JSGCStatus status);
 
 /*
@@ -3285,25 +3288,34 @@ typedef enum JSGCParamKey {
 
     /* Select GC mode. */
     JSGC_MODE = 6,
 
     /* Number of cached empty GC chunks. */
     JSGC_UNUSED_CHUNKS = 7,
 
     /* Total number of allocated GC chunks. */
-    JSGC_TOTAL_CHUNKS = 8
+    JSGC_TOTAL_CHUNKS = 8,
+
+    /* Max milliseconds to spend in an incremental GC slice. */
+    JSGC_SLICE_TIME_BUDGET = 9
 } JSGCParamKey;
 
 typedef enum JSGCMode {
     /* Perform only global GCs. */
     JSGC_MODE_GLOBAL = 0,
 
     /* Perform per-compartment GCs until too much garbage has accumulated. */
-    JSGC_MODE_COMPARTMENT = 1
+    JSGC_MODE_COMPARTMENT = 1,
+
+    /*
+     * Collect in short time slices rather than all at once. Implies
+     * JSGC_MODE_COMPARTMENT.
+     */
+    JSGC_MODE_INCREMENTAL = 2
 } JSGCMode;
 
 extern JS_PUBLIC_API(void)
 JS_SetGCParameter(JSRuntime *rt, JSGCParamKey key, uint32_t value);
 
 extern JS_PUBLIC_API(uint32_t)
 JS_GetGCParameter(JSRuntime *rt, JSGCParamKey key);
 
@@ -3388,16 +3400,18 @@ struct JSClass {
 #define JSCLASS_HAS_PRIVATE             (1<<0)  /* objects have private slot */
 #define JSCLASS_NEW_ENUMERATE           (1<<1)  /* has JSNewEnumerateOp hook */
 #define JSCLASS_NEW_RESOLVE             (1<<2)  /* has JSNewResolveOp hook */
 #define JSCLASS_PRIVATE_IS_NSISUPPORTS  (1<<3)  /* private is (nsISupports *) */
 #define JSCLASS_NEW_RESOLVE_GETS_START  (1<<4)  /* JSNewResolveOp gets starting
                                                    object in prototype chain
                                                    passed in via *objp in/out
                                                    parameter */
+#define JSCLASS_IMPLEMENTS_BARRIERS     (1<<5)  /* Correctly implements GC read
+                                                   and write barriers */
 #define JSCLASS_DOCUMENT_OBSERVER       (1<<6)  /* DOM document observer */
 
 /*
  * To reserve slots fetched and stored via JS_Get/SetReservedSlot, bitwise-or
  * JSCLASS_HAS_RESERVED_SLOTS(n) into the initializer for JSClass.flags, where
  * n is a constant in [1, 255].  Reserved slots are indexed from 0 to n-1.
  */
 #define JSCLASS_RESERVED_SLOTS_SHIFT    8       /* room for 8 flags below */
--- a/js/src/jsatom.cpp
+++ b/js/src/jsatom.cpp
@@ -381,25 +381,30 @@ js_FinishCommonAtoms(JSContext *cx)
 
 void
 js_TraceAtomState(JSTracer *trc)
 {
     JSRuntime *rt = trc->runtime;
     JSAtomState *state = &rt->atomState;
 
     if (rt->gcKeepAtoms) {
-        for (AtomSet::Range r = state->atoms.all(); !r.empty(); r.popFront())
-            MarkStringRoot(trc, r.front().asPtr(), "locked_atom");
+        for (AtomSet::Range r = state->atoms.all(); !r.empty(); r.popFront()) {
+            JSAtom *tmp = r.front().asPtr();
+            MarkStringRoot(trc, &tmp, "locked_atom");
+            JS_ASSERT(tmp == r.front().asPtr());
+        }
     } else {
         for (AtomSet::Range r = state->atoms.all(); !r.empty(); r.popFront()) {
             AtomStateEntry entry = r.front();
             if (!entry.isTagged())
                 continue;
 
-            MarkStringRoot(trc, entry.asPtr(), "interned_atom");
+            JSAtom *tmp = entry.asPtr();
+            MarkStringRoot(trc, &tmp, "interned_atom");
+            JS_ASSERT(tmp == entry.asPtr());
         }
     }
 }
 
 void
 js_SweepAtomState(JSRuntime *rt)
 {
     JSAtomState *state = &rt->atomState;
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -277,20 +277,20 @@ js_DestroyContext(JSContext *cx, JSDestr
             /* Unpin all common atoms before final GC. */
             js_FinishCommonAtoms(cx);
 
             /* Clear debugging state to remove GC roots. */
             for (CompartmentsIter c(rt); !c.done(); c.next())
                 c->clearTraps(cx);
             JS_ClearAllWatchPoints(cx);
 
-            js_GC(cx, NULL, GC_NORMAL, gcreason::LAST_CONTEXT);
+            GC(cx, NULL, GC_NORMAL, gcreason::LAST_CONTEXT);
 
         } else if (mode == JSDCM_FORCE_GC) {
-            js_GC(cx, NULL, GC_NORMAL, gcreason::DESTROY_CONTEXT);
+            GC(cx, NULL, GC_NORMAL, gcreason::DESTROY_CONTEXT);
         } else if (mode == JSDCM_MAYBE_GC) {
             JS_MaybeGC(cx);
         }
         JS_LOCK_GC(rt);
     }
 #ifdef JS_THREADSAFE
     rt->gcHelperThread.waitBackgroundSweepEnd();
 #endif
@@ -870,17 +870,17 @@ js_InvokeOperationCallback(JSContext *cx
     /*
      * Reset the callback counter first, then run GC and yield. If another
      * thread is racing us here we will accumulate another callback request
      * which will be serviced at the next opportunity.
      */
     JS_ATOMIC_SET(&rt->interrupt, 0);
 
     if (rt->gcIsNeeded)
-        js_GC(cx, rt->gcTriggerCompartment, GC_NORMAL, rt->gcTriggerReason);
+        GCSlice(cx, rt->gcTriggerCompartment, GC_NORMAL, rt->gcTriggerReason);
 
 #ifdef JS_THREADSAFE
     /*
      * We automatically yield the current context every time the operation
      * callback is hit since we might be called as a result of an impending
      * GC on another thread, which would deadlock if we do not yield.
      * Operation callbacks are supposed to happen rarely (seconds, not
      * milliseconds) so it is acceptable to yield at every callback.
@@ -1273,17 +1273,17 @@ JSContext::sizeOfIncludingThis(JSMallocS
 
 void
 JSContext::mark(JSTracer *trc)
 {
     /* Stack frames and slots are traced by StackSpace::mark. */
 
     /* Mark other roots-by-definition in the JSContext. */
     if (globalObject && !hasRunOption(JSOPTION_UNROOTED_GLOBAL))
-        MarkObjectRoot(trc, globalObject, "global object");
+        MarkObjectRoot(trc, &globalObject, "global object");
     if (isExceptionPending())
         MarkValueRoot(trc, &exception, "exception");
 
     if (autoGCRooters)
         autoGCRooters->traceAll(trc);
 
     if (sharpObjectMap.depth > 0)
         js_TraceSharpMap(trc, &sharpObjectMap);
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -299,103 +299,149 @@ struct JSRuntime : js::RuntimeFriendFiel
     size_t              gcMaxMallocBytes;
 
     /*
      * Number of committed arenas in all GC chunks, including empty chunks.
      * The counter is volatile as it is read without the GC lock; see comments
      * in MaybeGC.
      */
     volatile uint32_t   gcNumArenasFreeCommitted;
-    uint32_t            gcNumber;
-    js::GCMarker        *gcIncrementalTracer;
+    js::FullGCMarker    gcMarker;
     void                *gcVerifyData;
     bool                gcChunkAllocationSinceLastGC;
     int64_t             gcNextFullGCTime;
     int64_t             gcJitReleaseTime;
     JSGCMode            gcMode;
-    volatile uintptr_t  gcBarrierFailed;
     volatile uintptr_t  gcIsNeeded;
     js::WeakMapBase     *gcWeakMapList;
     js::gcstats::Statistics gcStats;
 
+    /* Incremented on every GC slice. */
+    uint64_t            gcNumber;
+
+    /* The gcNumber at the time of the most recent GC's first slice. */
+    uint64_t            gcStartNumber;
+
     /* The reason that an interrupt-triggered GC should be called. */
     js::gcreason::Reason gcTriggerReason;
 
-    /* Pre-allocated space for the GC mark stack. */
-    uintptr_t           gcMarkStackArray[js::MARK_STACK_LENGTH];
-
     /*
      * Compartment that triggered GC. If more than one compartment needs GC,
      * gcTriggerCompartment is reset to NULL and a global GC is performed.
      */
     JSCompartment       *gcTriggerCompartment;
 
     /* Compartment that is currently involved in per-compartment GC */
     JSCompartment       *gcCurrentCompartment;
 
     /*
      * If this is non-NULL, all marked objects must belong to this compartment.
      * This is used to look for compartment bugs.
      */
     JSCompartment       *gcCheckCompartment;
 
     /*
+     * The current incremental GC phase. During non-incremental GC, this is
+     * always NO_INCREMENTAL.
+     */
+    js::gc::State       gcIncrementalState;
+
+    /* Indicates that a new compartment was created during incremental GC. */
+    bool                gcCompartmentCreated;
+
+    /* Indicates that the last incremental slice exhausted the mark stack. */
+    bool                gcLastMarkSlice;
+
+    /*
+     * Indicates that a GC slice has taken place in the middle of an animation
+     * frame, rather than at the beginning. In this case, the next slice will be
+     * delayed so that we don't get back-to-back slices.
+     */
+    volatile uintptr_t  gcInterFrameGC;
+
+    /* Default budget for incremental GC slice. See SliceBudget in jsgc.h. */
+    int64_t             gcSliceBudget;
+
+    /*
+     * We disable incremental GC if we encounter a js::Class with a trace hook
+     * that does not implement write barriers.
+     */
+    bool                gcIncrementalEnabled;
+
+    /* Compartment that is undergoing an incremental GC. */
+    JSCompartment       *gcIncrementalCompartment;
+
+    /*
+     * We save all conservative scanned roots in this vector so that
+     * conservative scanning can be "replayed" deterministically. In DEBUG mode,
+     * this allows us to run a non-incremental GC after every incremental GC to
+     * ensure that no objects were missed.
+     */
+#ifdef DEBUG
+    struct SavedGCRoot {
+        void *thing;
+        JSGCTraceKind kind;
+
+        SavedGCRoot(void *thing, JSGCTraceKind kind) : thing(thing), kind(kind) {}
+    };
+    js::Vector<SavedGCRoot, 0, js::SystemAllocPolicy> gcSavedRoots;
+#endif
+
+    /*
      * We can pack these flags as only the GC thread writes to them. Atomic
      * updates to packed bytes are not guaranteed, so stores issued by one
      * thread may be lost due to unsynchronized read-modify-write cycles on
      * other threads.
      */
     bool                gcPoke;
-    bool                gcMarkAndSweep;
     bool                gcRunning;
 
     /*
      * These options control the zealousness of the GC. The fundamental values
      * are gcNextScheduled and gcDebugCompartmentGC. At every allocation,
      * gcNextScheduled is decremented. When it reaches zero, we do either a
      * full or a compartmental GC, based on gcDebugCompartmentGC.
      *
-     * At this point, if gcZeal_ >= 2 then gcNextScheduled is reset to the
+     * At this point, if gcZeal_ == 2 then gcNextScheduled is reset to the
      * value of gcZealFrequency. Otherwise, no additional GCs take place.
      *
      * You can control these values in several ways:
      *   - Pass the -Z flag to the shell (see the usage info for details)
      *   - Call gczeal() or schedulegc() from inside shell-executed JS code
      *     (see the help for details)
      *
-     * Additionally, if gzZeal_ == 1 then we perform GCs in select places
-     * (during MaybeGC and whenever a GC poke happens). This option is mainly
-     * useful to embedders.
+     * If gcZeal_ == 1 then we perform GCs in select places (during MaybeGC and
+     * whenever a GC poke happens). This option is mainly useful to embedders.
      *
      * We use gcZeal_ == 4 to enable write barrier verification. See the comment
      * in jsgc.cpp for more information about this.
      */
 #ifdef JS_GC_ZEAL
     int                 gcZeal_;
     int                 gcZealFrequency;
     int                 gcNextScheduled;
     bool                gcDebugCompartmentGC;
 
     int gcZeal() { return gcZeal_; }
 
     bool needZealousGC() {
         if (gcNextScheduled > 0 && --gcNextScheduled == 0) {
-            if (gcZeal() >= js::gc::ZealAllocThreshold && gcZeal() < js::gc::ZealVerifierThreshold)
+            if (gcZeal() == js::gc::ZealAllocValue)
                 gcNextScheduled = gcZealFrequency;
             return true;
         }
         return false;
     }
 #else
     int gcZeal() { return 0; }
     bool needZealousGC() { return false; }
 #endif
 
     JSGCCallback        gcCallback;
-    JSGCFinishedCallback gcFinishedCallback;
+    js::GCSliceCallback gcSliceCallback;
 
   private:
     /*
      * Malloc counter to measure memory pressure for GC scheduling. It runs
      * from gcMaxMallocBytes down to zero.
      */
     volatile ptrdiff_t  gcMallocBytes;
 
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -68,17 +68,16 @@
 using namespace mozilla;
 using namespace js;
 using namespace js::gc;
 
 JSCompartment::JSCompartment(JSRuntime *rt)
   : rt(rt),
     principals(NULL),
     needsBarrier_(false),
-    gcIncrementalTracer(NULL),
     gcBytes(0),
     gcTriggerBytes(0),
     gcLastBytes(0),
     hold(false),
     typeLifoAlloc(TYPE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE),
     data(NULL),
     active(false),
     hasDebugModeCodeToDrop(false),
@@ -123,16 +122,19 @@ JSCompartment::init(JSContext *cx)
         return false;
 
     if (!regExps.init(cx))
         return false;
 
     if (!scriptFilenameTable.init())
         return false;
 
+    if (!barrierMarker_.init())
+        return false;
+
     return debuggees.init();
 }
 
 #ifdef JS_METHODJIT
 bool
 JSCompartment::ensureJaegerCompartmentExists(JSContext *cx)
 {
     if (jaegerCompartment_)
@@ -430,31 +432,61 @@ JSCompartment::markTypes(JSTracer *trc)
      * Mark all scripts, type objects and singleton JS objects in the
      * compartment. These can be referred to directly by type sets, which we
      * cannot modify while code which depends on these type sets is active.
      */
     JS_ASSERT(activeAnalysis);
 
     for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
         JSScript *script = i.get<JSScript>();
-        MarkScriptRoot(trc, script, "mark_types_script");
+        MarkScriptRoot(trc, &script, "mark_types_script");
+        JS_ASSERT(script == i.get<JSScript>());
     }
 
     for (size_t thingKind = FINALIZE_OBJECT0;
          thingKind < FINALIZE_OBJECT_LIMIT;
          thingKind++) {
         for (CellIterUnderGC i(this, AllocKind(thingKind)); !i.done(); i.next()) {
             JSObject *object = i.get<JSObject>();
-            if (object->hasSingletonType())
-                MarkObjectRoot(trc, object, "mark_types_singleton");
+            if (object->hasSingletonType()) {
+                MarkObjectRoot(trc, &object, "mark_types_singleton");
+                JS_ASSERT(object == i.get<JSObject>());
+            }
         }
     }
 
-    for (CellIterUnderGC i(this, FINALIZE_TYPE_OBJECT); !i.done(); i.next())
-        MarkTypeObjectRoot(trc, i.get<types::TypeObject>(), "mark_types_scan");
+    for (CellIterUnderGC i(this, FINALIZE_TYPE_OBJECT); !i.done(); i.next()) {
+        types::TypeObject *type = i.get<types::TypeObject>();
+        MarkTypeObjectRoot(trc, &type, "mark_types_scan");
+        JS_ASSERT(type == i.get<types::TypeObject>());
+    }
+}
+
+void
+JSCompartment::discardJitCode(JSContext *cx)
+{
+    /*
+     * Kick all frames on the stack into the interpreter, and release all JIT
+     * code in the compartment.
+     */
+#ifdef JS_METHODJIT
+    mjit::ClearAllFrames(this);
+
+    for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
+        JSScript *script = i.get<JSScript>();
+        mjit::ReleaseScriptCode(cx, script);
+
+        /*
+         * Use counts for scripts are reset on GC. After discarding code we
+         * need to let it warm back up to get information like which opcodes
+         * are setting array holes or accessing getter properties.
+         */
+        script->resetUseCount();
+    }
+#endif
 }
 
 void
 JSCompartment::sweep(JSContext *cx, bool releaseTypes)
 {
     /* Remove dead wrappers from the table. */
     for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
         JS_ASSERT_IF(IsAboutToBeFinalized(e.front().key) &&
@@ -463,50 +495,33 @@ JSCompartment::sweep(JSContext *cx, bool
         if (IsAboutToBeFinalized(e.front().key) ||
             IsAboutToBeFinalized(e.front().value)) {
             e.removeFront();
         }
     }
 
     /* Remove dead references held weakly by the compartment. */
 
+    regExps.sweep(rt);
+
     sweepBaseShapeTable(cx);
     sweepInitialShapeTable(cx);
     sweepNewTypeObjectTable(cx, newTypeObjects);
     sweepNewTypeObjectTable(cx, lazyTypeObjects);
 
     if (emptyTypeObject && IsAboutToBeFinalized(emptyTypeObject))
         emptyTypeObject = NULL;
 
     newObjectCache.reset();
 
     sweepBreakpoints(cx);
 
     {
         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_DISCARD_CODE);
-
-        /*
-         * Kick all frames on the stack into the interpreter, and release all JIT
-         * code in the compartment.
-         */
-#ifdef JS_METHODJIT
-        mjit::ClearAllFrames(this);
-
-        for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
-            JSScript *script = i.get<JSScript>();
-            mjit::ReleaseScriptCode(cx, script);
-
-            /*
-             * Use counts for scripts are reset on GC. After discarding code we
-             * need to let it warm back up to get information like which opcodes
-             * are setting array holes or accessing getter properties.
-             */
-            script->resetUseCount();
-        }
-#endif
+        discardJitCode(cx);
     }
 
     if (!activeAnalysis) {
         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_DISCARD_ANALYSIS);
 
         /*
          * Clear the analysis pool, but don't release its data yet. While
          * sweeping types any live data will be allocated into the pool.
@@ -550,18 +565,16 @@ JSCompartment::sweep(JSContext *cx, bool
     }
 
     active = false;
 }
 
 void
 JSCompartment::purge(JSContext *cx)
 {
-    arenas.purge();
-    regExps.purge();
     dtoaCache.purge();
 
     /*
      * Clear the hash and reset all evalHashLink to null before the GC. This
      * way MarkChildren(trc, JSScript *) can assume that JSScript::u.object is
      * not null when a script is owned by an object rather than coming from
      * the eval cache.
      */
@@ -765,23 +778,16 @@ JSCompartment::sweepBreakpoints(JSContex
                 nextbp = bp->nextInSite();
                 if (scriptGone || IsAboutToBeFinalized(bp->debugger->toJSObject()))
                     bp->destroy(cx);
             }
         }
     }
 }
 
-GCMarker *
-JSCompartment::createBarrierTracer()
-{
-    JS_ASSERT(!gcIncrementalTracer);
-    return NULL;
-}
-
 size_t
 JSCompartment::sizeOfShapeTable(JSMallocSizeOfFun mallocSizeOf)
 {
     return baseShapes.sizeOfExcludingThis(mallocSizeOf)
          + initialShapes.sizeOfExcludingThis(mallocSizeOf)
          + newTypeObjects.sizeOfExcludingThis(mallocSizeOf)
          + lazyTypeObjects.sizeOfExcludingThis(mallocSizeOf);
 }
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -41,17 +41,16 @@
 #define jscompartment_h___
 
 #include "mozilla/Attributes.h"
 
 #include "jsclist.h"
 #include "jscntxt.h"
 #include "jsfun.h"
 #include "jsgc.h"
-#include "jsgcstats.h"
 #include "jsobj.h"
 #include "jsscope.h"
 #include "vm/GlobalObject.h"
 #include "vm/RegExpObject.h"
 
 #ifdef _MSC_VER
 #pragma warning(push)
 #pragma warning(disable:4251) /* Silence warning about JS_FRIEND_API and data members. */
@@ -158,41 +157,56 @@ struct ScriptFilenameHasher
         return strcmp(e->filename, l) == 0;
     }
 };
 
 typedef HashSet<ScriptFilenameEntry *,
                 ScriptFilenameHasher,
                 SystemAllocPolicy> ScriptFilenameTable;
 
+/* If HashNumber grows, need to change WrapperHasher. */
+JS_STATIC_ASSERT(sizeof(HashNumber) == 4);
+
+struct WrapperHasher
+{
+    typedef Value Lookup;
+
+    static HashNumber hash(Value key) {
+        uint64_t bits = JSVAL_TO_IMPL(key).asBits;
+        return uint32_t(bits) ^ uint32_t(bits >> 32);
+    }
+
+    static bool match(const Value &l, const Value &k) { return l == k; }
+};
+
+typedef HashMap<Value, ReadBarrieredValue, WrapperHasher, SystemAllocPolicy> WrapperMap;
+
 } /* namespace js */
 
 namespace JS {
 struct TypeInferenceSizes;
 }
 
 struct JSCompartment
 {
     JSRuntime                    *rt;
     JSPrincipals                 *principals;
 
     js::gc::ArenaLists           arenas;
 
     bool                         needsBarrier_;
-    js::GCMarker                 *gcIncrementalTracer;
+    js::BarrierGCMarker          barrierMarker_;
 
     bool needsBarrier() {
         return needsBarrier_;
     }
 
     js::GCMarker *barrierTracer() {
         JS_ASSERT(needsBarrier_);
-        if (gcIncrementalTracer)
-            return gcIncrementalTracer;
-        return createBarrierTracer();
+        return &barrierMarker_;
     }
 
     size_t                       gcBytes;
     size_t                       gcTriggerBytes;
     size_t                       gcLastBytes;
     size_t                       gcMaxMallocBytes;
 
     bool                         hold;
@@ -320,20 +334,21 @@ struct JSCompartment
     bool wrap(JSContext *cx, JSObject **objp);
     bool wrapId(JSContext *cx, jsid *idp);
     bool wrap(JSContext *cx, js::PropertyOp *op);
     bool wrap(JSContext *cx, js::StrictPropertyOp *op);
     bool wrap(JSContext *cx, js::PropertyDescriptor *desc);
     bool wrap(JSContext *cx, js::AutoIdVector &props);
 
     void markTypes(JSTracer *trc);
+    void discardJitCode(JSContext *cx);
     void sweep(JSContext *cx, bool releaseTypes);
     void purge(JSContext *cx);
 
-    void setGCLastBytes(size_t lastBytes, JSGCInvocationKind gckind);
+    void setGCLastBytes(size_t lastBytes, js::JSGCInvocationKind gckind);
     void reduceGCTriggerBytes(size_t amount);
     
     void resetGCMallocBytes();
     void setGCMaxMallocBytes(size_t value);
     void updateMallocCounter(size_t nbytes) {
         ptrdiff_t oldCount = gcMallocBytes;
         ptrdiff_t newCount = oldCount - ptrdiff_t(nbytes);
         gcMallocBytes = newCount;
@@ -392,18 +407,16 @@ struct JSCompartment
     bool setDebugModeFromC(JSContext *cx, bool b);
 
     void clearBreakpointsIn(JSContext *cx, js::Debugger *dbg, JSObject *handler);
     void clearTraps(JSContext *cx);
 
   private:
     void sweepBreakpoints(JSContext *cx);
 
-    js::GCMarker *createBarrierTracer();
-
   public:
     js::WatchpointMap *watchpointMap;
 };
 
 #define JS_PROPERTY_TREE(cx)    ((cx)->compartment->propertyTree)
 
 namespace js {
 static inline MathCache *
--- a/js/src/jsexn.cpp
+++ b/js/src/jsexn.cpp
@@ -89,17 +89,17 @@ static void
 exn_finalize(JSContext *cx, JSObject *obj);
 
 static JSBool
 exn_resolve(JSContext *cx, JSObject *obj, jsid id, uintN flags,
             JSObject **objp);
 
 Class js::ErrorClass = {
     js_Error_str,
-    JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE |
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_NEW_RESOLVE |
     JSCLASS_HAS_CACHED_PROTO(JSProto_Error),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     (JSResolveOp)exn_resolve,
     JS_ConvertStub,
@@ -414,24 +414,24 @@ exn_trace(JSTracer *trc, JSObject *obj)
     JSExnPrivate *priv;
     JSStackTraceElem *elem;
     size_t vcount, i;
     HeapValue *vp;
 
     priv = GetExnPrivate(obj);
     if (priv) {
         if (priv->message)
-            MarkString(trc, priv->message, "exception message");
+            MarkString(trc, &priv->message, "exception message");
         if (priv->filename)
-            MarkString(trc, priv->filename, "exception filename");
+            MarkString(trc, &priv->filename, "exception filename");
 
         elem = priv->stackElems;
         for (vcount = i = 0; i != priv->stackDepth; ++i, ++elem) {
             if (elem->funName)
-                MarkString(trc, elem->funName, "stack trace function name");
+                MarkString(trc, &elem->funName, "stack trace function name");
             if (IS_GC_MARKING_TRACER(trc) && elem->filename)
                 js_MarkScriptFilename(elem->filename);
             vcount += elem->argc;
         }
         vp = GetStackTraceValueBuffer(priv);
         for (i = 0; i != vcount; ++i, ++vp)
             MarkValue(trc, vp, "stack trace argument");
     }
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -127,32 +127,38 @@ JS_NewObjectWithUniqueType(JSContext *cx
     if (!obj || !obj->setSingletonType(cx))
         return NULL;
     return obj;
 }
 
 JS_FRIEND_API(void)
 js::GCForReason(JSContext *cx, gcreason::Reason reason)
 {
-    js_GC(cx, NULL, GC_NORMAL, reason);
+    GC(cx, NULL, GC_NORMAL, reason);
 }
 
 JS_FRIEND_API(void)
 js::CompartmentGCForReason(JSContext *cx, JSCompartment *comp, gcreason::Reason reason)
 {
     /* We cannot GC the atoms compartment alone; use a full GC instead. */
     JS_ASSERT(comp != cx->runtime->atomsCompartment);
 
-    js_GC(cx, comp, GC_NORMAL, reason);
+    GC(cx, comp, GC_NORMAL, reason);
 }
 
 JS_FRIEND_API(void)
 js::ShrinkingGC(JSContext *cx, gcreason::Reason reason)
 {
-    js_GC(cx, NULL, GC_SHRINK, reason);
+    GC(cx, NULL, GC_SHRINK, reason);
+}
+
+JS_FRIEND_API(void)
+js::IncrementalGC(JSContext *cx, gcreason::Reason reason)
+{
+    GCSlice(cx, NULL, GC_NORMAL, reason);
 }
 
 JS_FRIEND_API(void)
 JS_ShrinkGCBuffers(JSRuntime *rt)
 {
     ShrinkGCBuffers(rt);
 }
 
@@ -166,17 +172,17 @@ JS_FRIEND_API(JSBool)
 JS_WrapPropertyDescriptor(JSContext *cx, js::PropertyDescriptor *desc)
 {
     return cx->compartment->wrap(cx, desc);
 }
 
 JS_FRIEND_API(void)
 JS_TraceShapeCycleCollectorChildren(JSTracer *trc, void *shape)
 {
-    MarkCycleCollectorChildren(trc, (const Shape *)shape);
+    MarkCycleCollectorChildren(trc, (Shape *)shape);
 }
 
 AutoPreserveCompartment::AutoPreserveCompartment(JSContext *cx
                                                  JS_GUARD_OBJECT_NOTIFIER_PARAM_NO_INIT)
   : cx(cx), oldCompartment(cx->compartment)
 {
     JS_GUARD_OBJECT_NOTIFIER_INIT;
 }
@@ -396,22 +402,16 @@ js::GCThingIsMarkedGray(void *thing)
 }
 
 JS_FRIEND_API(void)
 JS_SetAccumulateTelemetryCallback(JSRuntime *rt, JSAccumulateTelemetryDataCallback callback)
 {
     rt->telemetryCallback = callback;
 }
 
-JS_FRIEND_API(void)
-JS_SetGCFinishedCallback(JSRuntime *rt, JSGCFinishedCallback callback)
-{
-    rt->gcFinishedCallback = callback;
-}
-
 #ifdef DEBUG
 JS_FRIEND_API(void)
 js_DumpString(JSString *str)
 {
     str->dump();
 }
 
 JS_FRIEND_API(void)
@@ -546,49 +546,16 @@ js::DumpHeapComplete(JSContext *cx, FILE
 
     dtrc.visited.finish();
 }
 
 #endif
 
 namespace js {
 
-JS_FRIEND_API(bool)
-IsIncrementalBarrierNeeded(JSRuntime *rt)
-{
-    return !!rt->gcIncrementalTracer && !rt->gcRunning;
-}
-
-JS_FRIEND_API(bool)
-IsIncrementalBarrierNeeded(JSContext *cx)
-{
-    return IsIncrementalBarrierNeeded(cx->runtime);
-}
-
-extern JS_FRIEND_API(void)
-IncrementalReferenceBarrier(void *ptr)
-{
-    if (!ptr)
-        return;
-    JS_ASSERT(!static_cast<gc::Cell *>(ptr)->compartment()->rt->gcRunning);
-    uint32_t kind = gc::GetGCThingTraceKind(ptr);
-    if (kind == JSTRACE_OBJECT)
-        JSObject::writeBarrierPre((JSObject *) ptr);
-    else if (kind == JSTRACE_STRING)
-        JSString::writeBarrierPre((JSString *) ptr);
-    else
-        JS_NOT_REACHED("invalid trace kind");
-}
-
-extern JS_FRIEND_API(void)
-IncrementalValueBarrier(const Value &v)
-{
-    HeapValue::writeBarrierPre(v);
-}
-
 /* static */ void
 AutoLockGC::LockGC(JSRuntime *rt)
 {
     JS_ASSERT(rt);
     JS_LOCK_GC(rt);
 }
 
 /* static */ void
@@ -714,9 +681,95 @@ GetRuntimeCompartments(JSRuntime *rt)
 }
 
 JS_FRIEND_API(size_t)
 SizeOfJSContext()
 {
     return sizeof(JSContext);
 }
 
+JS_FRIEND_API(GCSliceCallback)
+SetGCSliceCallback(JSRuntime *rt, GCSliceCallback callback)
+{
+    GCSliceCallback old = rt->gcSliceCallback;
+    rt->gcSliceCallback = callback;
+    return old;
+}
+
+JS_FRIEND_API(bool)
+WantGCSlice(JSRuntime *rt)
+{
+    if (rt->gcZeal() == gc::ZealFrameVerifierValue || rt->gcZeal() == gc::ZealFrameGCValue)
+        return true;
+
+    if (rt->gcIncrementalState != gc::NO_INCREMENTAL)
+        return true;
+
+    return false;
+}
+
+JS_FRIEND_API(void)
+NotifyDidPaint(JSContext *cx)
+{
+    JSRuntime *rt = cx->runtime;
+
+    if (rt->gcZeal() == gc::ZealFrameVerifierValue) {
+        gc::VerifyBarriers(cx);
+        return;
+    }
+
+    if (rt->gcZeal() == gc::ZealFrameGCValue) {
+        GCSlice(cx, NULL, GC_NORMAL, gcreason::REFRESH_FRAME);
+        return;
+    }
+
+    if (rt->gcIncrementalState != gc::NO_INCREMENTAL && !rt->gcInterFrameGC)
+        GCSlice(cx, rt->gcIncrementalCompartment, GC_NORMAL, gcreason::REFRESH_FRAME);
+
+    rt->gcInterFrameGC = false;
+}
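
A hypothetical embedder hook ties WantGCSlice and NotifyDidPaint together; this sketch assumes an OnFramePainted callback in the embedding, which is not part of this API:

    void OnFramePainted(JSContext *cx)  // assumed embedder hook
    {
        // After a frame is drawn is a good moment for a GC slice, so pauses
        // land between frames rather than in the middle of one.
        if (js::WantGCSlice(cx->runtime))
            js::NotifyDidPaint(cx);
    }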
+
+extern JS_FRIEND_API(bool)
+IsIncrementalGCEnabled(JSRuntime *rt)
+{
+    return rt->gcIncrementalEnabled;
+}
+
+JS_FRIEND_API(bool)
+IsIncrementalBarrierNeeded(JSRuntime *rt)
+{
+    return (rt->gcIncrementalState == gc::MARK && !rt->gcRunning);
+}
+
+JS_FRIEND_API(bool)
+IsIncrementalBarrierNeeded(JSContext *cx)
+{
+    return IsIncrementalBarrierNeeded(cx->runtime);
+}
+
+JS_FRIEND_API(bool)
+IsIncrementalBarrierNeededOnObject(JSObject *obj)
+{
+    return obj->compartment()->needsBarrier();
+}
+
+extern JS_FRIEND_API(void)
+IncrementalReferenceBarrier(void *ptr)
+{
+    if (!ptr)
+        return;
+    JS_ASSERT(!static_cast<gc::Cell *>(ptr)->compartment()->rt->gcRunning);
+    uint32_t kind = gc::GetGCThingTraceKind(ptr);
+    if (kind == JSTRACE_OBJECT)
+        JSObject::writeBarrierPre((JSObject *) ptr);
+    else if (kind == JSTRACE_STRING)
+        JSString::writeBarrierPre((JSString *) ptr);
+    else
+        JS_NOT_REACHED("invalid trace kind");
+}
+
+extern JS_FRIEND_API(void)
+IncrementalValueBarrier(const Value &v)
+{
+    HeapValue::writeBarrierPre(v);
+}
+
 } // namespace js
--- a/js/src/jsfriendapi.h
+++ b/js/src/jsfriendapi.h
@@ -95,31 +95,29 @@ JS_NondeterministicGetWeakMapKeys(JSCont
 extern JS_FRIEND_API(void)
 JS_TraceShapeCycleCollectorChildren(JSTracer *trc, void *shape);
 
 enum {
     JS_TELEMETRY_GC_REASON,
     JS_TELEMETRY_GC_IS_COMPARTMENTAL,
     JS_TELEMETRY_GC_MS,
     JS_TELEMETRY_GC_MARK_MS,
-    JS_TELEMETRY_GC_SWEEP_MS
+    JS_TELEMETRY_GC_SWEEP_MS,
+    JS_TELEMETRY_GC_SLICE_MS,
+    JS_TELEMETRY_GC_MMU_50,
+    JS_TELEMETRY_GC_RESET,
+    JS_TELEMETRY_GC_INCREMENTAL_DISABLED
 };
 
 typedef void
 (* JSAccumulateTelemetryDataCallback)(int id, uint32_t sample);
 
 extern JS_FRIEND_API(void)
 JS_SetAccumulateTelemetryCallback(JSRuntime *rt, JSAccumulateTelemetryDataCallback callback);
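
A minimal consumer of the new telemetry IDs might look like this sketch (the histogram plumbing is assumed, not shown):

    static void
    AccumulateTelemetry(int id, uint32_t sample)
    {
        switch (id) {
          case JS_TELEMETRY_GC_SLICE_MS: /* record per-slice pause time */ break;
          case JS_TELEMETRY_GC_MMU_50:   /* record MMU(50ms) as a percentage */ break;
          case JS_TELEMETRY_GC_RESET:    /* count incremental-GC resets */ break;
          default: break;
        }
    }
    /* At startup: JS_SetAccumulateTelemetryCallback(rt, AccumulateTelemetry); */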
 
-typedef void
-(* JSGCFinishedCallback)(JSRuntime *rt, JSCompartment *comp, const char *description);
-
-extern JS_FRIEND_API(void)
-JS_SetGCFinishedCallback(JSRuntime *rt, JSGCFinishedCallback callback);
-
 extern JS_FRIEND_API(JSPrincipals *)
 JS_GetCompartmentPrincipals(JSCompartment *compartment);
 
 /* Safe to call with input obj == NULL. Returns non-NULL iff obj != NULL. */
 extern JS_FRIEND_API(JSObject *)
 JS_ObjectToInnerObject(JSContext *cx, JSObject *obj);
 
 /* Requires obj != NULL. */
@@ -698,22 +696,75 @@ extern JS_FRIEND_API(void)
 GCForReason(JSContext *cx, gcreason::Reason reason);
 
 extern JS_FRIEND_API(void)
 CompartmentGCForReason(JSContext *cx, JSCompartment *comp, gcreason::Reason reason);
 
 extern JS_FRIEND_API(void)
 ShrinkingGC(JSContext *cx, gcreason::Reason reason);
 
+extern JS_FRIEND_API(void)
+IncrementalGC(JSContext *cx, gcreason::Reason reason);
+
+extern JS_FRIEND_API(void)
+SetGCSliceTimeBudget(JSContext *cx, int64_t millis);
+
+enum GCProgress {
+    /*
+     * During non-incremental GC, the GC is bracketed by GC_CYCLE_BEGIN/END
+     * callbacks. During an incremental GC, the sequence of callbacks is as
+     * follows:
+     *   GC_CYCLE_BEGIN, GC_SLICE_END  (first slice)
+     *   GC_SLICE_BEGIN, GC_SLICE_END  (second slice)
+     *   ...
+     *   GC_SLICE_BEGIN, GC_CYCLE_END  (last slice)
+     */
+
+    GC_CYCLE_BEGIN,
+    GC_SLICE_BEGIN,
+    GC_SLICE_END,
+    GC_CYCLE_END
+};
+
+struct GCDescription {
+    const char *logMessage;
+    bool isCompartment;
+
+    GCDescription(const char *msg, bool isCompartment)
+      : logMessage(msg), isCompartment(isCompartment) {}
+};
+
+typedef void
+(* GCSliceCallback)(JSRuntime *rt, GCProgress progress, const GCDescription &desc);
+
+extern JS_FRIEND_API(GCSliceCallback)
+SetGCSliceCallback(JSRuntime *rt, GCSliceCallback callback);
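
A sketch of a slice callback that logs completed cycles, standing in for the removed JSGCFinishedCallback (the stderr target is illustrative):

    static void
    LogGCSlice(JSRuntime *rt, js::GCProgress progress, const js::GCDescription &desc)
    {
        // The formatted statistics string is only attached at GC_CYCLE_END.
        if (progress == js::GC_CYCLE_END && desc.logMessage) {
            fprintf(stderr, "GC(%s) %s\n",
                    desc.isCompartment ? "compartment" : "full", desc.logMessage);
        }
    }
    /* At startup: js::SetGCSliceCallback(rt, LogGCSlice); */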
+
+extern JS_FRIEND_API(bool)
+WantGCSlice(JSRuntime *rt);
+
+/*
+ * Signals a good place to do an incremental slice, because the browser is
+ * drawing a frame.
+ */
+extern JS_FRIEND_API(void)
+NotifyDidPaint(JSContext *cx);
+
+extern JS_FRIEND_API(bool)
+IsIncrementalGCEnabled(JSRuntime *rt);
+
 extern JS_FRIEND_API(bool)
 IsIncrementalBarrierNeeded(JSRuntime *rt);
 
 extern JS_FRIEND_API(bool)
 IsIncrementalBarrierNeeded(JSContext *cx);
 
+extern JS_FRIEND_API(bool)
+IsIncrementalBarrierNeededOnObject(JSObject *obj);
+
 extern JS_FRIEND_API(void)
 IncrementalReferenceBarrier(void *ptr);
 
 extern JS_FRIEND_API(void)
 IncrementalValueBarrier(const Value &v);
 
 class ObjectPtr
 {
--- a/js/src/jsfun.cpp
+++ b/js/src/jsfun.cpp
@@ -534,29 +534,29 @@ args_trace(JSTracer *trc, JSObject *obj)
      * invocation.  To distinguish the two cases (which imply different access
      * paths to the generator object), we use the JSFRAME_FLOATING_GENERATOR
      * flag, which is only set on the StackFrame kept in the generator object's
      * JSGenerator.
      */
 #if JS_HAS_GENERATORS
     StackFrame *fp = argsobj.maybeStackFrame();
     if (fp && fp->isFloatingGenerator())
-        MarkObject(trc, js_FloatingFrameToGenerator(fp)->obj, "generator object");
+        MarkObject(trc, &js_FloatingFrameToGenerator(fp)->obj, "generator object");
 #endif
 }
 
 /*
  * The classes below collaborate to lazily reflect and synchronize actual
  * argument values, argument count, and callee function object stored in a
  * StackFrame with their corresponding property values in the frame's
  * arguments object.
  */
 Class js::NormalArgumentsObjectClass = {
     "Arguments",
-    JSCLASS_NEW_RESOLVE |
+    JSCLASS_NEW_RESOLVE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_RESERVED_SLOTS(NormalArgumentsObject::RESERVED_SLOTS) |
     JSCLASS_HAS_CACHED_PROTO(JSProto_Object) |
     JSCLASS_FOR_OF_ITERATION,
     JS_PropertyStub,         /* addProperty */
     args_delProperty,
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     args_enumerate,
@@ -582,17 +582,17 @@ Class js::NormalArgumentsObjectClass = {
 
 /*
  * Strict mode arguments is significantly less magical than non-strict mode
  * arguments, so it is represented by a different class while sharing some
  * functionality.
  */
 Class js::StrictArgumentsObjectClass = {
     "Arguments",
-    JSCLASS_NEW_RESOLVE |
+    JSCLASS_NEW_RESOLVE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_RESERVED_SLOTS(StrictArgumentsObject::RESERVED_SLOTS) |
     JSCLASS_HAS_CACHED_PROTO(JSProto_Object) |
     JSCLASS_FOR_OF_ITERATION,
     JS_PropertyStub,         /* addProperty */
     args_delProperty,
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     strictargs_enumerate,
@@ -931,23 +931,23 @@ static void
 call_trace(JSTracer *trc, JSObject *obj)
 {
     JS_ASSERT(obj->isCall());
 
     /* Mark any generator frame, as for arguments objects. */
 #if JS_HAS_GENERATORS
     StackFrame *fp = (StackFrame *) obj->getPrivate();
     if (fp && fp->isFloatingGenerator())
-        MarkObject(trc, js_FloatingFrameToGenerator(fp)->obj, "generator object");
+        MarkObject(trc, &js_FloatingFrameToGenerator(fp)->obj, "generator object");
 #endif
 }
 
 JS_PUBLIC_DATA(Class) js::CallClass = {
     "Call",
-    JSCLASS_HAS_PRIVATE |
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_RESERVED_SLOTS(CallObject::RESERVED_SLOTS) |
     JSCLASS_NEW_RESOLVE | JSCLASS_IS_ANONYMOUS,
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     (JSResolveOp)call_resolve,
@@ -1460,17 +1460,17 @@ JSFunction::trace(JSTracer *trc)
                        toExtended()->extendedSlots, "nativeReserved");
     }
 
     if (atom)
         MarkStringUnbarriered(trc, atom, "atom");
 
     if (isInterpreted()) {
         if (script())
-            MarkScript(trc, script(), "script");
+            MarkScript(trc, &script(), "script");
         if (environment())
             MarkObjectUnbarriered(trc, environment(), "fun_callscope");
     }
 }
 
 static void
 fun_trace(JSTracer *trc, JSObject *obj)
 {
@@ -1494,17 +1494,17 @@ JSFunction::sizeOfMisc(JSMallocSizeOfFun
 
 /*
  * Reserve two slots in all function objects for XPConnect.  Note that this
  * does not bloat every instance, only those on which reserved slots are set,
  * and those on which ad-hoc properties are defined.
  */
 JS_FRIEND_DATA(Class) js::FunctionClass = {
     js_Function_str,
-    JSCLASS_NEW_RESOLVE |
+    JSCLASS_NEW_RESOLVE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_CACHED_PROTO(JSProto_Function),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     fun_enumerate,
     (JSResolveOp)fun_resolve,
     JS_ConvertStub,
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -39,23 +39,49 @@
  * ***** END LICENSE BLOCK ***** */
 
 /* JS Mark-and-Sweep Garbage Collector. */
 
 #include "mozilla/Attributes.h"
 #include "mozilla/Util.h"
 
 /*
- * This GC allocates fixed-sized things with sizes up to GC_NBYTES_MAX (see
- * jsgc.h). It allocates from a special GC arena pool with each arena allocated
- * using malloc. It uses an ideally parallel array of flag bytes to hold the
- * mark bit, finalizer type index, etc.
+ * This code implements a mark-and-sweep garbage collector. The mark phase is
+ * incremental. Most sweeping is done on a background thread. A GC is divided
+ * into slices as follows:
+ *
+ * Slice 1: Roots pushed onto the mark stack. The mark stack is processed by
+ * popping an element, marking it, and pushing its children.
+ *   ... JS code runs ...
+ * Slice 2: More mark stack processing.
+ *   ... JS code runs ...
+ * Slice n-1: More mark stack processing.
+ *   ... JS code runs ...
+ * Slice n: Mark stack is completely drained. Some sweeping is done.
+ *   ... JS code runs, remaining sweeping done on background thread ...
+ *
+ * When background sweeping finishes, the GC is complete.
  *
- * XXX swizzle page to freelist for better locality of reference
+ * Incremental GC requires close collaboration with the mutator (i.e., JS code):
+ *
+ * 1. During an incremental GC, if a memory location (except a root) is written
+ * to, then the value it previously held must be marked. Write barriers ensure
+ * this.
+ * 2. Any object that is allocated during incremental GC must start out marked.
+ * 3. Roots are special memory locations that don't need write
+ * barriers. However, they must be marked in the first slice. Roots are things
+ * like the C stack and the VM stack, since it would be too expensive to put
+ * barriers on them.
+ *
+ * Write barriers are handled using the compartment's barrierMarker_
+ * JSTracer. This includes a per-compartment stack of GC things that have been
+ * write-barriered. This stack is processed in each GC slice. The barrierMarker_
+ * is also used during write barrier verification (VerifyBarriers below).
  */
+
 #include <math.h>
 #include <string.h>     /* for memset used when DEBUG */
 
 #include "jstypes.h"
 #include "jsutil.h"
 #include "jshash.h"
 #include "jsclist.h"
 #include "jsprf.h"
@@ -113,22 +139,41 @@
 
 using namespace mozilla;
 using namespace js;
 using namespace js::gc;
 
 namespace js {
 namespace gc {
 
+/*
+ * Lower limit on the heap size; the growth-factor heuristic below only
+ * applies once the heap has grown past this threshold.
+ */
+const size_t GC_ALLOCATION_THRESHOLD = 30 * 1024 * 1024;
+
+/*
+ * Once the heap has grown past GC_ALLOCATION_THRESHOLD, a GC is triggered
+ * when the number of newly allocated arenas reaches GC_HEAP_GROWTH_FACTOR
+ * times the number of arenas live after the last GC. This factor is used
+ * for non-incremental GCs.
+ */
+const float GC_HEAP_GROWTH_FACTOR = 3.0f;
+
+/* Perform a full GC every 20 seconds if MaybeGC is called. */
+static const uint64_t GC_IDLE_FULL_SPAN = 20 * 1000 * 1000;
+
 #ifdef JS_GC_ZEAL
 static void
 StartVerifyBarriers(JSContext *cx);
 
 static void
 EndVerifyBarriers(JSContext *cx);
+
+void
+FinishVerifier(JSRuntime *rt);
 #endif
 
 /* This array should be const, but that doesn't link right under GCC. */
 AllocKind slotsToThingKind[] = {
     /* 0 */  FINALIZE_OBJECT0,  FINALIZE_OBJECT2,  FINALIZE_OBJECT2,  FINALIZE_OBJECT4,
     /* 4 */  FINALIZE_OBJECT4,  FINALIZE_OBJECT8,  FINALIZE_OBJECT8,  FINALIZE_OBJECT8,
     /* 8 */  FINALIZE_OBJECT8,  FINALIZE_OBJECT12, FINALIZE_OBJECT12, FINALIZE_OBJECT12,
     /* 12 */ FINALIZE_OBJECT12, FINALIZE_OBJECT16, FINALIZE_OBJECT16, FINALIZE_OBJECT16,
@@ -270,16 +315,18 @@ Arena::finalize(JSContext *cx, AllocKind
     /* Enforce requirements on size of T. */
     JS_ASSERT(thingSize % Cell::CellSize == 0);
     JS_ASSERT(thingSize <= 255);
 
     JS_ASSERT(aheader.allocated());
     JS_ASSERT(thingKind == aheader.getAllocKind());
     JS_ASSERT(thingSize == aheader.getThingSize());
     JS_ASSERT(!aheader.hasDelayedMarking);
+    JS_ASSERT(!aheader.markOverflow);
+    JS_ASSERT(!aheader.allocatedDuringIncremental);
 
     uintptr_t thing = thingsStart(thingKind);
     uintptr_t lastByte = thingsEnd() - 1;
 
     FreeSpan nextFree(aheader.getFirstFreeSpan());
     nextFree.checkSpan();
 
     FreeSpan newListHead;
@@ -845,17 +892,16 @@ PickChunk(JSCompartment *comp)
 
 JS_FRIEND_API(bool)
 IsAboutToBeFinalized(const Cell *thing)
 {
     JSCompartment *thingCompartment = reinterpret_cast<const Cell *>(thing)->compartment();
     JSRuntime *rt = thingCompartment->rt;
     if (rt->gcCurrentCompartment != NULL && rt->gcCurrentCompartment != thingCompartment)
         return false;
-
     return !reinterpret_cast<const Cell *>(thing)->isMarked();
 }
 
 bool
 IsAboutToBeFinalized(const Value &v)
 {
     JS_ASSERT(v.isMarkable());
     return IsAboutToBeFinalized((Cell *)v.toGCThing());
@@ -921,16 +967,28 @@ InFreeList(ArenaHeader *aheader, uintptr
         /*
          * The last possible empty span is at the end of the arena. Here
          * span->end < thing < thingsEnd and so we must have more spans.
          */
         span = span->nextSpan();
     }
 }
 
+enum ConservativeGCTest
+{
+    CGCT_VALID,
+    CGCT_LOWBITSET, /* excluded because one of the low bits was set */
+    CGCT_NOTARENA,  /* not within arena range in a chunk */
+    CGCT_OTHERCOMPARTMENT,  /* in another compartment */
+    CGCT_NOTCHUNK,  /* not within a valid chunk */
+    CGCT_FREEARENA, /* within arena containing only free things */
+    CGCT_NOTLIVE,   /* gcthing is not allocated */
+    CGCT_END
+};
+
 /*
  * Tests whether w is a (possibly dead) GC thing. Returns CGCT_VALID and
  * details about the thing if so. On failure, returns the reason for rejection.
  */
 inline ConservativeGCTest
 IsAddressableGCThing(JSRuntime *rt, uintptr_t w,
                      gc::AllocKind *thingKindPtr, ArenaHeader **arenaHeader, void **thing)
 {
@@ -1019,32 +1077,28 @@ MarkIfGCThingWord(JSTracer *trc, uintptr
     /*
      * Check if the thing is free. We must use the list of free spans as at
      * this point we no longer have the mark bits from the previous GC run and
      * we must account for newly allocated things.
      */
     if (InFreeList(aheader, uintptr_t(thing)))
         return CGCT_NOTLIVE;
 
+    JSGCTraceKind traceKind = MapAllocToTraceKind(thingKind);
 #ifdef DEBUG
     const char pattern[] = "machine_stack %p";
     char nameBuf[sizeof(pattern) - 2 + sizeof(thing) * 2];
     JS_snprintf(nameBuf, sizeof(nameBuf), pattern, thing);
     JS_SET_TRACING_NAME(trc, nameBuf);
 #endif
-    MarkKind(trc, thing, MapAllocToTraceKind(thingKind));
-
-#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
-    if (IS_GC_MARKING_TRACER(trc)) {
-        GCMarker *marker = static_cast<GCMarker *>(trc);
-        if (marker->conservativeDumpFileName)
-            marker->conservativeRoots.append(thing);
-        if (uintptr_t(thing) != w)
-            marker->conservativeStats.unaligned++;
-    }
+    MarkKind(trc, thing, traceKind);
+
+#ifdef DEBUG
+    if (trc->runtime->gcIncrementalState == MARK_ROOTS)
+        trc->runtime->gcSavedRoots.append(JSRuntime::SavedGCRoot(thing, traceKind));
 #endif
 
     return CGCT_VALID;
 }
 
 static void
 MarkWordConservatively(JSTracer *trc, uintptr_t w)
 {
@@ -1065,18 +1119,36 @@ static void
 MarkRangeConservatively(JSTracer *trc, const uintptr_t *begin, const uintptr_t *end)
 {
     JS_ASSERT(begin <= end);
     for (const uintptr_t *i = begin; i < end; ++i)
         MarkWordConservatively(trc, *i);
 }
 
 static JS_NEVER_INLINE void
-MarkConservativeStackRoots(JSTracer *trc, JSRuntime *rt)
+MarkConservativeStackRoots(JSTracer *trc, bool useSavedRoots)
 {
+    JSRuntime *rt = trc->runtime;
+
+#ifdef DEBUG
+    if (useSavedRoots) {
+        for (JSRuntime::SavedGCRoot *root = rt->gcSavedRoots.begin();
+             root != rt->gcSavedRoots.end();
+             root++)
+        {
+            JS_SET_TRACING_NAME(trc, "cstack");
+            MarkKind(trc, root->thing, root->kind);
+        }
+        return;
+    }
+
+    if (rt->gcIncrementalState == MARK_ROOTS)
+        rt->gcSavedRoots.clearAndFree();
+#endif
+
     ConservativeGCData *cgcd = &rt->conservativeGC;
     if (!cgcd->hasStackToScan()) {
 #ifdef JS_THREADSAFE
         JS_ASSERT(!rt->suspendCount);
         JS_ASSERT(rt->requestDepth <= cgcd->requestThreshold);
 #endif
         return;
     }
@@ -1127,16 +1199,18 @@ MarkStackRangeConservatively(JSTracer *t
     JS_ASSERT(begin <= end);
     for (const uintptr_t *i = begin; i < end; i += sizeof(Value) / sizeof(uintptr_t))
         MarkWordConservatively(trc, *i);
 #else
     MarkRangeConservatively(trc, begin, end);
 #endif
 }
 
 JS_NEVER_INLINE void
 ConservativeGCData::recordStackTop()
 {
     /* Update the native stack pointer if it points to a bigger stack. */
     uintptr_t dummy;
     nativeStackTop = &dummy;
 
     /*
@@ -1186,16 +1260,21 @@ js_FinishGC(JSRuntime *rt)
     /*
      * Wait until the background finalization stops and the helper thread
      * shuts down before we forcefully release any remaining GC memory.
      */
 #ifdef JS_THREADSAFE
     rt->gcHelperThread.finish();
 #endif
 
+#ifdef JS_GC_ZEAL
+    /* Free memory associated with GC verification. */
+    FinishVerifier(rt);
+#endif
+
     /* Delete all remaining Compartments. */
     for (CompartmentsIter c(rt); !c.done(); c.next())
         Foreground::delete_(c.get());
     rt->compartments.clear();
     rt->atomsCompartment = NULL;
 
     rt->gcSystemAvailableChunkListHead = NULL;
     rt->gcUserAvailableChunkListHead = NULL;
@@ -1231,33 +1310,33 @@ js_AddGCThingRoot(JSContext *cx, void **
     return ok;
 }
 
 JS_FRIEND_API(JSBool)
 js_AddRootRT(JSRuntime *rt, jsval *vp, const char *name)
 {
     /*
      * Due to the long-standing, but now removed, use of rt->gcLock across the
-     * bulk of js_GC, API users have come to depend on JS_AddRoot etc. locking
+     * bulk of js::GC, API users have come to depend on JS_AddRoot etc. locking
      * properly with a racing GC, without calling JS_AddRoot from a request.
      * We have to preserve API compatibility here, now that we avoid holding
      * rt->gcLock across the mark phase (including the root hashtable mark).
      */
     AutoLockGC lock(rt);
 
     return !!rt->gcRootsHash.put((void *)vp,
                                  RootInfo(name, JS_GC_ROOT_VALUE_PTR));
 }
 
 JS_FRIEND_API(JSBool)
 js_AddGCThingRootRT(JSRuntime *rt, void **rp, const char *name)
 {
     /*
      * Due to the long-standing, but now removed, use of rt->gcLock across the
-     * bulk of js_GC, API users have come to depend on JS_AddRoot etc. locking
+     * bulk of js::GC, API users have come to depend on JS_AddRoot etc. locking
      * properly with a racing GC, without calling JS_AddRoot from a request.
      * We have to preserve API compatibility here, now that we avoid holding
      * rt->gcLock across the mark phase (including the root hashtable mark).
      */
     AutoLockGC lock(rt);
 
     return !!rt->gcRootsHash.put((void *)rp,
                                  RootInfo(name, JS_GC_ROOT_GCTHING_PTR));
@@ -1365,16 +1444,29 @@ JSCompartment::reduceGCTriggerBytes(size
     if (gcTriggerBytes - amount < GC_ALLOCATION_THRESHOLD * GC_HEAP_GROWTH_FACTOR)
         return;
     gcTriggerBytes -= amount;
 }
 
 namespace js {
 namespace gc {
 
+inline void
+ArenaLists::prepareForIncrementalGC(JSCompartment *comp)
+{
+    for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
+        FreeSpan *headSpan = &freeLists[i];
+        if (!headSpan->isEmpty()) {
+            ArenaHeader *aheader = headSpan->arenaHeader();
+            aheader->allocatedDuringIncremental = true;
+            comp->barrierMarker_.delayMarkingArena(aheader);
+        }
+    }
+}
+
 inline void *
 ArenaLists::allocateFromArena(JSCompartment *comp, AllocKind thingKind)
 {
     Chunk *chunk = NULL;
 
     ArenaList *al = &arenaLists[thingKind];
     AutoLockGC maybeLock;
 
@@ -1418,16 +1510,20 @@ ArenaLists::allocateFromArena(JSCompartm
             al->cursor = &aheader->next;
 
             /*
              * Move the free span stored in the arena to the free list and
              * allocate from it.
              */
             freeLists[thingKind] = aheader->getFirstFreeSpan();
             aheader->setAsFullyUsed();
+            if (JS_UNLIKELY(comp->needsBarrier())) {
+                aheader->allocatedDuringIncremental = true;
+                comp->barrierMarker_.delayMarkingArena(aheader);
+            }
             return freeLists[thingKind].infallibleAllocate(Arena::thingSize(thingKind));
         }
 
         /* Make sure we hold the GC lock before we call PickChunk. */
         if (!maybeLock.locked())
             maybeLock.lock(comp->rt);
         chunk = PickChunk(comp);
         if (!chunk)
@@ -1443,16 +1539,20 @@ ArenaLists::allocateFromArena(JSCompartm
      * cursor, so after the GC the most recently added arena will be used first
      * for allocations improving cache locality.
      */
     JS_ASSERT(!*al->cursor);
     ArenaHeader *aheader = chunk->allocateArena(comp, thingKind);
     if (!aheader)
         return NULL;
 
+    if (JS_UNLIKELY(comp->needsBarrier())) {
+        aheader->allocatedDuringIncremental = true;
+        comp->barrierMarker_.delayMarkingArena(aheader);
+    }
     aheader->next = al->head;
     if (!al->head) {
         JS_ASSERT(al->cursor == &al->head);
         al->cursor = &aheader->next;
     }
     al->head = aheader;
 
     /* See comments before allocateFromNewArena about this assert. */
@@ -1614,29 +1714,29 @@ ArenaLists::finalizeScripts(JSContext *c
 
 static void
 RunLastDitchGC(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime;
 
     /* The last ditch GC preserves all atoms. */
     AutoKeepAtoms keep(rt);
-    js_GC(cx, rt->gcTriggerCompartment, GC_NORMAL, gcreason::LAST_DITCH);
+    GC(cx, rt->gcTriggerCompartment, GC_NORMAL, gcreason::LAST_DITCH);
 }
 
 /* static */ void *
 ArenaLists::refillFreeList(JSContext *cx, AllocKind thingKind)
 {
     JS_ASSERT(cx->compartment->arenas.freeLists[thingKind].isEmpty());
 
     JSCompartment *comp = cx->compartment;
     JSRuntime *rt = comp->rt;
     JS_ASSERT(!rt->gcRunning);
 
-    bool runGC = !!rt->gcIsNeeded;
+    bool runGC = rt->gcIncrementalState != NO_INCREMENTAL && comp->gcBytes > comp->gcTriggerBytes;
     for (;;) {
         if (JS_UNLIKELY(runGC)) {
             RunLastDitchGC(cx);
 
             /*
              * The JSGC_END callback can legitimately allocate new GC
              * things and populate the free list. If that happens, just
              * return that list head.
@@ -1717,103 +1817,309 @@ js_UnlockGCThingRT(JSRuntime *rt, void *
         rt->gcPoke = true;
         if (--p->value == 0)
             rt->gcLocksHash.remove(p);
     }
 }
 
 namespace js {
 
+void
+InitTracer(JSTracer *trc, JSRuntime *rt, JSContext *cx, JSTraceCallback callback)
+{
+    trc->runtime = rt;
+    trc->context = cx;
+    trc->callback = callback;
+    trc->debugPrinter = NULL;
+    trc->debugPrintArg = NULL;
+    trc->debugPrintIndex = size_t(-1);
+    trc->eagerlyTraceWeakMaps = true;
+}
+
+/* static */ int64_t
+SliceBudget::TimeBudget(int64_t millis)
+{
+    return millis * PRMJ_USEC_PER_MSEC;
+}
+
+/* static */ int64_t
+SliceBudget::WorkBudget(int64_t work)
+{
+    return -work;
+}
+
+SliceBudget::SliceBudget()
+  : deadline(INT64_MAX),
+    counter(INTPTR_MAX)
+{
+}
+
+SliceBudget::SliceBudget(int64_t budget)
+{
+    if (budget == Unlimited) {
+        deadline = INT64_MAX;
+        counter = INTPTR_MAX;
+    } else if (budget > 0) {
+        deadline = PRMJ_Now() + budget;
+        counter = CounterReset;
+    } else {
+        deadline = 0;
+        counter = -budget;
+    }
+}
+
+bool
+SliceBudget::checkOverBudget()
+{
+    bool over = PRMJ_Now() > deadline;
+    if (!over)
+        counter = CounterReset;
+    return over;
+}
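A usage sketch of SliceBudget as the incremental slices below consume it; haveWork() and doSomeWork() are placeholders, while TimeBudget() and checkOverBudget() are the real members defined above:

/* Bound a slice to ~10 ms of wall-clock time (sketch). */
SliceBudget budget(SliceBudget::TimeBudget(10));
while (haveWork()) {
    doSomeWork();                  /* e.g. pop and trace one mark-stack entry */
    if (budget.checkOverBudget())  /* deadline passed: yield to the mutator */
        break;                     /* the next slice resumes where we stopped */
}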
+
+GCMarker::GCMarker()
+  : color(BLACK),
+    started(false),
+    unmarkedArenaStackTop(NULL),
+    markLaterArenas(0),
+    grayFailed(false)
+{
+}
+
+bool
+GCMarker::init(bool lazy)
+{
+    if (!stack.init(lazy ? 0 : MARK_STACK_LENGTH))
+        return false;
+    return true;
+}
+
+void
+GCMarker::start(JSRuntime *rt, JSContext *cx)
+{
+    InitTracer(this, rt, cx, NULL);
+    JS_ASSERT(!started);
+    started = true;
+    color = BLACK;
+
+    JS_ASSERT(!unmarkedArenaStackTop);
+    JS_ASSERT(markLaterArenas == 0);
+
+    JS_ASSERT(grayRoots.empty());
+    JS_ASSERT(!grayFailed);
+
+    /*
+     * The GC is recomputing the liveness of WeakMap entries, so we delay
+     * visiting entries.
+     */
+    eagerlyTraceWeakMaps = JS_FALSE;
+}
+
+void
+GCMarker::stop()
+{
+    JS_ASSERT(isDrained());
+
+    JS_ASSERT(started);
+    started = false;
+
+    JS_ASSERT(!unmarkedArenaStackTop);
+    JS_ASSERT(markLaterArenas == 0);
+
+    JS_ASSERT(grayRoots.empty());
+    grayFailed = false;
+}
+
+void
+GCMarker::reset()
+{
+    color = BLACK;
+
+    stack.reset();
+    JS_ASSERT(isMarkStackEmpty());
+
+    while (unmarkedArenaStackTop) {
+        ArenaHeader *aheader = unmarkedArenaStackTop;
+        JS_ASSERT(aheader->hasDelayedMarking);
+        JS_ASSERT(markLaterArenas);
+        unmarkedArenaStackTop = aheader->getNextDelayedMarking();
+        aheader->hasDelayedMarking = 0;
+        aheader->markOverflow = 0;
+        aheader->allocatedDuringIncremental = 0;
+        markLaterArenas--;
+    }
+    JS_ASSERT(isDrained());
+    JS_ASSERT(!markLaterArenas);
+
+    grayRoots.clearAndFree();
+    grayFailed = false;
+}
+
 /*
  * When the native stack is low, the GC does not call JS_TraceChildren to mark
  * the reachable "children" of the thing. Rather the thing is put aside and
  * JS_TraceChildren is called later with more space on the C stack.
  *
  * To implement such delayed marking of the children with minimal overhead for
  * the normal case of sufficient native stack, the code adds a field per
  * arena. The field markingDelay->link links all arenas with delayed things
  * into a stack list with the pointer to stack top in
  * GCMarker::unmarkedArenaStackTop. delayMarkingChildren adds
  * arenas to the stack as necessary while markDelayedChildren pops the arenas
  * from the stack until it empties.
  */
 
-GCMarker::GCMarker(JSContext *cx)
-  : color(BLACK),
-    unmarkedArenaStackTop(NULL),
-    stack(cx->runtime->gcMarkStackArray)
+inline void
+GCMarker::delayMarkingArena(ArenaHeader *aheader)
 {
-    JS_TracerInit(this, cx, NULL);
-    markLaterArenas = 0;
-#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
-    conservativeDumpFileName = getenv("JS_DUMP_CONSERVATIVE_GC_ROOTS");
-    memset(&conservativeStats, 0, sizeof(conservativeStats));
-#endif
-
-    /*
-     * The GC is recomputing the liveness of WeakMap entries, so we
-     * delay visting entries.
-     */
-    eagerlyTraceWeakMaps = JS_FALSE;
-}
-
-GCMarker::~GCMarker()
-{
-#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
-    dumpConservativeRoots();
-#endif
+    if (aheader->hasDelayedMarking) {
+        /* Arena already scheduled to be marked later */
+        return;
+    }
+    aheader->setNextDelayedMarking(unmarkedArenaStackTop);
+    unmarkedArenaStackTop = aheader;
+    markLaterArenas++;
 }
 
 void
 GCMarker::delayMarkingChildren(const void *thing)
 {
     const Cell *cell = reinterpret_cast<const Cell *>(thing);
-    ArenaHeader *aheader = cell->arenaHeader();
-    if (aheader->hasDelayedMarking) {
-        /* Arena already scheduled to be marked later */
-        return;
-    }
-    aheader->setNextDelayedMarking(unmarkedArenaStackTop);
-    unmarkedArenaStackTop = aheader->getArena();
-    markLaterArenas++;
-}
-
-static void
-MarkDelayedChildren(GCMarker *trc, Arena *a)
-{
-    AllocKind allocKind = a->aheader.getAllocKind();
-    JSGCTraceKind traceKind = MapAllocToTraceKind(allocKind);
-    size_t thingSize = Arena::thingSize(allocKind);
-    uintptr_t end = a->thingsEnd();
-    for (uintptr_t thing = a->thingsStart(allocKind); thing != end; thing += thingSize) {
-        Cell *t = reinterpret_cast<Cell *>(thing);
-        if (t->isMarked())
-            JS_TraceChildren(trc, t, traceKind);
-    }
+    cell->arenaHeader()->markOverflow = 1;
+    delayMarkingArena(cell->arenaHeader());
 }
 
 void
-GCMarker::markDelayedChildren()
+GCMarker::markDelayedChildren(ArenaHeader *aheader)
 {
+    if (aheader->markOverflow) {
+        bool always = aheader->allocatedDuringIncremental;
+        aheader->markOverflow = 0;
+
+        for (CellIterUnderGC i(aheader); !i.done(); i.next()) {
+            Cell *t = i.getCell();
+            if (always || t->isMarked()) {
+                t->markIfUnmarked();
+                JS_TraceChildren(this, t, MapAllocToTraceKind(aheader->getAllocKind()));
+            }
+        }
+    } else {
+        JS_ASSERT(aheader->allocatedDuringIncremental);
+        PushArena(this, aheader);
+    }
+    aheader->allocatedDuringIncremental = 0;
+}
+
+bool
+GCMarker::markDelayedChildren(SliceBudget &budget)
+{
+    gcstats::AutoPhase ap(runtime->gcStats, gcstats::PHASE_MARK_DELAYED);
+
     JS_ASSERT(unmarkedArenaStackTop);
     do {
         /*
          * If marking gets delayed at the same arena again, we must repeat
          * marking of its things. For that we pop arena from the stack and
          * clear its hasDelayedMarking flag before we begin the marking.
          */
-        Arena *a = unmarkedArenaStackTop;
-        JS_ASSERT(a->aheader.hasDelayedMarking);
+        ArenaHeader *aheader = unmarkedArenaStackTop;
+        JS_ASSERT(aheader->hasDelayedMarking);
         JS_ASSERT(markLaterArenas);
-        unmarkedArenaStackTop = a->aheader.getNextDelayedMarking();
-        a->aheader.hasDelayedMarking = 0;
+        unmarkedArenaStackTop = aheader->getNextDelayedMarking();
+        aheader->hasDelayedMarking = 0;
         markLaterArenas--;
-        MarkDelayedChildren(this, a);
+        markDelayedChildren(aheader);
+
+        if (budget.checkOverBudget())
+            return false;
     } while (unmarkedArenaStackTop);
     JS_ASSERT(!markLaterArenas);
+
+    return true;
+}
+
+#ifdef DEBUG
+void
+GCMarker::checkCompartment(void *p)
+{
+    JS_ASSERT(started);
+
+    Cell *cell = static_cast<Cell *>(p);
+    if (runtime->gcRunning && runtime->gcCurrentCompartment)
+        JS_ASSERT(cell->compartment() == runtime->gcCurrentCompartment);
+    else if (runtime->gcIncrementalCompartment)
+        JS_ASSERT(cell->compartment() == runtime->gcIncrementalCompartment);
+}
+#endif
+
+bool
+GCMarker::hasBufferedGrayRoots() const
+{
+    return !grayFailed;
+}
+
+void
+GCMarker::startBufferingGrayRoots()
+{
+    JS_ASSERT(!callback);
+    callback = GrayCallback;
+    JS_ASSERT(IS_GC_MARKING_TRACER(this));
+}
+
+void
+GCMarker::endBufferingGrayRoots()
+{
+    JS_ASSERT(callback == GrayCallback);
+    callback = NULL;
+    JS_ASSERT(IS_GC_MARKING_TRACER(this));
+}
+
+void
+GCMarker::markBufferedGrayRoots()
+{
+    JS_ASSERT(!grayFailed);
+
+    for (GrayRoot *elem = grayRoots.begin(); elem != grayRoots.end(); elem++) {
+#ifdef DEBUG
+        debugPrinter = elem->debugPrinter;
+        debugPrintArg = elem->debugPrintArg;
+        debugPrintIndex = elem->debugPrintIndex;
+#endif
+        MarkKind(this, elem->thing, elem->kind);
+    }
+
+    grayRoots.clearAndFree();
+}
+
+void
+GCMarker::appendGrayRoot(void *thing, JSGCTraceKind kind)
+{
+    JS_ASSERT(started);
+
+    if (grayFailed)
+        return;
+
+    GrayRoot root(thing, kind);
+#ifdef DEBUG
+    root.debugPrinter = debugPrinter;
+    root.debugPrintArg = debugPrintArg;
+    root.debugPrintIndex = debugPrintIndex;
+#endif
+
+    if (!grayRoots.append(root)) {
+        grayRoots.clearAndFree();
+        grayFailed = true;
+    }
+}
+
+void
+GCMarker::GrayCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind)
+{
+    GCMarker *gcmarker = static_cast<GCMarker *>(trc);
+    gcmarker->appendGrayRoot(*thingp, kind);
 }
 
 } /* namespace js */
 
 #ifdef DEBUG
 static void
 EmptyMarkCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind)
 {
@@ -1860,27 +2166,38 @@ gc_root_traversal(JSTracer *trc, const R
 
 static void
 gc_lock_traversal(const GCLocks::Entry &entry, JSTracer *trc)
 {
     JS_ASSERT(entry.value >= 1);
     MarkGCThingRoot(trc, entry.key, "locked object");
 }
 
+namespace js {
+
+void
+MarkCompartmentActive(StackFrame *fp)
+{
+    if (fp->isScriptFrame())
+        fp->script()->compartment()->active = true;
+}
+
+} /* namespace js */
+
 void
 AutoIdArray::trace(JSTracer *trc)
 {
     JS_ASSERT(tag == IDARRAY);
     gc::MarkIdRange(trc, idArray->length, idArray->vector, "JSAutoIdArray.idArray");
 }
 
 void
 AutoEnumStateRooter::trace(JSTracer *trc)
 {
-    gc::MarkObjectRoot(trc, obj, "JS::AutoEnumStateRooter.obj");
+    gc::MarkObjectRoot(trc, &obj, "JS::AutoEnumStateRooter.obj");
 }
 
 inline void
 AutoGCRooter::trace(JSTracer *trc)
 {
     switch (tag) {
       case JSVAL:
         MarkValueRoot(trc, &static_cast<AutoValueRooter *>(this)->val, "JS::AutoValueRooter.val");
@@ -1911,54 +2228,62 @@ AutoGCRooter::trace(JSTracer *trc)
             MarkValueRoot(trc, &desc.set, "PropDesc::set");
         }
         return;
       }
 
       case DESCRIPTOR : {
         PropertyDescriptor &desc = *static_cast<AutoPropertyDescriptorRooter *>(this);
         if (desc.obj)
-            MarkObjectRoot(trc, desc.obj, "Descriptor::obj");
+            MarkObjectRoot(trc, &desc.obj, "Descriptor::obj");
         MarkValueRoot(trc, &desc.value, "Descriptor::value");
-        if ((desc.attrs & JSPROP_GETTER) && desc.getter)
-            MarkObjectRoot(trc, CastAsObject(desc.getter), "Descriptor::get");
-        if (desc.attrs & JSPROP_SETTER && desc.setter)
-            MarkObjectRoot(trc, CastAsObject(desc.setter), "Descriptor::set");
+        if ((desc.attrs & JSPROP_GETTER) && desc.getter) {
+            JSObject *tmp = JS_FUNC_TO_DATA_PTR(JSObject *, desc.getter);
+            MarkObjectRoot(trc, &tmp, "Descriptor::get");
+            desc.getter = JS_DATA_TO_FUNC_PTR(JSPropertyOp, tmp);
+        }
+        if (desc.attrs & JSPROP_SETTER && desc.setter) {
+            JSObject *tmp = JS_FUNC_TO_DATA_PTR(JSObject *, desc.setter);
+            MarkObjectRoot(trc, &tmp, "Descriptor::set");
+            desc.setter = JS_DATA_TO_FUNC_PTR(JSStrictPropertyOp, tmp);
+        }
         return;
       }
 
       case NAMESPACES: {
         JSXMLArray<JSObject> &array = static_cast<AutoNamespaceArray *>(this)->array;
         MarkObjectRange(trc, array.length, array.vector, "JSXMLArray.vector");
         js_XMLArrayCursorTrace(trc, array.cursors);
         return;
       }
 
       case XML:
         js_TraceXML(trc, static_cast<AutoXMLRooter *>(this)->xml);
         return;
 
       case OBJECT:
-        if (JSObject *obj = static_cast<AutoObjectRooter *>(this)->obj)
-            MarkObjectRoot(trc, obj, "JS::AutoObjectRooter.obj");
+        if (static_cast<AutoObjectRooter *>(this)->obj)
+            MarkObjectRoot(trc, &static_cast<AutoObjectRooter *>(this)->obj,
+                           "JS::AutoObjectRooter.obj");
         return;
 
       case ID:
-        MarkIdRoot(trc, static_cast<AutoIdRooter *>(this)->id_, "JS::AutoIdRooter.id_");
+        MarkIdRoot(trc, &static_cast<AutoIdRooter *>(this)->id_, "JS::AutoIdRooter.id_");
         return;
 
       case VALVECTOR: {
         AutoValueVector::VectorImpl &vector = static_cast<AutoValueVector *>(this)->vector;
         MarkValueRootRange(trc, vector.length(), vector.begin(), "js::AutoValueVector.vector");
         return;
       }
 
       case STRING:
-        if (JSString *str = static_cast<AutoStringRooter *>(this)->str)
-            MarkStringRoot(trc, str, "JS::AutoStringRooter.str");
+        if (static_cast<AutoStringRooter *>(this)->str)
+            MarkStringRoot(trc, &static_cast<AutoStringRooter *>(this)->str,
+                           "JS::AutoStringRooter.str");
         return;
 
       case IDVECTOR: {
         AutoIdVector::VectorImpl &vector = static_cast<AutoIdVector *>(this)->vector;
         MarkIdRootRange(trc, vector.length(), vector.begin(), "js::AutoIdVector.vector");
         return;
       }
 
@@ -1991,46 +2316,40 @@ void
 AutoGCRooter::traceAll(JSTracer *trc)
 {
     for (js::AutoGCRooter *gcr = this; gcr; gcr = gcr->down)
         gcr->trace(trc);
 }
 
 namespace js {
 
-void
-MarkWeakReferences(GCMarker *gcmarker)
-{
-    JS_ASSERT(gcmarker->isMarkStackEmpty());
-    while (WatchpointMap::markAllIteratively(gcmarker) ||
-           WeakMapBase::markAllIteratively(gcmarker) ||
-           Debugger::markAllIteratively(gcmarker)) {
-        gcmarker->drainMarkStack();
-    }
-    JS_ASSERT(gcmarker->isMarkStackEmpty());
-}
-
 static void
-MarkRuntime(JSTracer *trc)
+MarkRuntime(JSTracer *trc, bool useSavedRoots = false)
 {
     JSRuntime *rt = trc->runtime;
+    JS_ASSERT(trc->callback != GCMarker::GrayCallback);
+    if (rt->gcCurrentCompartment) {
+        for (CompartmentsIter c(rt); !c.done(); c.next())
+            c->markCrossCompartmentWrappers(trc);
+        Debugger::markCrossCompartmentDebuggerObjectReferents(trc);
+    }
 
     if (rt->hasContexts())
-        MarkConservativeStackRoots(trc, rt);
+        MarkConservativeStackRoots(trc, useSavedRoots);
 
     for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront())
         gc_root_traversal(trc, r.front());
 
     for (GCLocks::Range r = rt->gcLocksHash.all(); !r.empty(); r.popFront())
         gc_lock_traversal(r.front(), trc);
 
     if (rt->scriptPCCounters) {
-        const ScriptOpcodeCountsVector &vec = *rt->scriptPCCounters;
+        ScriptOpcodeCountsVector &vec = *rt->scriptPCCounters;
         for (size_t i = 0; i < vec.length(); i++)
-            MarkScriptRoot(trc, vec[i].script, "scriptPCCounters");
+            MarkScriptRoot(trc, &vec[i].script, "scriptPCCounters");
     }
 
     js_TraceAtomState(trc);
     rt->staticStrings.trace(trc);
 
     JSContext *iter = NULL;
     while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter))
         acx->mark(trc);
@@ -2044,41 +2363,48 @@ MarkRuntime(JSTracer *trc)
             if (c->watchpointMap)
                 c->watchpointMap->markAll(trc);
         }
 
         /* Do not discard scripts with counters while profiling. */
         if (rt->profilingScripts) {
             for (CellIterUnderGC i(c, FINALIZE_SCRIPT); !i.done(); i.next()) {
                 JSScript *script = i.get<JSScript>();
-                if (script->pcCounters)
-                    MarkScriptRoot(trc, script, "profilingScripts");
+                if (script->pcCounters) {
+                    MarkScriptRoot(trc, &script, "profilingScripts");
+                    JS_ASSERT(script == i.get<JSScript>());
+                }
             }
         }
     }
 
 #ifdef JS_METHODJIT
     /* We need to expand inline frames before stack scanning. */
     for (CompartmentsIter c(rt); !c.done(); c.next())
         mjit::ExpandInlineFrames(c);
 #endif
 
     rt->stackSpace.mark(trc);
 
     /* The embedding can register additional roots here. */
     if (JSTraceDataOp op = rt->gcBlackRootsTraceOp)
         (*op)(trc, rt->gcBlackRootsData);
 
-    if (!IS_GC_MARKING_TRACER(trc)) {
-        /* We don't want to miss these when called from TraceRuntime. */
-        if (JSTraceDataOp op = rt->gcGrayRootsTraceOp)
+    /* During GC, this buffers up the gray roots and doesn't mark them. */
+    if (JSTraceDataOp op = rt->gcGrayRootsTraceOp) {
+        if (IS_GC_MARKING_TRACER(trc)) {
+            GCMarker *gcmarker = static_cast<GCMarker *>(trc);
+            gcmarker->startBufferingGrayRoots();
             (*op)(trc, rt->gcGrayRootsData);
+            gcmarker->endBufferingGrayRoots();
+        } else {
+            (*op)(trc, rt->gcGrayRootsData);
+        }
     }
 }
-
 void
 TriggerGC(JSRuntime *rt, gcreason::Reason reason)
 {
     JS_ASSERT(rt->onOwnerThread());
 
     if (rt->gcRunning || rt->gcIsNeeded)
         return;
 
@@ -2090,22 +2416,22 @@ TriggerGC(JSRuntime *rt, gcreason::Reaso
 }
 
 void
 TriggerCompartmentGC(JSCompartment *comp, gcreason::Reason reason)
 {
     JSRuntime *rt = comp->rt;
     JS_ASSERT(!rt->gcRunning);
 
-    if (rt->gcZeal()) {
+    if (rt->gcZeal() == ZealAllocValue) {
         TriggerGC(rt, reason);
         return;
     }
 
-    if (rt->gcMode != JSGC_MODE_COMPARTMENT || comp == rt->atomsCompartment) {
+    if (rt->gcMode == JSGC_MODE_GLOBAL || comp == rt->atomsCompartment) {
         /* We can't do a compartmental GC of the default compartment. */
         TriggerGC(rt, reason);
         return;
     }
 
     if (rt->gcIsNeeded) {
         /* If we need to GC more than one compartment, run a full GC. */
         if (rt->gcTriggerCompartment != comp)
@@ -2124,43 +2450,47 @@ TriggerCompartmentGC(JSCompartment *comp
 }
 
 void
 MaybeGC(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime;
     JS_ASSERT(rt->onOwnerThread());
 
-    if (rt->gcZeal()) {
-        js_GC(cx, NULL, GC_NORMAL, gcreason::MAYBEGC);
+    if (rt->gcZeal() == ZealAllocValue || rt->gcZeal() == ZealPokeValue) {
+        GC(cx, NULL, GC_NORMAL, gcreason::MAYBEGC);
         return;
     }
 
     JSCompartment *comp = cx->compartment;
     if (rt->gcIsNeeded) {
-        js_GC(cx, (comp == rt->gcTriggerCompartment) ? comp : NULL, GC_NORMAL, gcreason::MAYBEGC);
+        GCSlice(cx, (comp == rt->gcTriggerCompartment) ? comp : NULL,
+                GC_NORMAL, gcreason::MAYBEGC);
         return;
     }
 
-    if (comp->gcBytes > 8192 && comp->gcBytes >= 3 * (comp->gcTriggerBytes / 4)) {
-        js_GC(cx, (rt->gcMode == JSGC_MODE_COMPARTMENT) ? comp : NULL, GC_NORMAL, gcreason::MAYBEGC);
+    if (comp->gcBytes > 8192 &&
+        comp->gcBytes >= 3 * (comp->gcTriggerBytes / 4) &&
+        rt->gcIncrementalState == NO_INCREMENTAL)
+    {
+        GCSlice(cx, NULL, GC_NORMAL, gcreason::MAYBEGC);
         return;
     }
 
     /*
      * Access to the counters and, on 32 bit, setting gcNextFullGCTime below
      * is not atomic and a race condition could trigger or suppress the GC. We
      * tolerate this.
      */
     int64_t now = PRMJ_Now();
     if (rt->gcNextFullGCTime && rt->gcNextFullGCTime <= now) {
         if (rt->gcChunkAllocationSinceLastGC ||
             rt->gcNumArenasFreeCommitted > FreeCommittedArenasThreshold)
         {
-            js_GC(cx, NULL, GC_SHRINK, gcreason::MAYBEGC);
+            GCSlice(cx, NULL, GC_SHRINK, gcreason::MAYBEGC);
         } else {
             rt->gcNextFullGCTime = now + GC_IDLE_FULL_SPAN;
         }
     }
 }
 
 static void
 DecommitArenasFromAvailableList(JSRuntime *rt, Chunk **availableListHeadp)
@@ -2597,86 +2927,230 @@ SweepCompartments(JSContext *cx, JSGCInv
             continue;
         }
         *write++ = compartment;
     }
     rt->compartments.resize(write - rt->compartments.begin());
 }
 
 static void
-BeginMarkPhase(JSContext *cx, GCMarker *gcmarker, JSGCInvocationKind gckind)
+PurgeRuntime(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime;
 
     for (GCCompartmentsIter c(rt); !c.done(); c.next())
         c->purge(cx);
 
     rt->purge(cx);
 
     {
         JSContext *iter = NULL;
         while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter))
             acx->purge();
     }
+}
+
+static void
+BeginMarkPhase(JSContext *cx)
+{
+    JSRuntime *rt = cx->runtime;
+    GCMarker *gcmarker = &rt->gcMarker;
+
+    rt->gcStartNumber = rt->gcNumber;
+
+    /* Reset weak map list. */
+    WeakMapBase::resetWeakMapList(rt);
+
+    /*
+     * We must purge the runtime at the beginning of an incremental GC. The
+     * danger if we purge later is that the snapshot invariant of incremental
+     * GC will be broken, as follows. If some object is reachable only through
+     * some cache (say the dtoaCache) then it will not be part of the snapshot.
+     * If we purge after root marking, then the mutator could obtain a pointer
+     * to the object and start using it. This object might never be marked, so
+     * a GC hazard would exist.
+     */
+    PurgeRuntime(cx);
 
     /*
      * Mark phase.
      */
-    rt->gcStats.beginPhase(gcstats::PHASE_MARK);
+    gcstats::AutoPhase ap1(rt->gcStats, gcstats::PHASE_MARK);
+    gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_MARK_ROOTS);
 
     for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
         r.front()->bitmap.clear();
 
-    if (rt->gcCurrentCompartment) {
-        for (CompartmentsIter c(rt); !c.done(); c.next())
-            c->markCrossCompartmentWrappers(gcmarker);
-        Debugger::markCrossCompartmentDebuggerObjectReferents(gcmarker);
+    MarkRuntime(gcmarker);
+}
+
+void
+MarkWeakReferences(GCMarker *gcmarker)
+{
+    JS_ASSERT(gcmarker->isDrained());
+    while (WatchpointMap::markAllIteratively(gcmarker) ||
+           WeakMapBase::markAllIteratively(gcmarker) ||
+           Debugger::markAllIteratively(gcmarker))
+    {
+        SliceBudget budget;
+        gcmarker->drainMarkStack(budget);
     }
-
-    MarkRuntime(gcmarker);
+    JS_ASSERT(gcmarker->isDrained());
 }
 
 static void
-EndMarkPhase(JSContext *cx, GCMarker *gcmarker, JSGCInvocationKind gckind)
+MarkGrayAndWeak(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime;
-
-    JS_ASSERT(gcmarker->isMarkStackEmpty());
+    FullGCMarker *gcmarker = &rt->gcMarker;
+
+    JS_ASSERT(gcmarker->isDrained());
+    MarkWeakReferences(gcmarker);
+
+    gcmarker->setMarkColorGray();
+    if (gcmarker->hasBufferedGrayRoots()) {
+        gcmarker->markBufferedGrayRoots();
+    } else {
+        if (JSTraceDataOp op = rt->gcGrayRootsTraceOp)
+            (*op)(gcmarker, rt->gcGrayRootsData);
+    }
+    SliceBudget budget;
+    gcmarker->drainMarkStack(budget);
     MarkWeakReferences(gcmarker);
-
-    if (JSTraceDataOp op = rt->gcGrayRootsTraceOp) {
-        gcmarker->setMarkColorGray();
-        (*op)(gcmarker, rt->gcGrayRootsData);
-        gcmarker->drainMarkStack();
-        MarkWeakReferences(gcmarker);
+    JS_ASSERT(gcmarker->isDrained());
+}
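The gray roots buffered during root marking are replayed here; when buffering failed (OOM in appendGrayRoot), the embedding's trace op is simply run again. As a sketch of the whole lifecycle across MarkRuntime and MarkGrayAndWeak:

/*
 * Gray root lifecycle (sketch of the flow above):
 *
 *   MarkRuntime:      startBufferingGrayRoots();
 *                     (*gcGrayRootsTraceOp)(gcmarker, data);  // append only
 *                     endBufferingGrayRoots();
 *   MarkGrayAndWeak:  setMarkColorGray();
 *                     markBufferedGrayRoots();   // or re-run the trace op
 *                     drainMarkStack(budget);
 *                     MarkWeakReferences(gcmarker);
 */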
+
+#ifdef DEBUG
+static void
+ValidateIncrementalMarking(JSContext *cx);
+#endif
+
+static void
+EndMarkPhase(JSContext *cx)
+{
+    JSRuntime *rt = cx->runtime;
+
+    {
+        gcstats::AutoPhase ap1(rt->gcStats, gcstats::PHASE_MARK);
+        gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_MARK_OTHER);
+        MarkGrayAndWeak(cx);
     }
 
-    JS_ASSERT(gcmarker->isMarkStackEmpty());
-    rt->gcIncrementalTracer = NULL;
-
-    rt->gcStats.endPhase(gcstats::PHASE_MARK);
+    JS_ASSERT(rt->gcMarker.isDrained());
+
+#ifdef DEBUG
+    if (rt->gcIncrementalState != NO_INCREMENTAL)
+        ValidateIncrementalMarking(cx);
+#endif
 
     if (rt->gcCallback)
         (void) rt->gcCallback(cx, JSGC_MARK_END);
 
 #ifdef DEBUG
     /* Make sure that we didn't mark an object in another compartment */
     if (rt->gcCurrentCompartment) {
         for (CompartmentsIter c(rt); !c.done(); c.next()) {
             JS_ASSERT_IF(c != rt->gcCurrentCompartment && c != rt->atomsCompartment,
                          c->arenas.checkArenaListAllUnmarked());
         }
     }
 #endif
 }
 
+#ifdef DEBUG
 static void
-SweepPhase(JSContext *cx, GCMarker *gcmarker, JSGCInvocationKind gckind)
+ValidateIncrementalMarking(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime;
+    FullGCMarker *gcmarker = &rt->gcMarker;
+
+    js::gc::State state = rt->gcIncrementalState;
+    rt->gcIncrementalState = NO_INCREMENTAL;
+
+    /* As we're re-doing marking, we need to reset the weak map list. */
+    WeakMapBase::resetWeakMapList(rt);
+
+    JS_ASSERT(gcmarker->isDrained());
+    gcmarker->reset();
+
+    typedef HashMap<Chunk *, uintptr_t *> BitmapMap;
+    BitmapMap map(cx);
+    map.init();
+
+    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) {
+        ChunkBitmap *bitmap = &r.front()->bitmap;
+        uintptr_t *entry = (uintptr_t *)js_malloc(sizeof(bitmap->bitmap));
+        if (entry)
+            memcpy(entry, bitmap->bitmap, sizeof(bitmap->bitmap));
+        map.putNew(r.front(), entry);
+    }
+
+    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
+        r.front()->bitmap.clear();
+
+    MarkRuntime(gcmarker, true);
+    SliceBudget budget;
+    rt->gcMarker.drainMarkStack(budget);
+    MarkGrayAndWeak(cx);
+
+    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) {
+        Chunk *chunk = r.front();
+        ChunkBitmap *bitmap = &chunk->bitmap;
+        uintptr_t *entry = map.lookup(r.front())->value;
+        ChunkBitmap incBitmap;
+
+        if (!entry)
+            continue;
+
+        memcpy(incBitmap.bitmap, entry, sizeof(incBitmap.bitmap));
+        js_free(entry);
+
+        for (size_t i = 0; i < ArenasPerChunk; i++) {
+            Arena *arena = &chunk->arenas[i];
+            if (!arena->aheader.allocated())
+                continue;
+            if (rt->gcCurrentCompartment && arena->aheader.compartment != rt->gcCurrentCompartment)
+                continue;
+            if (arena->aheader.allocatedDuringIncremental)
+                continue;
+
+            AllocKind kind = arena->aheader.getAllocKind();
+            uintptr_t thing = arena->thingsStart(kind);
+            uintptr_t end = arena->thingsEnd();
+            while (thing < end) {
+                Cell *cell = (Cell *)thing;
+                if (bitmap->isMarked(cell, BLACK) && !incBitmap.isMarked(cell, BLACK)) {
+                    JS_DumpHeap(cx, stdout, NULL, JSGCTraceKind(0), NULL, 100000, NULL);
+                    printf("Assertion cell: %p (%d)\n", (void *)cell, cell->getAllocKind());
+                }
+                JS_ASSERT_IF(bitmap->isMarked(cell, BLACK), incBitmap.isMarked(cell, BLACK));
+                thing += Arena::thingSize(kind);
+            }
+        }
+
+        memcpy(bitmap->bitmap, incBitmap.bitmap, sizeof(incBitmap.bitmap));
+    }
+
+    rt->gcIncrementalState = state;
+}
+#endif
+
+static void
+SweepPhase(JSContext *cx, JSGCInvocationKind gckind)
+{
+    JSRuntime *rt = cx->runtime;
+
+#ifdef JS_THREADSAFE
+    if (rt->hasContexts() && rt->gcHelperThread.prepareForBackgroundSweep())
+        cx->gcBackgroundFree = &rt->gcHelperThread;
+#endif
+
+    /* Purge the ArenaLists before sweeping. */
+    for (GCCompartmentsIter c(rt); !c.done(); c.next())
+        c->arenas.purge();
 
     /*
      * Sweep phase.
      *
      * Finalize as we sweep, outside of rt->gcLock but with rt->gcRunning set
      * so that any attempt to allocate a GC-thing from a finalizer will fail,
      * rather than nest badly and leave the unmarked newborn to be swept.
      *
@@ -2685,17 +3159,17 @@ SweepPhase(JSContext *cx, GCMarker *gcma
      * freed. Note that even after the entry is freed, JSObject finalizers can
      * continue to access the corresponding JSString* assuming that they are
      * unique. This works since the atomization API must not be called during
      * the GC.
      */
     gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP);
 
     /* Finalize unreachable (key,value) pairs in all weak maps. */
-    WeakMapBase::sweepAll(gcmarker);
+    WeakMapBase::sweepAll(&rt->gcMarker);
 
     js_SweepAtomState(rt);
 
     /* Collect watch points associated with unreachable objects. */
     WatchpointMap::sweepAll(rt);
 
     if (!rt->gcCurrentCompartment)
         Debugger::sweepAll(cx);
@@ -2766,218 +3240,190 @@ SweepPhase(JSContext *cx, GCMarker *gcma
 #endif
     }
 
     {
         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_XPCONNECT);
         if (rt->gcCallback)
             (void) rt->gcCallback(cx, JSGC_FINALIZE_END);
     }
+
+    for (CompartmentsIter c(rt); !c.done(); c.next())
+        c->setGCLastBytes(c->gcBytes, gckind);
 }
 
 /* Perform mark-and-sweep GC. If comp is set, we perform a single-compartment GC. */
 static void
 MarkAndSweep(JSContext *cx, JSGCInvocationKind gckind)
 {
     JSRuntime *rt = cx->runtime;
-    rt->gcNumber++;
-
-    /* Clear gcIsNeeded now, when we are about to start a normal GC cycle. */
-    rt->gcIsNeeded = false;
-    rt->gcTriggerCompartment = NULL;
-    
-    /* Clear gcMallocBytes for all compartments */
-    JSCompartment **read = rt->compartments.begin();
-    JSCompartment **end = rt->compartments.end();
-    JS_ASSERT(rt->compartments.length() >= 1);
-    
-    while (read < end) {
-        JSCompartment *compartment = *read++;
-        compartment->resetGCMallocBytes();
-    }
-
-    /* Reset weak map list. */
-    WeakMapBase::resetWeakMapList(rt);
-
-    /* Reset malloc counter. */
-    rt->resetGCMallocBytes();
 
     AutoUnlockGC unlock(rt);
 
-    GCMarker gcmarker(cx);
-    JS_ASSERT(IS_GC_MARKING_TRACER(&gcmarker));
-    JS_ASSERT(gcmarker.getMarkColor() == BLACK);
-    rt->gcIncrementalTracer = &gcmarker;
-
-    BeginMarkPhase(cx, &gcmarker, gckind);
-    gcmarker.drainMarkStack();
-    EndMarkPhase(cx, &gcmarker, gckind);
-    SweepPhase(cx, &gcmarker, gckind);
+    rt->gcMarker.start(rt, cx);
+    JS_ASSERT(!rt->gcMarker.callback);
+
+    BeginMarkPhase(cx);
+    {
+        gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_MARK);
+        SliceBudget budget;
+        rt->gcMarker.drainMarkStack(budget);
+    }
+    EndMarkPhase(cx);
+    SweepPhase(cx, gckind);
+
+    rt->gcMarker.stop();
 }
 
-class AutoGCSession {
+/*
+ * This class should be used by any code that needs exclusive access to the
+ * heap in order to trace through it...
+ */
+class AutoHeapSession {
   public:
-    explicit AutoGCSession(JSContext *cx);
+    explicit AutoHeapSession(JSContext *cx);
+    ~AutoHeapSession();
+
+  protected:
+    JSContext *context;
+
+  private:
+    AutoHeapSession(const AutoHeapSession&) MOZ_DELETE;
+    void operator=(const AutoHeapSession&) MOZ_DELETE;
+};
+
+/* ...while this class is to be used only for garbage collection. */
+class AutoGCSession : AutoHeapSession {
+  public:
+    explicit AutoGCSession(JSContext *cx, JSCompartment *comp);
     ~AutoGCSession();
 
   private:
-    JSContext   *context;
-
-    AutoGCSession(const AutoGCSession&) MOZ_DELETE;
-    void operator=(const AutoGCSession&) MOZ_DELETE;
+    /*
+     * We should not be depending on cx->compartment in the GC, so set it to
+     * NULL to look for violations.
+     */
+    SwitchToCompartment switcher;
 };
 
-/* Start a new GC session. */
-AutoGCSession::AutoGCSession(JSContext *cx)
+/* Start a new heap session. */
+AutoHeapSession::AutoHeapSession(JSContext *cx)
   : context(cx)
 {
     JS_ASSERT(!cx->runtime->noGCOrAllocationCheck);
     JSRuntime *rt = cx->runtime;
     JS_ASSERT(!rt->gcRunning);
     rt->gcRunning = true;
 }
 
-AutoGCSession::~AutoGCSession()
+AutoHeapSession::~AutoHeapSession()
 {
     JSRuntime *rt = context->runtime;
     rt->gcRunning = false;
 }
 
-/*
- * GC, repeatedly if necessary, until we think we have not created any new
- * garbage. We disable inlining to ensure that the bottom of the stack with
- * possible GC roots recorded in js_GC excludes any pointers we use during the
- * marking implementation.
- */
-static JS_NEVER_INLINE void
-GCCycle(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind)
+AutoGCSession::AutoGCSession(JSContext *cx, JSCompartment *comp)
+  : AutoHeapSession(cx),
+    switcher(cx, (JSCompartment *)NULL)
 {
     JSRuntime *rt = cx->runtime;
 
-    JS_ASSERT_IF(comp, comp != rt->atomsCompartment);
-    JS_ASSERT_IF(comp, rt->gcMode == JSGC_MODE_COMPARTMENT);
-
-    /* Recursive GC is no-op. */
-    if (rt->gcMarkAndSweep)
-        return;
-
-    AutoGCSession gcsession(cx);
-
-    /* Don't GC if we are reporting an OOM. */
-    if (rt->inOOMReport)
-        return;
-
-    /*
-     * We should not be depending on cx->compartment in the GC, so set it to
-     * NULL to look for violations.
-     */
-    SwitchToCompartment sc(cx, (JSCompartment *)NULL);
-
     JS_ASSERT(!rt->gcCurrentCompartment);
     rt->gcCurrentCompartment = comp;
 
-    rt->gcMarkAndSweep = true;
-
-#ifdef JS_THREADSAFE
-    /*
-     * As we about to purge caches and clear the mark bits we must wait for
-     * any background finalization to finish. We must also wait for the
-     * background allocation to finish so we can avoid taking the GC lock
-     * when manipulating the chunks during the GC.
-     */
-    JS_ASSERT(!cx->gcBackgroundFree);
-    rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
-    if (rt->hasContexts() && rt->gcHelperThread.prepareForBackgroundSweep())
-        cx->gcBackgroundFree = &rt->gcHelperThread;
-#endif
-
-    MarkAndSweep(cx, gckind);
-
-#ifdef JS_THREADSAFE
-    if (cx->gcBackgroundFree) {
-        JS_ASSERT(cx->gcBackgroundFree == &rt->gcHelperThread);
-        cx->gcBackgroundFree = NULL;
-        rt->gcHelperThread.startBackgroundSweep(cx, gckind == GC_SHRINK);
-    }
-#endif
-
-    rt->gcMarkAndSweep = false;
-    rt->gcCurrentCompartment = NULL;
-
+    rt->gcIsNeeded = false;
+    rt->gcTriggerCompartment = NULL;
+    rt->gcInterFrameGC = true;
+
+    rt->gcNumber++;
+
+    rt->resetGCMallocBytes();
+
+    /* Clear gcMallocBytes for all compartments */
     for (CompartmentsIter c(rt); !c.done(); c.next())
-        c->setGCLastBytes(c->gcBytes, gckind);
+        c->resetGCMallocBytes();
 }
 
-void
-js_GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, gcreason::Reason reason)
+AutoGCSession::~AutoGCSession()
 {
-    JSRuntime *rt = cx->runtime;
-    JS_AbortIfWrongThread(rt);
-
-#ifdef JS_GC_ZEAL
-    struct AutoVerifyBarriers {
-        JSContext *cx;
-        bool inVerify;
-        AutoVerifyBarriers(JSContext *cx) : cx(cx), inVerify(cx->runtime->gcVerifyData) {
-            if (inVerify) EndVerifyBarriers(cx);
-        }
-        ~AutoVerifyBarriers() { if (inVerify) StartVerifyBarriers(cx); }
-    } av(cx);
-#endif
-
-    RecordNativeStackTopForGC(cx);
-
-    gcstats::AutoGC agc(rt->gcStats, comp, reason);
-
-    do {
-        /*
-         * Let the API user decide to defer a GC if it wants to (unless this
-         * is the last context).  Invoke the callback regardless. Sample the
-         * callback in case we are freely racing with a JS_SetGCCallback{,RT}
-         * on another thread.
-         */
-        if (JSGCCallback callback = rt->gcCallback) {
-            if (!callback(cx, JSGC_BEGIN) && rt->hasContexts())
-                return;
-        }
-
-        {
-            /* Lock out other GC allocator and collector invocations. */
-            AutoLockGC lock(rt);
-            rt->gcPoke = false;
-            GCCycle(cx, comp, gckind);
-        }
-
-        /* We re-sample the callback again as the finalizers can change it. */
-        if (JSGCCallback callback = rt->gcCallback)
-            (void) callback(cx, JSGC_END);
-
-        /*
-         * On shutdown, iterate until finalizers or the JSGC_END callback
-         * stop creating garbage.
-         */
-    } while (!rt->hasContexts() && rt->gcPoke);
-
+    JSRuntime *rt = context->runtime;
+
+    rt->gcCurrentCompartment = NULL;
     rt->gcNextFullGCTime = PRMJ_Now() + GC_IDLE_FULL_SPAN;
-
     rt->gcChunkAllocationSinceLastGC = false;
 }
 
-namespace js {
-
-void
-ShrinkGCBuffers(JSRuntime *rt)
+static void
+ResetIncrementalGC(JSRuntime *rt)
 {
-    AutoLockGC lock(rt);
-    JS_ASSERT(!rt->gcRunning);
-#ifndef JS_THREADSAFE
-    ExpireChunksAndArenas(rt, true);
-#else
-    rt->gcHelperThread.startBackgroundShrink();
-#endif
+    if (rt->gcIncrementalState == NO_INCREMENTAL)
+        return;
+
+    for (CompartmentsIter c(rt); !c.done(); c.next()) {
+        if (!rt->gcIncrementalCompartment || rt->gcIncrementalCompartment == c) {
+            c->needsBarrier_ = false;
+            c->barrierMarker_.reset();
+            c->barrierMarker_.stop();
+        }
+        JS_ASSERT(!c->needsBarrier_);
+    }
+
+    rt->gcIncrementalCompartment = NULL;
+    rt->gcMarker.reset();
+    rt->gcMarker.stop();
+    rt->gcIncrementalState = NO_INCREMENTAL;
+
+    rt->gcStats.reset();
+}
+
+class AutoGCSlice {
+  public:
+    AutoGCSlice(JSContext *cx);
+    ~AutoGCSlice();
+
+  private:
+    JSContext *context;
+};
+
+AutoGCSlice::AutoGCSlice(JSContext *cx)
+  : context(cx)
+{
+    JSRuntime *rt = context->runtime;
+
+    /*
+     * During incremental GC, the compartment's active flag determines whether
+     * there are stack frames active for any of its scripts. Normally this flag
+     * is set at the beginning of the mark phase. During incremental GC, we also
+     * set it at the start of every phase.
+     */
+    rt->stackSpace.markActiveCompartments();
+
+    for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
+        /* Clear this early so we don't do any write barriers during GC. */
+        if (rt->gcIncrementalState == MARK)
+            c->needsBarrier_ = false;
+        else
+            JS_ASSERT(!c->needsBarrier_);
+    }
+}
+
+AutoGCSlice::~AutoGCSlice()
+{
+    JSRuntime *rt = context->runtime;
+
+    for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
+        if (rt->gcIncrementalState == MARK) {
+            c->needsBarrier_ = true;
+            c->arenas.prepareForIncrementalGC(c);
+        } else {
+            JS_ASSERT(rt->gcIncrementalState == NO_INCREMENTAL);
+
+            c->needsBarrier_ = false;
+        }
+    }
 }
 
 class AutoCopyFreeListToArenas {
     JSRuntime *rt;
 
   public:
     AutoCopyFreeListToArenas(JSRuntime *rt)
       : rt(rt) {
@@ -2986,28 +3432,318 @@ class AutoCopyFreeListToArenas {
     }
 
     ~AutoCopyFreeListToArenas() {
         for (CompartmentsIter c(rt); !c.done(); c.next())
             c->arenas.clearFreeListsInArenas();
     }
 };
 
+static void
+IncrementalGCSlice(JSContext *cx, int64_t budget, JSGCInvocationKind gckind)
+{
+    JS_ASSERT(budget != SliceBudget::Unlimited);
+
+    JSRuntime *rt = cx->runtime;
+
+    AutoUnlockGC unlock(rt);
+    AutoGCSlice slice(cx);
+
+    gc::State initialState = rt->gcIncrementalState;
+
+    if (rt->gcIncrementalState == NO_INCREMENTAL) {
+        JS_ASSERT(!rt->gcIncrementalCompartment);
+        rt->gcIncrementalCompartment = rt->gcCurrentCompartment;
+        rt->gcIncrementalState = MARK_ROOTS;
+        rt->gcLastMarkSlice = false;
+    }
+
+    if (rt->gcIncrementalState == MARK_ROOTS) {
+        rt->gcMarker.start(rt, cx);
+        JS_ASSERT(IS_GC_MARKING_TRACER(&rt->gcMarker));
+
+        for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
+            c->discardJitCode(cx);
+            c->barrierMarker_.start(rt, NULL);
+        }
+
+        BeginMarkPhase(cx);
+
+        rt->gcIncrementalState = MARK;
+    }
+
+    if (rt->gcIncrementalState == MARK) {
+        gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_MARK);
+        SliceBudget sliceBudget(budget);
+
+        /* If we needed delayed marking for gray roots, then collect until done. */
+        if (!rt->gcMarker.hasBufferedGrayRoots())
+            sliceBudget.reset();
+
+        bool finished = rt->gcMarker.drainMarkStack(sliceBudget);
+
+        for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
+            c->barrierMarker_.context = cx;
+            finished &= c->barrierMarker_.drainMarkStack(sliceBudget);
+            c->barrierMarker_.context = NULL;
+        }
+
+        if (finished) {
+            JS_ASSERT(rt->gcMarker.isDrained());
+#ifdef DEBUG
+            for (GCCompartmentsIter c(rt); !c.done(); c.next())
+                JS_ASSERT(c->barrierMarker_.isDrained());
+#endif
+            if (initialState == MARK && !rt->gcLastMarkSlice)
+                rt->gcLastMarkSlice = true;
+            else
+                rt->gcIncrementalState = SWEEP;
+        }
+    }
+
+    if (rt->gcIncrementalState == SWEEP) {
+        EndMarkPhase(cx);
+        SweepPhase(cx, gckind);
+
+        rt->gcMarker.stop();
+
+        /* JIT code was already discarded during sweeping. */
+        for (GCCompartmentsIter c(rt); !c.done(); c.next())
+            c->barrierMarker_.stop();
+
+        rt->gcIncrementalCompartment = NULL;
+
+        rt->gcIncrementalState = NO_INCREMENTAL;
+    }
+}
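The slice function above is a small state machine; MARK can span many slices, while the surrounding phases complete inside the slice that reaches them:

/*
 * NO_INCREMENTAL -> MARK_ROOTS -> MARK ... MARK -> SWEEP -> NO_INCREMENTAL
 *
 * gcLastMarkSlice forces one extra mark slice after the mark stacks first
 * drain, before the transition to SWEEP.
 */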
+
+static bool
+IsIncrementalGCSafe(JSContext *cx)
+{
+    JSRuntime *rt = cx->runtime;
+
+    if (rt->gcCompartmentCreated) {
+        rt->gcCompartmentCreated = false;
+        return false;
+    }
+
+    if (rt->gcKeepAtoms)
+        return false;
+
+    for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
+        if (c->activeAnalysis)
+            return false;
+    }
+
+    if (rt->gcIncrementalState != NO_INCREMENTAL &&
+        rt->gcCurrentCompartment != rt->gcIncrementalCompartment)
+    {
+        return false;
+    }
+
+    if (!rt->gcIncrementalEnabled)
+        return false;
+
+    return true;
+}
+
+static bool
+IsIncrementalGCAllowed(JSContext *cx)
+{
+    JSRuntime *rt = cx->runtime;
+
+    if (rt->gcMode != JSGC_MODE_INCREMENTAL)
+        return false;
+
+#ifdef ANDROID
+    /* Incremental GC is disabled on Android for now. */
+    return false;
+#endif
+
+    if (!IsIncrementalGCSafe(cx))
+        return false;
+
+    for (CompartmentsIter c(rt); !c.done(); c.next()) {
+        if (c->gcBytes > c->gcTriggerBytes)
+            return false;
+    }
+
+    return true;
+}
+
+/*
+ * GC, repeatedly if necessary, until we think we have not created any new
+ * garbage. We disable inlining to ensure that the bottom of the stack with
+ * possible GC roots recorded in js_GC excludes any pointers we use during the
+ * marking implementation.
+ */
+static JS_NEVER_INLINE void
+GCCycle(JSContext *cx, JSCompartment *comp, int64_t budget, JSGCInvocationKind gckind)
+{
+    JSRuntime *rt = cx->runtime;
+
+    JS_ASSERT_IF(comp, comp != rt->atomsCompartment);
+    JS_ASSERT_IF(comp, rt->gcMode != JSGC_MODE_GLOBAL);
+
+    /* Recursive GC is no-op. */
+    if (rt->gcRunning)
+        return;
+
+    AutoGCSession gcsession(cx, comp);
+
+    /* Don't GC if we are reporting an OOM. */
+    if (rt->inOOMReport)
+        return;
+
+#ifdef JS_THREADSAFE
+    /*
+     * As we are about to purge caches and clear the mark bits, we must wait
+     * for
+     * any background finalization to finish. We must also wait for the
+     * background allocation to finish so we can avoid taking the GC lock
+     * when manipulating the chunks during the GC.
+     */
+    JS_ASSERT(!cx->gcBackgroundFree);
+    rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
+#endif
+
+    if (budget != SliceBudget::Unlimited) {
+        if (!IsIncrementalGCAllowed(cx))
+            budget = SliceBudget::Unlimited;
+    }
+
+    if (budget == SliceBudget::Unlimited)
+        ResetIncrementalGC(rt);
+
+    AutoCopyFreeListToArenas copy(rt);
+
+    if (budget == SliceBudget::Unlimited)
+        MarkAndSweep(cx, gckind);
+    else
+        IncrementalGCSlice(cx, budget, gckind);
+
+#ifdef DEBUG
+    if (rt->gcIncrementalState == NO_INCREMENTAL) {
+        for (CompartmentsIter c(rt); !c.done(); c.next())
+            JS_ASSERT(!c->needsBarrier_);
+    }
+#endif
+#ifdef JS_THREADSAFE
+    if (rt->gcIncrementalState == NO_INCREMENTAL) {
+        if (cx->gcBackgroundFree) {
+            JS_ASSERT(cx->gcBackgroundFree == &rt->gcHelperThread);
+            cx->gcBackgroundFree = NULL;
+            rt->gcHelperThread.startBackgroundSweep(cx, gckind == GC_SHRINK);
+        }
+    }
+#endif
+}
+
+static void
+Collect(JSContext *cx, JSCompartment *comp, int64_t budget,
+        JSGCInvocationKind gckind, gcreason::Reason reason)
+{
+    JSRuntime *rt = cx->runtime;
+    JS_AbortIfWrongThread(rt);
+
+    JS_ASSERT_IF(budget != SliceBudget::Unlimited, JSGC_INCREMENTAL);
+
+#ifdef JS_GC_ZEAL
+    struct AutoVerifyBarriers {
+        JSContext *cx;
+        bool inVerify;
+        AutoVerifyBarriers(JSContext *cx) : cx(cx), inVerify(cx->runtime->gcVerifyData) {
+            if (inVerify) EndVerifyBarriers(cx);
+        }
+        ~AutoVerifyBarriers() { if (inVerify) StartVerifyBarriers(cx); }
+    } av(cx);
+#endif
+
+    RecordNativeStackTopForGC(cx);
+
+    /*
+     * If an incremental GC is already running as a full GC (it has no
+     * single target compartment), widen this request to a full GC as well
+     * rather than forcing the incremental state to reset.
+     */
+    if (rt->gcIncrementalState != NO_INCREMENTAL && !rt->gcIncrementalCompartment)
+        comp = NULL;
+
+    gcstats::AutoGCSlice agc(rt->gcStats, comp, reason);
+
+    do {
+        /*
+         * Let the API user decide to defer a GC if it wants to (unless this
+         * is the last context). Invoke the callback regardless.
+         */
+        if (rt->gcIncrementalState == NO_INCREMENTAL) {
+            if (JSGCCallback callback = rt->gcCallback) {
+                if (!callback(cx, JSGC_BEGIN) && rt->hasContexts())
+                    return;
+            }
+        }
+
+        {
+            /* Lock out other GC allocator and collector invocations. */
+            AutoLockGC lock(rt);
+            rt->gcPoke = false;
+            GCCycle(cx, comp, budget, gckind);
+        }
+
+        if (rt->gcIncrementalState == NO_INCREMENTAL) {
+            if (JSGCCallback callback = rt->gcCallback)
+                (void) callback(cx, JSGC_END);
+        }
+
+        /*
+         * On shutdown, iterate until finalizers or the JSGC_END callback
+         * stop creating garbage.
+         */
+    } while (!rt->hasContexts() && rt->gcPoke);
+}
+
+namespace js {
+
+void
+GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, gcreason::Reason reason)
+{
+    Collect(cx, comp, SliceBudget::Unlimited, gckind, reason);
+}
+
+void
+GCSlice(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, gcreason::Reason reason)
+{
+    Collect(cx, comp, cx->runtime->gcSliceBudget, gckind, reason);
+}
+
+void
+GCDebugSlice(JSContext *cx, int64_t objCount)
+{
+    Collect(cx, NULL, SliceBudget::WorkBudget(objCount), GC_NORMAL, gcreason::API);
+}
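These entry points give the embedding three options: an immediate full collection (GC), a budgeted slice (GCSlice, using the runtime's configured slice budget), and a work-counted slice for testing (GCDebugSlice). A hypothetical driver loop, assuming the embedding sets gcSliceBudget directly rather than through an API:

/* Run an incremental GC to completion in ~10 ms slices (sketch). */
cx->runtime->gcSliceBudget = SliceBudget::TimeBudget(10);
do {
    GCSlice(cx, NULL, GC_NORMAL, gcreason::MAYBEGC);
} while (cx->runtime->gcIncrementalState != NO_INCREMENTAL);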
+
+void
+ShrinkGCBuffers(JSRuntime *rt)
+{
+    AutoLockGC lock(rt);
+    JS_ASSERT(!rt->gcRunning);
+#ifndef JS_THREADSAFE
+    ExpireChunksAndArenas(rt, true);
+#else
+    rt->gcHelperThread.startBackgroundShrink();
+#endif
+}
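
The three entry points above differ only in the budget they hand to Collect():
GC() forces a complete, unbudgeted collection, GCSlice() uses the slice budget
configured on the runtime, and GCDebugSlice() bounds a slice by an object
count. As a hedged sketch (the driver loop and reason are illustrative, not
part of the patch), an embedder could push an incremental collection to
completion like so:

    // Run budgeted slices until the state machine returns to NO_INCREMENTAL.
    // Passing NULL for |comp| requests a full (all-compartments) GC.
    do {
        js::GCSlice(cx, NULL, js::GC_NORMAL, js::gcreason::API);
    } while (cx->runtime->gcIncrementalState != js::gc::NO_INCREMENTAL);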
+
 void
 TraceRuntime(JSTracer *trc)
 {
     JS_ASSERT(!IS_GC_MARKING_TRACER(trc));
 
 #ifdef JS_THREADSAFE
     {
         JSContext *cx = trc->context;
         JSRuntime *rt = cx->runtime;
         if (!rt->gcRunning) {
             AutoLockGC lock(rt);
-            AutoGCSession gcsession(cx);
+            AutoHeapSession session(cx);
 
             rt->gcHelperThread.waitBackgroundSweepEnd();
             AutoUnlockGC unlock(rt);
 
             AutoCopyFreeListToArenas copy(rt);
             RecordNativeStackTopForGC(trc->context);
             MarkRuntime(trc);
             return;
@@ -3058,17 +3794,17 @@ IterateCompartments(JSContext *cx, void 
                     IterateCompartmentCallback compartmentCallback)
 {
     CHECK_REQUEST(cx);
 
     JSRuntime *rt = cx->runtime;
     JS_ASSERT(!rt->gcRunning);
 
     AutoLockGC lock(rt);
-    AutoGCSession gcsession(cx);
+    AutoHeapSession session(cx);
 #ifdef JS_THREADSAFE
     rt->gcHelperThread.waitBackgroundSweepEnd();
 #endif
     AutoUnlockGC unlock(rt);
 
     AutoCopyFreeListToArenas copy(rt);
     for (CompartmentsIter c(rt); !c.done(); c.next()) {
         (*compartmentCallback)(cx, data, c);
@@ -3082,17 +3818,17 @@ IterateCompartmentsArenasCells(JSContext
                                IterateCellCallback cellCallback)
 {
     CHECK_REQUEST(cx);
 
     JSRuntime *rt = cx->runtime;
     JS_ASSERT(!rt->gcRunning);
 
     AutoLockGC lock(rt);
-    AutoGCSession gcsession(cx);
+    AutoHeapSession session(cx);
 #ifdef JS_THREADSAFE
     rt->gcHelperThread.waitBackgroundSweepEnd();
 #endif
     AutoUnlockGC unlock(rt);
 
     AutoCopyFreeListToArenas copy(rt);
     for (CompartmentsIter c(rt); !c.done(); c.next()) {
         (*compartmentCallback)(cx, data, c);
@@ -3112,17 +3848,17 @@ IterateChunks(JSContext *cx, void *data,
 {
     /* :XXX: Any way to common this preamble with IterateCompartmentsArenasCells? */
     CHECK_REQUEST(cx);
 
     JSRuntime *rt = cx->runtime;
     JS_ASSERT(!rt->gcRunning);
 
     AutoLockGC lock(rt);
-    AutoGCSession gcsession(cx);
+    AutoHeapSession session(cx);
 #ifdef JS_THREADSAFE
     rt->gcHelperThread.waitBackgroundSweepEnd();
 #endif
     AutoUnlockGC unlock(rt);
 
     for (js::GCChunkSet::Range r = rt->gcChunkSet.all(); !r.empty(); r.popFront())
         chunkCallback(cx, data, r.front());
 }
@@ -3133,17 +3869,17 @@ IterateCells(JSContext *cx, JSCompartmen
 {
     /* :XXX: Any way to common this preamble with IterateCompartmentsArenasCells? */
     CHECK_REQUEST(cx);
 
     JSRuntime *rt = cx->runtime;
     JS_ASSERT(!rt->gcRunning);
 
     AutoLockGC lock(rt);
-    AutoGCSession gcsession(cx);
+    AutoHeapSession session(cx);
 #ifdef JS_THREADSAFE
     rt->gcHelperThread.waitBackgroundSweepEnd();
 #endif
     AutoUnlockGC unlock(rt);
 
     AutoCopyFreeListToArenas copy(rt);
 
     JSGCTraceKind traceKind = MapAllocToTraceKind(thingKind);
@@ -3181,16 +3917,33 @@ NewCompartment(JSContext *cx, JSPrincipa
         compartment->setGCLastBytes(8192, GC_NORMAL);
 
         /*
          * Before reporting the OOM condition, |lock| needs to be cleaned up,
          * hence the scoping.
          */
         {
             AutoLockGC lock(rt);
+
+            /*
+             * If we're in the middle of an incremental GC, we cancel
+             * it. Otherwise we might fail to mark the newly created
+             * compartment fully.
+             */
+            if (rt->gcIncrementalState == MARK) {
+                rt->gcCompartmentCreated = true;
+
+                /*
+                 * Start the tracer so that it's legal to stop() it when
+                 * resetting the GC.
+                 */
+                if (!rt->gcIncrementalCompartment)
+                    compartment->barrierMarker_.start(rt, NULL);
+            }
+
             if (rt->compartments.append(compartment))
                 return compartment;
         }
 
         js_ReportOutOfMemory(cx);
     }
     Foreground::delete_(compartment);
     return NULL;
@@ -3224,17 +3977,17 @@ CheckStackRoot(JSTracer *trc, uintptr_t 
     VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w));
 #endif
 
     ConservativeGCTest test = MarkIfGCThingWord(trc, *w, DONT_MARK_THING);
 
     if (test == CGCT_VALID) {
         JSContext *iter = NULL;
         bool matched = false;
-        JSRuntime *rt = trc->context->runtime;
+        JSRuntime *rt = trc->runtime;
         while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter)) {
             for (unsigned i = 0; i < THING_ROOT_COUNT; i++) {
                 Root<Cell*> *rooter = acx->thingGCRooters[i];
                 while (rooter) {
                     if (rooter->address() == (Cell **) w)
                         matched = true;
                     rooter = rooter->previous();
                 }
@@ -3267,17 +4020,17 @@ CheckStackRootsRange(JSTracer *trc, uint
 }
 
 void
 CheckStackRoots(JSContext *cx)
 {
     AutoCopyFreeListToArenas copy(cx->runtime);
 
     JSTracer checker;
-    JS_TRACER_INIT(&checker, cx, EmptyMarkCallback);
+    JS_TracerInit(&checker, cx, EmptyMarkCallback);
 
     ThreadData *td = JS_THREAD_DATA(cx);
 
     ConservativeGCThreadData *ctd = &td->conservativeGC;
     ctd->recordStackTop();
 
     JS_ASSERT(ctd->hasStackToScan());
     uintptr_t *stackMin, *stackEnd;
@@ -3345,32 +4098,30 @@ typedef HashMap<void *, VerifyNode *> No
  * for the graph. If we run out of memory (i.e., if edgeptr goes beyond term),
  * we just abandon the verification.
  *
  * The nodemap field is a hashtable that maps from the address of the GC thing
  * to the VerifyNode that represents it.
  */
 struct VerifyTracer : JSTracer {
     /* The gcNumber when the verification began. */
-    uint32_t number;
+    uint64_t number;
 
     /* This counts up to the runtime's gcZealFrequency to decide whether to verify. */
     uint32_t count;
 
     /* This graph represents the initial GC "snapshot". */
     VerifyNode *curnode;
     VerifyNode *root;
     char *edgeptr;
     char *term;
     NodeMap nodemap;
 
-    /* A dummy marker used for the write barriers; stored in gcMarkingTracer. */
-    GCMarker gcmarker;
-
-    VerifyTracer(JSContext *cx) : nodemap(cx), gcmarker(cx) {}
+    VerifyTracer(JSContext *cx) : root(NULL), nodemap(cx) {}
+    ~VerifyTracer() { js_free(root); }
 };
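
VerifyTracer backs the write-barrier verifier: StartVerifyBarriers snapshots
the heap graph, and EndVerifyBarriers asserts that every edge modified since
then points at a marked cell. A hedged sketch of the bug class this is meant
to catch (the field names here are hypothetical):

    // While c->needsBarrier_ is set, a raw store skips the pre-barrier, so
    // the snapshot edge to the overwritten value is never marked:
    obj->rawSlot = newValue;    // EndVerifyBarriers would assert on this
    // A store through the barriered wrapper marks the old value first:
    obj->heapSlot = newValue;   // HeapValue assignment runs the pre-barrier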
 
 /*
  * This function builds up the heap snapshot by adding edges to the current
  * node.
  */
 static void
 AccumulateEdge(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
@@ -3424,55 +4175,41 @@ NextNode(VerifyNode *node)
 			      sizeof(EdgeValue)*(node->count - 1));
 }
 
 static void
 StartVerifyBarriers(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime;
 
-    if (rt->gcVerifyData)
+    if (rt->gcVerifyData || rt->gcIncrementalState != NO_INCREMENTAL)
         return;
 
     AutoLockGC lock(rt);
-    AutoGCSession gcsession(cx);
+    AutoHeapSession session(cx);
+
+    if (!IsIncrementalGCSafe(cx))
+        return;
 
 #ifdef JS_THREADSAFE
     rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
 #endif
 
     AutoUnlockGC unlock(rt);
 
     AutoCopyFreeListToArenas copy(rt);
     RecordNativeStackTopForGC(cx);
 
     for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
         r.front()->bitmap.clear();
 
-    /*
-     * Kick all frames on the stack into the interpreter, and release all JIT
-     * code in the compartment.
-     */
-#ifdef JS_METHODJIT
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
-        mjit::ClearAllFrames(c);
-
-        for (CellIterUnderGC i(c, FINALIZE_SCRIPT); !i.done(); i.next()) {
-            JSScript *script = i.get<JSScript>();
-            mjit::ReleaseScriptCode(cx, script);
-
-            /*
-             * Use counts for scripts are reset on GC. After discarding code we
-             * need to let it warm back up to get information like which opcodes
-             * are setting array holes or accessing getter properties.
-             */
-            script->resetUseCount();
-        }
-    }
-#endif
+    for (CompartmentsIter c(rt); !c.done(); c.next())
+        c->discardJitCode(cx);
+
+    PurgeRuntime(cx);
 
     VerifyTracer *trc = new (js_malloc(sizeof(VerifyTracer))) VerifyTracer(cx);
 
     rt->gcNumber++;
     trc->number = rt->gcNumber;
     trc->count = 0;
 
     JS_TracerInit(trc, cx, AccumulateEdge);
@@ -3483,16 +4220,19 @@ StartVerifyBarriers(JSContext *cx)
     trc->edgeptr = (char *)trc->root;
     trc->term = trc->edgeptr + size;
 
     trc->nodemap.init();
 
     /* Create the root node. */
     trc->curnode = MakeNode(trc, NULL, JSGCTraceKind(0));
 
+    /* We want MarkRuntime to save the roots to gcSavedRoots. */
+    rt->gcIncrementalState = MARK_ROOTS;
+
     /* Make all the roots be edges emanating from the root node. */
     MarkRuntime(trc);
 
     VerifyNode *node = trc->curnode;
     if (trc->edgeptr == trc->term)
         goto oom;
 
     /* For each edge, make a node for it if one doesn't already exist. */
@@ -3507,65 +4247,93 @@ StartVerifyBarriers(JSContext *cx)
             if (trc->edgeptr == trc->term)
                 goto oom;
         }
 
         node = NextNode(node);
     }
 
     rt->gcVerifyData = trc;
-    rt->gcIncrementalTracer = &trc->gcmarker;
+    rt->gcIncrementalState = MARK;
     for (CompartmentsIter c(rt); !c.done(); c.next()) {
-        c->gcIncrementalTracer = &trc->gcmarker;
         c->needsBarrier_ = true;
+        c->barrierMarker_.start(rt, NULL);
+        c->arenas.prepareForIncrementalGC(c);
     }
 
     return;
 
 oom:
-    js_free(trc->root);
+    rt->gcIncrementalState = NO_INCREMENTAL;
     trc->~VerifyTracer();
     js_free(trc);
 }
 
 static void
-CheckAutorooter(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
+MarkFromAutorooter(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
 {
     static_cast<Cell *>(*thingp)->markIfUnmarked();
 }
 
+static bool
+IsMarkedOrAllocated(Cell *cell)
+{
+    return cell->isMarked() || cell->arenaHeader()->allocatedDuringIncremental;
+}
+
+const static uint32_t MAX_VERIFIER_EDGES = 1000;
+
 /*
  * This function is called by EndVerifyBarriers for every heap edge. If the edge
  * already existed in the original snapshot, we "cancel it out" by overwriting
  * it with NULL. EndVerifyBarriers later asserts that the remaining non-NULL
  * edges (i.e., the ones from the original snapshot that must have been
  * modified) must point to marked objects.
  */
 static void
 CheckEdge(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
 {
     VerifyTracer *trc = (VerifyTracer *)jstrc;
     VerifyNode *node = trc->curnode;
 
+    /* Avoid n^2 behavior. */
+    if (node->count > MAX_VERIFIER_EDGES)
+        return;
+
     for (uint32_t i = 0; i < node->count; i++) {
         if (node->edges[i].thing == *thingp) {
             JS_ASSERT(node->edges[i].kind == kind);
             node->edges[i].thing = NULL;
             return;
         }
     }
+
+    /*
+     * Anything that is reachable now should have been reachable before, or else
+     * it should be marked.
+     */
+    NodeMap::Ptr p = trc->nodemap.lookup(*thingp);
+    JS_ASSERT_IF(!p, IsMarkedOrAllocated(static_cast<Cell *>(*thingp)));
+}
+
+static void
+CheckReachable(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
+{
+    VerifyTracer *trc = (VerifyTracer *)jstrc;
+    NodeMap::Ptr p = trc->nodemap.lookup(*thingp);
+    JS_ASSERT_IF(!p, IsMarkedOrAllocated(static_cast<Cell *>(*thingp)));
 }
 
 static void
 EndVerifyBarriers(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime;
 
     AutoLockGC lock(rt);
-    AutoGCSession gcsession(cx);
+    AutoHeapSession session(cx);
 
 #ifdef JS_THREADSAFE
     rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
 #endif
 
     AutoUnlockGC unlock(rt);
 
     AutoCopyFreeListToArenas copy(rt);
@@ -3573,63 +4341,93 @@ EndVerifyBarriers(JSContext *cx)
 
     VerifyTracer *trc = (VerifyTracer *)rt->gcVerifyData;
 
     if (!trc)
         return;
 
     JS_ASSERT(trc->number == rt->gcNumber);
 
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
-        c->gcIncrementalTracer = NULL;
+    /* We need to disable barriers before tracing, which may invoke barriers. */
+    for (CompartmentsIter c(rt); !c.done(); c.next())
         c->needsBarrier_ = false;
-    }
-
-    if (rt->gcIncrementalTracer->hasDelayedChildren())
-        rt->gcIncrementalTracer->markDelayedChildren();
+
+    for (CompartmentsIter c(rt); !c.done(); c.next())
+        c->discardJitCode(cx);
 
     rt->gcVerifyData = NULL;
-    rt->gcIncrementalTracer = NULL;
-
-    JS_TracerInit(trc, cx, CheckAutorooter);
+    rt->gcIncrementalState = NO_INCREMENTAL;
+
+    JS_TracerInit(trc, cx, MarkFromAutorooter);
 
     JSContext *iter = NULL;
     while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter)) {
         if (acx->autoGCRooters)
             acx->autoGCRooters->traceAll(trc);
     }
 
-    JS_TracerInit(trc, cx, CheckEdge);
-
-    /* Start after the roots. */
-    VerifyNode *node = NextNode(trc->root);
-    int count = 0;
-
-    while ((char *)node < trc->edgeptr) {
-        trc->curnode = node;
-        JS_TraceChildren(trc, node->thing, node->kind);
-
-        for (uint32_t i = 0; i < node->count; i++) {
-            void *thing = node->edges[i].thing;
-            JS_ASSERT_IF(thing, static_cast<Cell *>(thing)->isMarked());
+    if (IsIncrementalGCSafe(cx)) {
+        /*
+         * Verify that all the current roots were reachable previously, or else
+         * are marked.
+         */
+        JS_TracerInit(trc, cx, CheckReachable);
+        MarkRuntime(trc, true);
+
+        JS_TracerInit(trc, cx, CheckEdge);
+
+        /* Start after the roots. */
+        VerifyNode *node = NextNode(trc->root);
+        while ((char *)node < trc->edgeptr) {
+            trc->curnode = node;
+            JS_TraceChildren(trc, node->thing, node->kind);
+
+            if (node->count <= MAX_VERIFIER_EDGES) {
+                for (uint32_t i = 0; i < node->count; i++) {
+                    void *thing = node->edges[i].thing;
+                    JS_ASSERT_IF(thing, IsMarkedOrAllocated(static_cast<Cell *>(thing)));
+                }
+            }
+
+            node = NextNode(node);
         }
-
-        count++;
-        node = NextNode(node);
     }
 
-    js_free(trc->root);
+    for (CompartmentsIter c(rt); !c.done(); c.next()) {
+        c->barrierMarker_.reset();
+        c->barrierMarker_.stop();
+    }
+
     trc->~VerifyTracer();
     js_free(trc);
 }
 
 void
-VerifyBarriers(JSContext *cx, bool always)
+FinishVerifier(JSRuntime *rt)
+{
+    if (VerifyTracer *trc = (VerifyTracer *)rt->gcVerifyData) {
+        trc->~VerifyTracer();
+        js_free(trc);
+    }
+}
+
+void
+VerifyBarriers(JSContext *cx)
 {
-    if (cx->runtime->gcZeal() < ZealVerifierThreshold)
+    JSRuntime *rt = cx->runtime;
+    if (rt->gcVerifyData)
+        EndVerifyBarriers(cx);
+    else
+        StartVerifyBarriers(cx);
+}
+
+void
+MaybeVerifyBarriers(JSContext *cx, bool always)
+{
+    if (cx->runtime->gcZeal() != ZealVerifierValue)
         return;
 
     uint32_t freq = cx->runtime->gcZealFrequency;
 
     JSRuntime *rt = cx->runtime;
     if (VerifyTracer *trc = (VerifyTracer *)rt->gcVerifyData) {
         if (++trc->count < freq && !always)
             return;
@@ -3767,8 +4565,9 @@ JSXML *
 js_NewGCXML(JSContext *cx)
 {
     if (!cx->runningWithTrustedPrincipals())
         ++sE4XObjectsCreated;
 
     return NewGCThing<JSXML>(cx, js::gc::FINALIZE_XML, sizeof(JSXML));
 }
 #endif
+
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -50,17 +50,16 @@
 #include "jsalloc.h"
 #include "jstypes.h"
 #include "jsprvtd.h"
 #include "jspubtd.h"
 #include "jsdhash.h"
 #include "jslock.h"
 #include "jsutil.h"
 #include "jsversion.h"
-#include "jsgcstats.h"
 #include "jscell.h"
 
 #include "ds/BitArray.h"
 #include "gc/Statistics.h"
 #include "js/HashTable.h"
 #include "js/Vector.h"
 #include "js/TemplateLib.h"
 
@@ -77,16 +76,24 @@ js_TraceXML(JSTracer *trc, JSXML* thing)
 
 namespace js {
 
 class GCHelperThread;
 struct Shape;
 
 namespace gc {
 
+enum State {
+    NO_INCREMENTAL,
+    MARK_ROOTS,
+    MARK,
+    SWEEP,
+    INVALID
+};
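
For orientation, a hedged sketch of the lifecycle these states describe, as
far as it can be read off the jsgc.cpp changes above (SWEEP is declared here
but its transition is not visible in these hunks):

    // NO_INCREMENTAL -> MARK_ROOTS -> MARK -> (SWEEP) -> NO_INCREMENTAL
    //   MARK_ROOTS: roots are being marked (MarkRuntime records them)
    //   MARK:       the mark stack is drained across one or more slices
    //   INVALID:    presumably a sentinel, never set as a live state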
+
 struct Arena;
 
 /*
  * This must be an upper bound, but we do not need the least upper bound, so
  * we just exclude non-background objects.
  */
 const size_t MAX_BACKGROUND_FINALIZE_KINDS = FINALIZE_LIMIT - FINALIZE_OBJECT_LIMIT / 2;
 
@@ -414,59 +421,69 @@ struct ArenaHeader {
 
     /*
      * When recursive marking uses too much stack the marking is delayed and
      * the corresponding arenas are put into a stack using the following field
      * as a linkage. To distinguish the bottom of the stack from the arenas
      * not present in the stack we use an extra flag to tag arenas on the
      * stack.
      *
+     * Delayed marking is also used for arenas that we allocate into during an
+     * incremental GC. In this case, we intend to mark all the objects in the
+     * arena, and it's faster to do this marking in bulk.
+     *
      * To minimize the ArenaHeader size we record the next delayed marking
      * linkage as arenaAddress() >> ArenaShift and pack it with the allocKind
      * field and hasDelayedMarking flag. We use 8 bits for the allocKind, not
      * ArenaShift - 1, so the compiler can use byte-level memory instructions
      * to access it.
      */
   public:
     size_t       hasDelayedMarking  : 1;
-    size_t       nextDelayedMarking : JS_BITS_PER_WORD - 8 - 1;
+    size_t       allocatedDuringIncremental : 1;
+    size_t       markOverflow : 1;
+    size_t       nextDelayedMarking : JS_BITS_PER_WORD - 8 - 1 - 1 - 1;
 
     static void staticAsserts() {
         /* We must be able to fit the allockind into uint8_t. */
         JS_STATIC_ASSERT(FINALIZE_LIMIT <= 255);
 
         /*
         * nextDelayedMarking packing assumes that ArenaShift has enough bits
          * to cover allocKind and hasDelayedMarking.
          */
-        JS_STATIC_ASSERT(ArenaShift >= 8 + 1);
+        JS_STATIC_ASSERT(ArenaShift >= 8 + 1 + 1 + 1);
     }
 
     inline uintptr_t address() const;
     inline Chunk *chunk() const;
 
     bool allocated() const {
         JS_ASSERT(allocKind <= size_t(FINALIZE_LIMIT));
         return allocKind < size_t(FINALIZE_LIMIT);
     }
 
     void init(JSCompartment *comp, AllocKind kind) {
         JS_ASSERT(!allocated());
+        JS_ASSERT(!markOverflow);
+        JS_ASSERT(!allocatedDuringIncremental);
         JS_ASSERT(!hasDelayedMarking);
         compartment = comp;
 
         JS_STATIC_ASSERT(FINALIZE_LIMIT <= 255);
         allocKind = size_t(kind);
 
         /* See comments in FreeSpan::allocateFromNewArena. */
         firstFreeSpanOffsets = FreeSpan::FullArenaOffsets;
     }
 
     void setAsNotAllocated() {
         allocKind = size_t(FINALIZE_LIMIT);
+        markOverflow = 0;
+        allocatedDuringIncremental = 0;
         hasDelayedMarking = 0;
         nextDelayedMarking = 0;
     }
 
     uintptr_t arenaAddress() const {
         return address();
     }
 
@@ -502,18 +519,18 @@ struct ArenaHeader {
         JS_ASSERT(span->isWithinArena(arenaAddress()));
         firstFreeSpanOffsets = span->encodeAsOffsets();
     }
 
 #ifdef DEBUG
     void checkSynchronizedWithFreeList() const;
 #endif
 
-    inline Arena *getNextDelayedMarking() const;
-    inline void setNextDelayedMarking(Arena *arena);
+    inline ArenaHeader *getNextDelayedMarking() const;
+    inline void setNextDelayedMarking(ArenaHeader *aheader);
 };
 
 struct Arena {
     /*
      * Layout of an arena:
      * An arena is 4K in size and 4K-aligned. It starts with the ArenaHeader
      * descriptor followed by some pad bytes. The remainder of the arena is
      * filled with the array of T things. The pad bytes ensure that the thing
@@ -903,35 +920,34 @@ ArenaHeader::isEmpty() const
 
 inline size_t
 ArenaHeader::getThingSize() const
 {
     JS_ASSERT(allocated());
     return Arena::thingSize(getAllocKind());
 }
 
-inline Arena *
+inline ArenaHeader *
 ArenaHeader::getNextDelayedMarking() const
 {
-    return reinterpret_cast<Arena *>(nextDelayedMarking << ArenaShift);
+    return &reinterpret_cast<Arena *>(nextDelayedMarking << ArenaShift)->aheader;
 }
 
 inline void
-ArenaHeader::setNextDelayedMarking(Arena *arena)
+ArenaHeader::setNextDelayedMarking(ArenaHeader *aheader)
 {
-    JS_ASSERT(!hasDelayedMarking);
+    JS_ASSERT(!(uintptr_t(aheader) & ArenaMask));
     hasDelayedMarking = 1;
-    nextDelayedMarking = arena->address() >> ArenaShift;
+    nextDelayedMarking = aheader->arenaAddress() >> ArenaShift;
 }
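
Because arenas are 4K in size and 4K-aligned (see the Arena layout comment
above), the low ArenaShift bits of an arena address are always zero, which is
what makes the shift-based packing lossless. A quick arithmetic check:

    // With 4K arenas, ArenaShift == 12. For an arena header at 0x7f3a52000:
    //   nextDelayedMarking = 0x7f3a52000 >> 12 == 0x7f3a52
    //   restored address   = 0x7f3a52 << 12    == 0x7f3a52000
    // The JS_ASSERT(!(uintptr_t(aheader) & ArenaMask)) above checks exactly
    // the alignment this round trip relies on.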
 
 JS_ALWAYS_INLINE void
 ChunkBitmap::getMarkWordAndMask(const Cell *cell, uint32_t color,
                                 uintptr_t **wordp, uintptr_t *maskp)
 {
-    JS_ASSERT(cell->chunk() == Chunk::fromAddress(reinterpret_cast<uintptr_t>(this)));
     size_t bit = (cell->address() & ChunkMask) / Cell::CellSize + color;
     JS_ASSERT(bit < ArenaBitmapBits * ArenasPerChunk);
     *maskp = uintptr_t(1) << (bit % JS_BITS_PER_WORD);
     *wordp = &bitmap[bit / JS_BITS_PER_WORD];
 }
 
 static void
 AssertValidColor(const void *thing, uint32_t color)
@@ -965,31 +981,16 @@ Cell::unmark(uint32_t color) const
 }
 
 JSCompartment *
 Cell::compartment() const
 {
     return arenaHeader()->compartment;
 }
 
-/*
- * Lower limit after which we limit the heap growth
- */
-const size_t GC_ALLOCATION_THRESHOLD = 30 * 1024 * 1024;
-
-/*
- * A GC is triggered once the number of newly allocated arenas is
- * GC_HEAP_GROWTH_FACTOR times the number of live arenas after the last GC
- * starting after the lower limit of GC_ALLOCATION_THRESHOLD.
- */
-const float GC_HEAP_GROWTH_FACTOR = 3.0f;
-
-/* Perform a Full GC every 20 seconds if MaybeGC is called */
-static const int64_t GC_IDLE_FULL_SPAN = 20 * 1000 * 1000;
-
 static inline JSGCTraceKind
 MapAllocToTraceKind(AllocKind thingKind)
 {
     static const JSGCTraceKind map[FINALIZE_LIMIT] = {
         JSTRACE_OBJECT,     /* FINALIZE_OBJECT0 */
         JSTRACE_OBJECT,     /* FINALIZE_OBJECT0_BACKGROUND */
         JSTRACE_OBJECT,     /* FINALIZE_OBJECT2 */
         JSTRACE_OBJECT,     /* FINALIZE_OBJECT2_BACKGROUND */
@@ -1163,23 +1164,24 @@ struct ArenaLists {
      * Return the free list back to the arena so the GC finalization will not
      * run the finalizers over uninitialized bytes from free things.
      */
     void purge() {
         for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
             FreeSpan *headSpan = &freeLists[i];
             if (!headSpan->isEmpty()) {
                 ArenaHeader *aheader = headSpan->arenaHeader();
-                JS_ASSERT(!aheader->hasFreeThings());
                 aheader->setFirstFreeSpan(headSpan);
                 headSpan->initAsEmpty();
             }
         }
     }
 
+    inline void prepareForIncrementalGC(JSCompartment *comp);
+
     /*
      * Temporarily copy the free list heads to the arenas so the code can see
      * the proper value in ArenaHeader::freeList when accessing the latter
      * outside the GC.
      */
     void copyFreeListsToArenas() {
         for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
             copyFreeListToArena(AllocKind(i));
@@ -1304,33 +1306,16 @@ struct RootInfo {
     JSGCRootType type;
 };
 
 typedef js::HashMap<void *,
                     RootInfo,
                     js::DefaultHasher<void *>,
                     js::SystemAllocPolicy> RootedValueMap;
 
-/* If HashNumber grows, need to change WrapperHasher. */
-JS_STATIC_ASSERT(sizeof(HashNumber) == 4);
-
-struct WrapperHasher
-{
-    typedef Value Lookup;
-
-    static HashNumber hash(Value key) {
-        uint64_t bits = JSVAL_TO_IMPL(key).asBits;
-        return uint32_t(bits) ^ uint32_t(bits >> 32);
-    }
-
-    static bool match(const Value &l, const Value &k) { return l == k; }
-};
-
-typedef HashMap<Value, Value, WrapperHasher, SystemAllocPolicy> WrapperMap;
-
 } /* namespace js */
 
 extern JS_FRIEND_API(JSGCTraceKind)
 js_GetGCThingTraceKind(void *thing);
 
 extern JSBool
 js_InitGC(JSRuntime *rt, uint32_t maxbytes);
 
@@ -1372,16 +1357,19 @@ extern bool
 IsAboutToBeFinalized(const js::Value &value);
 
 extern bool
 js_IsAddressableGCThing(JSRuntime *rt, uintptr_t w, js::gc::AllocKind *thingKind, void **thing);
 
 namespace js {
 
 extern void
+MarkCompartmentActive(js::StackFrame *fp);
+
+extern void
 TraceRuntime(JSTracer *trc);
 
 extern JS_FRIEND_API(void)
 MarkContext(JSTracer *trc, JSContext *acx);
 
 /* Must be called with GC lock taken. */
 extern void
 TriggerGC(JSRuntime *rt, js::gcreason::Reason reason);
@@ -1391,35 +1379,44 @@ extern void
 TriggerCompartmentGC(JSCompartment *comp, js::gcreason::Reason reason);
 
 extern void
 MaybeGC(JSContext *cx);
 
 extern void
 ShrinkGCBuffers(JSRuntime *rt);
 
-} /* namespace js */
-
 /*
  * Kinds of js_GC invocation.
  */
 typedef enum JSGCInvocationKind {
     /* Normal invocation. */
     GC_NORMAL           = 0,
 
     /* Minimize GC triggers and release empty GC chunks right away. */
     GC_SHRINK             = 1
 } JSGCInvocationKind;
 
 /* Pass NULL for |comp| to get a full GC. */
 extern void
-js_GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, js::gcreason::Reason r);
+GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, js::gcreason::Reason reason);
+
+extern void
+GCSlice(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, js::gcreason::Reason reason);
+
+extern void
+GCDebugSlice(JSContext *cx, int64_t objCount);
+
+} /* namespace js */
 
 namespace js {
 
+void
+InitTracer(JSTracer *trc, JSRuntime *rt, JSContext *cx, JSTraceCallback callback);
+
 #ifdef JS_THREADSAFE
 
 class GCHelperThread {
     enum State {
         IDLE,
         SWEEPING,
         ALLOCATING,
         CANCEL_ALLOCATION,
@@ -1567,149 +1564,337 @@ struct GCChunkHasher {
 typedef HashSet<js::gc::Chunk *, GCChunkHasher, SystemAllocPolicy> GCChunkSet;
 
 template<class T>
 struct MarkStack {
     T *stack;
     T *tos;
     T *limit;
 
+    T *ballast;
+    T *ballastLimit;
+
+    MarkStack()
+      : stack(NULL),
+        tos(NULL),
+        limit(NULL),
+        ballast(NULL),
+        ballastLimit(NULL) { }
+
+    ~MarkStack() {
+        if (stack != ballast)
+            js_free(stack);
+        js_free(ballast);
+    }
+
+    bool init(size_t ballastcap) {
+        JS_ASSERT(!stack);
+
+        if (ballastcap == 0)
+            return true;
+
+        ballast = (T *)js_malloc(sizeof(T) * ballastcap);
+        if (!ballast)
+            return false;
+        ballastLimit = ballast + ballastcap;
+        stack = ballast;
+        limit = ballastLimit;
+        tos = stack;
+        return true;
+    }
+
     bool push(T item) {
-        if (tos == limit)
-            return false;
+        if (tos == limit) {
+            if (!enlarge())
+                return false;
+        }
+        JS_ASSERT(tos < limit);
         *tos++ = item;
         return true;
     }
 
     bool push(T item1, T item2, T item3) {
         T *nextTos = tos + 3;
-        if (nextTos > limit)
-            return false;
+        if (nextTos > limit) {
+            if (!enlarge())
+                return false;
+            nextTos = tos + 3;
+        }
+        JS_ASSERT(nextTos <= limit);
         tos[0] = item1;
         tos[1] = item2;
         tos[2] = item3;
         tos = nextTos;
         return true;
     }
 
     bool isEmpty() const {
         return tos == stack;
     }
 
     T pop() {
         JS_ASSERT(!isEmpty());
         return *--tos;
     }
 
-    template<size_t N>
-    MarkStack(T (&buffer)[N])
-      : stack(buffer),
-        tos(buffer),
-        limit(buffer + N) { }
+    ptrdiff_t position() const {
+        return tos - stack;
+    }
+
+    void reset() {
+        if (stack != ballast) {
+            js_free(stack);
+            stack = ballast;
+            limit = ballastLimit;
+        }
+        tos = stack;
+        JS_ASSERT(limit == ballastLimit);
+    }
+
+    bool enlarge() {
+        size_t tosIndex = tos - stack;
+        size_t cap = limit - stack;
+        size_t newcap = cap * 2;
+        if (newcap == 0)
+            newcap = 32;
+
+        T *newStack;
+        if (stack == ballast) {
+            newStack = (T *)js_malloc(sizeof(T) * newcap);
+            if (!newStack)
+                return false;
+            for (T *src = stack, *dst = newStack; src < tos; )
+                *dst++ = *src++;
+        } else {
+            newStack = (T *)js_realloc(stack, sizeof(T) * newcap);
+            if (!newStack)
+                return false;
+        }
+        stack = newStack;
+        tos = stack + tosIndex;
+        limit = newStack + newcap;
+        return true;
+    }
+};
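
A minimal usage sketch for the reworked stack (processEntry is hypothetical):
init() preallocates the ballast buffer, push() now grows the stack on demand
via enlarge() instead of failing at a fixed limit, and reset() releases any
enlarged buffer while keeping the ballast for the next slice:

    MarkStack<uintptr_t> stack;
    if (!stack.init(MARK_STACK_LENGTH))   // preallocate the ballast
        return false;
    stack.push(addr);                     // doubles the capacity when full
    while (!stack.isEmpty())
        processEntry(stack.pop());
    stack.reset();                        // drop any enlarged buffer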
+
+/*
+ * This class records how much work has been done in a given GC slice, so that
+ * we can return before pausing for too long. Some slices are allowed to run for
+ * unlimited time, and others are bounded. To reduce the number of gettimeofday
+ * calls, we only check the time every 1000 operations.
+ */
+struct SliceBudget {
+    int64_t deadline; /* in microseconds */
+    intptr_t counter;
+
+    static const intptr_t CounterReset = 1000;
+
+    static const int64_t Unlimited = 0;
+    static int64_t TimeBudget(int64_t millis);
+    static int64_t WorkBudget(int64_t work);
+
+    /* Equivalent to SliceBudget(Unlimited). */
+    SliceBudget();
+
+    /* Instantiate as SliceBudget(Time/WorkBudget(n)). */
+    SliceBudget(int64_t budget);
+
+    void reset() {
+        deadline = INT64_MAX;
+        counter = INTPTR_MAX;
+    }
+
+    void step() {
+        counter--;
+    }
+
+    bool checkOverBudget();
+
+    bool isOverBudget() {
+        if (counter > 0)
+            return false;
+        return checkOverBudget();
+    }
 };
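
A hedged sketch of the intended pattern, mirroring how drainMarkStack and
markDelayedChildren below take a budget (the work loop itself is
hypothetical):

    SliceBudget budget(SliceBudget::TimeBudget(10));  // roughly a 10 ms slice
    while (haveMoreWork()) {
        doOneUnitOfWork();
        budget.step();               // cheap counter decrement ...
        if (budget.isOverBudget())   // ... time is checked every 1000 steps
            return false;            // yield; resume in the next slice
    }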
 
 static const size_t MARK_STACK_LENGTH = 32768;
 
 struct GCMarker : public JSTracer {
+  private:
     /*
      * We use a common mark stack to mark GC things of different types and use
      * the explicit tags to distinguish them when it cannot be deduced from
      * the context of push or pop operation.
-     *
-     * Currently we need only 4 tags. However that can be extended to 8 if
-     * necessary as we tag only GC things.
      */
     enum StackTag {
         ValueArrayTag,
         ObjectTag,
         TypeTag,
         XmlTag,
-        LastTag = XmlTag
+        SavedValueArrayTag,
+        LastTag = SavedValueArrayTag
     };
 
-    static const uintptr_t StackTagMask = 3;
+    static const uintptr_t StackTagMask = 7;
 
     static void staticAsserts() {
         JS_STATIC_ASSERT(StackTagMask >= uintptr_t(LastTag));
         JS_STATIC_ASSERT(StackTagMask <= gc::Cell::CellMask);
     }
 
-  private:
-    /* The color is only applied to objects, functions and xml. */
-    uint32_t color;
+  public:
+    explicit GCMarker();
+    bool init(bool lazy);
 
-  public:
-    /* Pointer to the top of the stack of arenas we are delaying marking on. */
-    js::gc::Arena *unmarkedArenaStackTop;
-    /* Count of arenas that are currently in the stack. */
-    DebugOnly<size_t> markLaterArenas;
+    void start(JSRuntime *rt, JSContext *cx);
+    void stop();
+    void reset();
 
-#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
-    js::gc::ConservativeGCStats conservativeStats;
-    Vector<void *, 0, SystemAllocPolicy> conservativeRoots;
-    const char *conservativeDumpFileName;
-    void dumpConservativeRoots();
-#endif
+    void pushObject(JSObject *obj) {
+        pushTaggedPtr(ObjectTag, obj);
+    }
 
-    MarkStack<uintptr_t> stack;
+    void pushType(types::TypeObject *type) {
+        pushTaggedPtr(TypeTag, type);
+    }
 
-  public:
-    explicit GCMarker(JSContext *cx);
-    ~GCMarker();
+    void pushXML(JSXML *xml) {
+        pushTaggedPtr(XmlTag, xml);
+    }
 
     uint32_t getMarkColor() const {
         return color;
     }
 
     /*
      * The only valid color transition during a GC is from black to gray. It is
      * wrong to switch the mark color from gray to black. The reason is that the
      * cycle collector depends on the invariant that there are no black to gray
      * edges in the GC heap. This invariant lets the CC not trace through black
      * objects. If this invariant is violated, the cycle collector may free
      * objects that are still reachable.
      */
     void setMarkColorGray() {
+        JS_ASSERT(isDrained());
         JS_ASSERT(color == gc::BLACK);
         color = gc::GRAY;
     }
 
+    inline void delayMarkingArena(gc::ArenaHeader *aheader);
     void delayMarkingChildren(const void *thing);
-
+    void markDelayedChildren(gc::ArenaHeader *aheader);
+    bool markDelayedChildren(SliceBudget &budget);
     bool hasDelayedChildren() const {
         return !!unmarkedArenaStackTop;
     }
 
-    void markDelayedChildren();
+    bool isDrained() {
+        return isMarkStackEmpty() && !unmarkedArenaStackTop;
+    }
+
+    bool drainMarkStack(SliceBudget &budget);
+
+    /*
+     * Gray marking must be done after all black marking is complete. However,
+     * we do not have write barriers on XPConnect roots. Therefore, XPConnect
+     * roots must be accumulated in the first slice of incremental GC. We
+     * accumulate these roots in the GrayRootMarker and then mark them later,
+     * after black marking is complete. This accumulation can fail, but in that
+     * case we switch to non-incremental GC.
+     */
+    bool hasBufferedGrayRoots() const;
+    void startBufferingGrayRoots();
+    void endBufferingGrayRoots();
+    void markBufferedGrayRoots();
+
+    static void GrayCallback(JSTracer *trc, void **thing, JSGCTraceKind kind);
+
+    MarkStack<uintptr_t> stack;
+
+  private:
+#ifdef DEBUG
+    void checkCompartment(void *p);
+#else
+    void checkCompartment(void *p) {}
+#endif
+
+    void pushTaggedPtr(StackTag tag, void *ptr) {
+        checkCompartment(ptr);
+        uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
+        JS_ASSERT(!(addr & StackTagMask));
+        if (!stack.push(addr | uintptr_t(tag)))
+            delayMarkingChildren(ptr);
+    }
+
+    void pushValueArray(JSObject *obj, void *start, void *end) {
+        checkCompartment(obj);
+
+        if (start == end)
+            return;
+
+        JS_ASSERT(start <= end);
+        uintptr_t tagged = reinterpret_cast<uintptr_t>(obj) | GCMarker::ValueArrayTag;
+        uintptr_t startAddr = reinterpret_cast<uintptr_t>(start);
+        uintptr_t endAddr = reinterpret_cast<uintptr_t>(end);
+
+        /*
+         * Push in the reverse order so obj will be on top. If we cannot push
+         * the array, we trigger delay marking for the whole object.
+         */
+        if (!stack.push(endAddr, startAddr, tagged))
+            delayMarkingChildren(obj);
+    }
 
     bool isMarkStackEmpty() {
         return stack.isEmpty();
     }
 
-    void drainMarkStack();
+    bool restoreValueArray(JSObject *obj, void **vpp, void **endp);
+    void saveValueRanges();
+    inline void processMarkStackTop(SliceBudget &budget);
+
+    void appendGrayRoot(void *thing, JSGCTraceKind kind);
 
-    inline void processMarkStackTop();
+    /* The color is only applied to objects, functions and xml. */
+    uint32_t color;
+
+    DebugOnly<bool> started;
+
+    /* Pointer to the top of the stack of arenas we are delaying marking on. */
+    js::gc::ArenaHeader *unmarkedArenaStackTop;
+    /* Count of arenas that are currently in the stack. */
+    DebugOnly<size_t> markLaterArenas;
 
-    void pushObject(JSObject *obj) {
-        pushTaggedPtr(ObjectTag, obj);
-    }
+    struct GrayRoot {
+        void *thing;
+        JSGCTraceKind kind;
+#ifdef DEBUG
+        JSTraceNamePrinter debugPrinter;
+        const void *debugPrintArg;
+        size_t debugPrintIndex;
+#endif
 
-    void pushType(types::TypeObject *type) {
-        pushTaggedPtr(TypeTag, type);
+        GrayRoot(void *thing, JSGCTraceKind kind)
+          : thing(thing), kind(kind) {}
+    };
+
+    bool grayFailed;
+    Vector<GrayRoot, 0, SystemAllocPolicy> grayRoots;
+};
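
The gray-root buffering comment inside the class implies a phase ordering.
A hedged sketch of the call sequence (the real call sites are in jsgc.cpp,
outside these hunks):

    gcmarker.startBufferingGrayRoots();  // first slice, before root marking
    MarkRuntime(&gcmarker);              // XPConnect roots are appended as gray
    gcmarker.endBufferingGrayRoots();
    // ... later slices drain the black mark stack ...
    gcmarker.setMarkColorGray();         // legal only once isDrained()
    gcmarker.markBufferedGrayRoots();    // replay the accumulated gray roots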
+
+struct BarrierGCMarker : public GCMarker {
+    bool init() {
+        return GCMarker::init(true);
     }
+};
 
-    void pushXML(JSXML *xml) {
-        pushTaggedPtr(XmlTag, xml);
-    }
 
-    void pushTaggedPtr(StackTag tag, void *ptr) {
-        uintptr_t addr = reinterpret_cast<uintptr_t>(ptr);
-        JS_ASSERT(!(addr & StackTagMask));
-        if (!stack.push(addr | uintptr_t(tag)))
-            delayMarkingChildren(ptr);
+struct FullGCMarker : public GCMarker {
+    bool init() {
+        return GCMarker::init(false);
     }
 };
 
 void
 MarkStackRangeConservatively(JSTracer *trc, Value *begin, Value *end);
 
 typedef void (*IterateCompartmentCallback)(JSContext *cx, void *data, JSCompartment *compartment);
 typedef void (*IterateChunkCallback)(JSContext *cx, void *data, gc::Chunk *chunk);
@@ -1752,17 +1937,18 @@ IterateCells(JSContext *cx, JSCompartmen
 } /* namespace js */
 
 extern void
 js_FinalizeStringRT(JSRuntime *rt, JSString *str);
 
 /*
  * Macro to test if a traversal is the marking phase of the GC.
  */
-#define IS_GC_MARKING_TRACER(trc) ((trc)->callback == NULL)
+#define IS_GC_MARKING_TRACER(trc) \
+    ((trc)->callback == NULL || (trc)->callback == GCMarker::GrayCallback)
 
 namespace js {
 namespace gc {
 
 JSCompartment *
 NewCompartment(JSContext *cx, JSPrincipals *principals);
 
 /* Tries to run a GC no matter what (used for GC zeal). */
@@ -1773,30 +1959,40 @@ RunDebugGC(JSContext *cx);
 /* Overwrites stack references to GC things which have not been rooted. */
 void CheckStackRoots(JSContext *cx);
 
 inline void MaybeCheckStackRoots(JSContext *cx) { CheckStackRoots(cx); }
 #else
 inline void MaybeCheckStackRoots(JSContext *cx) {}
 #endif
 
-const int ZealPokeThreshold = 1;
-const int ZealAllocThreshold = 2;
-const int ZealVerifierThreshold = 4;
+const int ZealPokeValue = 1;
+const int ZealAllocValue = 2;
+const int ZealFrameGCValue = 3;
+const int ZealVerifierValue = 4;
+const int ZealFrameVerifierValue = 5;
 
 #ifdef JS_GC_ZEAL
 
 /* Check that write barriers have been used correctly. See jsgc.cpp. */
 void
-VerifyBarriers(JSContext *cx, bool always = false);
+VerifyBarriers(JSContext *cx);
+
+void
+MaybeVerifyBarriers(JSContext *cx, bool always = false);
 
 #else
 
 static inline void
-VerifyBarriers(JSContext *cx, bool always = false)
+VerifyBarriers(JSContext *cx)
+{
+}
+
+static inline void
+MaybeVerifyBarriers(JSContext *cx, bool always = false)
 {
 }
 
 #endif
 
 } /* namespace gc */
 
 static inline JSCompartment *
--- a/js/src/jsgcinlines.h
+++ b/js/src/jsgcinlines.h
@@ -205,17 +205,17 @@ GCPoke(JSRuntime *rt, Value oldval)
 #if 1
     rt->gcPoke = true;
 #else
     rt->gcPoke = oldval.isGCThing();
 #endif
 
 #ifdef JS_GC_ZEAL
     /* Schedule a GC to happen "soon" after a GC poke. */
-    if (rt->gcZeal() >= js::gc::ZealPokeThreshold)
+    if (rt->gcZeal() == js::gc::ZealPokeValue)
         rt->gcNextScheduled = 1;
 #endif
 }
 
 /*
  * Invoke ArenaOp and CellOp on every arena and cell in a compartment which
  * have the specified thing kind.
  */
@@ -257,24 +257,35 @@ class CellIterImpl
     const FreeSpan *span;
     uintptr_t thing;
     Cell *cell;
 
   protected:
     CellIterImpl() {
     }
 
-    void init(JSCompartment *comp, AllocKind kind) {
+    void initSpan(JSCompartment *comp, AllocKind kind) {
         JS_ASSERT(comp->arenas.isSynchronizedFreeList(kind));
         firstThingOffset = Arena::firstThingOffset(kind);
         thingSize = Arena::thingSize(kind);
-        aheader = comp->arenas.getFirstArena(kind);
         firstSpan.initAsEmpty();
         span = &firstSpan;
         thing = span->first;
+    }
+
+    void init(ArenaHeader *singleAheader) {
+        aheader = singleAheader;
+        initSpan(aheader->compartment, aheader->getAllocKind());
+        next();
+        aheader = NULL;
+    }
+
+    void init(JSCompartment *comp, AllocKind kind) {
+        initSpan(comp, kind);
+        aheader = comp->arenas.getFirstArena(kind);
         next();
     }
 
   public:
     bool done() const {
         return !cell;
     }
 
@@ -306,41 +317,47 @@ class CellIterImpl
             thing = aheader->arenaAddress() | firstThingOffset;
             aheader = aheader->next;
         }
         cell = reinterpret_cast<Cell *>(thing);
         thing += thingSize;
     }
 };
 
-class CellIterUnderGC : public CellIterImpl {
-
+class CellIterUnderGC : public CellIterImpl
+{
   public:
     CellIterUnderGC(JSCompartment *comp, AllocKind kind) {
         JS_ASSERT(comp->rt->gcRunning);
         init(comp, kind);
     }
+
+    CellIterUnderGC(ArenaHeader *aheader) {
+        JS_ASSERT(aheader->compartment->rt->gcRunning);
+        init(aheader);
+    }
 };
 
 /*
  * When using the iterator outside the GC the caller must ensure that no GC or
  * allocations of GC things are possible and that the background finalization
  * for the given thing kind is not enabled or is done.
  */
-class CellIter: public CellIterImpl
+class CellIter : public CellIterImpl
 {
     ArenaLists *lists;
     AllocKind kind;
 #ifdef DEBUG
     size_t *counter;
 #endif
   public:
     CellIter(JSContext *cx, JSCompartment *comp, AllocKind kind)
       : lists(&comp->arenas),
-        kind(kind) {
+        kind(kind)
+    {
 #ifdef JS_THREADSAFE
         JS_ASSERT(comp->arenas.doneBackgroundFinalize(kind));
 #endif
         if (lists->isSynchronizedFreeList(kind)) {
             lists = NULL;
         } else {
             JS_ASSERT(!comp->rt->gcRunning);
             lists->copyFreeListToArena(kind);
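
The contract in the comment above matches the pattern the old verifier code
used in the jsgc.cpp hunk earlier (iterating every script in a compartment).
Outside the GC, usage looks like this sketch:

    // Background finalization for FINALIZE_SCRIPT must already be done.
    for (CellIter i(cx, comp, FINALIZE_SCRIPT); !i.done(); i.next()) {
        JSScript *script = i.get<JSScript>();
        // ... inspect script; no GC or GC-thing allocation is allowed here ...
    }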
@@ -392,16 +409,19 @@ NewGCThing(JSContext *cx, js::gc::AllocK
 #endif
 
     js::gc::MaybeCheckStackRoots(cx);
 
     JSCompartment *comp = cx->compartment;
     void *t = comp->arenas.allocateFromFreeList(kind, thingSize);
     if (!t)
         t = js::gc::ArenaLists::refillFreeList(cx, kind);
+
+    JS_ASSERT_IF(t && comp->needsBarrier(),
+                 static_cast<T *>(t)->arenaHeader()->allocatedDuringIncremental);
     return static_cast<T *>(t);
 }
 
 /* Alternate form which allocates a GC thing if doing so cannot trigger a GC. */
 template <typename T>
 inline T *
 TryNewGCThing(JSContext *cx, js::gc::AllocKind kind, size_t thingSize)
 {
@@ -414,16 +434,18 @@ TryNewGCThing(JSContext *cx, js::gc::All
     JS_ASSERT(!cx->runtime->noGCOrAllocationCheck);
 
 #ifdef JS_GC_ZEAL
     if (cx->runtime->needZealousGC())
         return NULL;
 #endif
 
     void *t = cx->compartment->arenas.allocateFromFreeList(kind, thingSize);
+    JS_ASSERT_IF(t && cx->compartment->needsBarrier(),
+                 static_cast<T *>(t)->arenaHeader()->allocatedDuringIncremental);
     return static_cast<T *>(t);
 }
 
 } /* namespace gc */
 } /* namespace js */
 
 inline JSObject *
 js_NewGCObject(JSContext *cx, js::gc::AllocKind kind)
--- a/js/src/jsgcmark.cpp
+++ b/js/src/jsgcmark.cpp
@@ -56,17 +56,17 @@ PushMarkStack(GCMarker *gcmarker, JSObje
 
 static inline void
 PushMarkStack(GCMarker *gcmarker, JSFunction *thing);
 
 static inline void
 PushMarkStack(GCMarker *gcmarker, JSScript *thing);
 
 static inline void
-PushMarkStack(GCMarker *gcmarker, const Shape *thing);
+PushMarkStack(GCMarker *gcmarker, Shape *thing);
 
 static inline void
 PushMarkStack(GCMarker *gcmarker, JSString *thing);
 
 static inline void
 PushMarkStack(GCMarker *gcmarker, types::TypeObject *thing);
 
 /*** Object Marking ***/
@@ -98,53 +98,60 @@ MarkInternal(JSTracer *trc, T *thing)
                  thing->compartment() == rt->gcCheckCompartment ||
                  thing->compartment() == rt->atomsCompartment);
 
     /*
      * Don't mark things outside a compartment if we are in a per-compartment
      * GC.
      */
     if (!rt->gcCurrentCompartment || thing->compartment() == rt->gcCurrentCompartment) {
-        if (IS_GC_MARKING_TRACER(trc)) {
+        if (!trc->callback) {
             PushMarkStack(static_cast<GCMarker *>(trc), thing);
         } else {
             void *tmp = (void *)thing;
             trc->callback(trc, &tmp, GetGCThingTraceKind(thing));
             JS_ASSERT(tmp == thing);
         }
     }
 
 #ifdef DEBUG
     trc->debugPrinter = NULL;
     trc->debugPrintArg = NULL;
 #endif
 }
 
+#define JS_ROOT_MARKING_ASSERT(trc)                                     \
+    JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc),                             \
+                 trc->runtime->gcIncrementalState == NO_INCREMENTAL ||  \
+                 trc->runtime->gcIncrementalState == MARK_ROOTS);
+
+
 template <typename T>
 static void
 MarkUnbarriered(JSTracer *trc, T *thing, const char *name)
 {
     JS_SET_TRACING_NAME(trc, name);
     MarkInternal(trc, thing);
 }
 
 template <typename T>
 static void
-Mark(JSTracer *trc, const HeapPtr<T> &thing, const char *name)
+Mark(JSTracer *trc, HeapPtr<T> *thing, const char *name)
 {
     JS_SET_TRACING_NAME(trc, name);
-    MarkInternal(trc, thing.get());
+    MarkInternal(trc, thing->get());
 }
 
 template <typename T>
 static void
-MarkRoot(JSTracer *trc, T *thing, const char *name)
+MarkRoot(JSTracer *trc, T **thingp, const char *name)
 {
+    JS_ROOT_MARKING_ASSERT(trc);
     JS_SET_TRACING_NAME(trc, name);
-    MarkInternal(trc, thing);
+    MarkInternal(trc, *thingp);
 }
 
 template <typename T>
 static void
 MarkRange(JSTracer *trc, size_t len, HeapPtr<T> *vec, const char *name)
 {
     for (size_t i = 0; i < len; ++i) {
         if (T *obj = vec[i]) {
@@ -153,33 +160,34 @@ MarkRange(JSTracer *trc, size_t len, Hea
         }
     }
 }
 
 template <typename T>
 static void
 MarkRootRange(JSTracer *trc, size_t len, T **vec, const char *name)
 {
+    JS_ROOT_MARKING_ASSERT(trc);
     for (size_t i = 0; i < len; ++i) {
         JS_SET_TRACING_INDEX(trc, name, i);
         MarkInternal(trc, vec[i]);
     }
 }
 
 #define DeclMarkerImpl(base, type)                                                                \
 void                                                                                              \
-Mark##base(JSTracer *trc, const HeapPtr<type> &thing, const char *name)                           \
+Mark##base(JSTracer *trc, HeapPtr<type> *thing, const char *name)                                 \
 {                                                                                                 \
     Mark<type>(trc, thing, name);                                                                 \
 }                                                                                                 \
                                                                                                   \
 void                                                                                              \
-Mark##base##Root(JSTracer *trc, type *thing, const char *name)                                    \
+Mark##base##Root(JSTracer *trc, type **thingp, const char *name)                                  \
 {                                                                                                 \
-    MarkRoot<type>(trc, thing, name);                                                             \
+    MarkRoot<type>(trc, thingp, name);                                                            \
 }                                                                                                 \
                                                                                                   \
 void                                                                                              \
 Mark##base##Unbarriered(JSTracer *trc, type *thing, const char *name)                             \
 {                                                                                                 \
     MarkUnbarriered<type>(trc, thing, name);                                                      \
 }                                                                                                 \
                                                                                                   \
@@ -241,62 +249,70 @@ MarkKind(JSTracer *trc, void *thing, JSG
         break;
 #endif
     }
 }
 
 void
 MarkGCThingRoot(JSTracer *trc, void *thing, const char *name)
 {
+    JS_ROOT_MARKING_ASSERT(trc);
     JS_SET_TRACING_NAME(trc, name);
     if (!thing)
         return;
     MarkKind(trc, thing, GetGCThingTraceKind(thing));
 }
 
 /*** ID Marking ***/
 
 static inline void
-MarkIdInternal(JSTracer *trc, const jsid &id)
+MarkIdInternal(JSTracer *trc, jsid *id)
 {
-    if (JSID_IS_STRING(id))
-        MarkInternal(trc, JSID_TO_STRING(id));
-    else if (JS_UNLIKELY(JSID_IS_OBJECT(id)))
-        MarkInternal(trc, JSID_TO_OBJECT(id));
+    if (JSID_IS_STRING(*id)) {
+        JSString *str = JSID_TO_STRING(*id);
+        MarkInternal(trc, str);
+        *id = ATOM_TO_JSID(reinterpret_cast<JSAtom *>(str));
+    } else if (JS_UNLIKELY(JSID_IS_OBJECT(*id))) {
+        JSObject *obj = JSID_TO_OBJECT(*id);
+        MarkInternal(trc, obj);
+        *id = OBJECT_TO_JSID(obj);
+    }
 }
 
 void
-MarkId(JSTracer *trc, const HeapId &id, const char *name)
+MarkId(JSTracer *trc, HeapId *id, const char *name)
 {
     JS_SET_TRACING_NAME(trc, name);
-    MarkIdInternal(trc, id);
+    MarkIdInternal(trc, id->unsafeGet());
 }
 
 void
-MarkIdRoot(JSTracer *trc, const jsid &id, const char *name)
+MarkIdRoot(JSTracer *trc, jsid *id, const char *name)
 {
+    JS_ROOT_MARKING_ASSERT(trc);
     JS_SET_TRACING_NAME(trc, name);
     MarkIdInternal(trc, id);
 }
 
 void
 MarkIdRange(JSTracer *trc, size_t len, HeapId *vec, const char *name)
 {
     for (size_t i = 0; i < len; ++i) {
         JS_SET_TRACING_INDEX(trc, name, i);
-        MarkIdInternal(trc, vec[i]);
+        MarkIdInternal(trc, vec[i].unsafeGet());
     }
 }
 
 void
 MarkIdRootRange(JSTracer *trc, size_t len, jsid *vec, const char *name)
 {
+    JS_ROOT_MARKING_ASSERT(trc);
     for (size_t i = 0; i < len; ++i) {
         JS_SET_TRACING_INDEX(trc, name, i);
-        MarkIdInternal(trc, vec[i]);
+        MarkIdInternal(trc, &vec[i]);
     }
 }
 
 /*** Value Marking ***/
 
 static inline void
 MarkValueInternal(JSTracer *trc, Value *v)
 {
@@ -311,32 +327,34 @@ MarkValue(JSTracer *trc, HeapValue *v, c
 {
     JS_SET_TRACING_NAME(trc, name);
     MarkValueInternal(trc, v->unsafeGet());
 }
 
 void
 MarkValueRoot(JSTracer *trc, Value *v, const char *name)
 {
+    JS_ROOT_MARKING_ASSERT(trc);
     JS_SET_TRACING_NAME(trc, name);
     MarkValueInternal(trc, v);
 }
 
 void
 MarkValueRange(JSTracer *trc, size_t len, HeapValue *vec, const char *name)
 {
     for (size_t i = 0; i < len; ++i) {
         JS_SET_TRACING_INDEX(trc, name, i);
         MarkValueInternal(trc, vec[i].unsafeGet());
     }
 }
 
 void
 MarkValueRootRange(JSTracer *trc, size_t len, Value *vec, const char *name)
 {
+    JS_ROOT_MARKING_ASSERT(trc);
     for (size_t i = 0; i < len; ++i) {
         JS_SET_TRACING_INDEX(trc, name, i);
         MarkValueInternal(trc, &vec[i]);
     }
 }
 
 /*** Special Marking ***/
 
@@ -347,38 +365,35 @@ MarkValueRootRange(JSTracer *trc, size_t
 static void
 MarkObject(JSTracer *trc, const HeapPtr<GlobalObject, JSScript *> &thing, const char *name)
 {
     JS_SET_TRACING_NAME(trc, name);
     MarkInternal(trc, thing.get());
 }
 
 void
-MarkShape(JSTracer *trc, const HeapPtr<const Shape> &thing, const char *name)
-{
-    JS_SET_TRACING_NAME(trc, name);
-    MarkInternal(trc, const_cast<Shape *>(thing.get()));
-}
-
-void
 MarkValueUnbarriered(JSTracer *trc, Value *v, const char *name)
 {
     JS_SET_TRACING_NAME(trc, name);
     MarkValueInternal(trc, v);
 }
 
 void
 MarkCrossCompartmentValue(JSTracer *trc, HeapValue *v, const char *name)
 {
     if (v->isMarkable()) {
         Cell *cell = (Cell *)v->toGCThing();
         JSRuntime *rt = trc->runtime;
         if (rt->gcCurrentCompartment && cell->compartment() != rt->gcCurrentCompartment)
             return;
 
+        /* In case we're called from a write barrier. */
+        if (rt->gcIncrementalCompartment && cell->compartment() != rt->gcIncrementalCompartment)
+            return;
+
         MarkValue(trc, v, name);
     }
 }
 
 /*** Push Mark Stack ***/
 
 #define JS_COMPARTMENT_ASSERT(rt, thing)                                 \
     JS_ASSERT_IF((rt)->gcCurrentCompartment,                             \
@@ -438,20 +453,20 @@ PushMarkStack(GCMarker *gcmarker, JSScri
      * refer to other scripts only indirectly (like via nested functions) and
      * we cannot get to deep recursion.
      */
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         MarkChildren(gcmarker, thing);
 }
 
 static void
-ScanShape(GCMarker *gcmarker, const Shape *shape);
+ScanShape(GCMarker *gcmarker, Shape *shape);
 
 static void
-PushMarkStack(GCMarker *gcmarker, const Shape *thing)
+PushMarkStack(GCMarker *gcmarker, Shape *thing)
 {
     JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
 
     /* We mark shapes directly rather than pushing on the stack. */
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         ScanShape(gcmarker, thing);
 }
 
@@ -464,22 +479,22 @@ PushMarkStack(GCMarker *gcmarker, BaseSh
     JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
 
     /* We mark base shapes directly rather than pushing on the stack. */
     if (thing->markIfUnmarked(gcmarker->getMarkColor()))
         ScanBaseShape(gcmarker, thing);
 }
 
 static void
-ScanShape(GCMarker *gcmarker, const Shape *shape)
+ScanShape(GCMarker *gcmarker, Shape *shape)
 {
   restart:
     PushMarkStack(gcmarker, shape->base());
 
-    jsid id = shape->maybePropid();
+    const HeapId &id = shape->propidRef();
     if (JSID_IS_STRING(id))
         PushMarkStack(gcmarker, JSID_TO_STRING(id));
     else if (JS_UNLIKELY(JSID_IS_OBJECT(id)))
         PushMarkStack(gcmarker, JSID_TO_OBJECT(id));
 
     shape = shape->previous();
     if (shape && shape->markIfUnmarked(gcmarker->getMarkColor()))
         goto restart;
@@ -538,17 +553,17 @@ ScanLinearString(GCMarker *gcmarker, JSL
  * at the same depth as it was on entry. This way we avoid using tags when
  * pushing ropes to the stack as ropes never leak to other users of the
  * stack. This also assumes that a rope can only point to other ropes or
  * linear strings; it cannot refer to GC things of other types.
  */
 static void
 ScanRope(GCMarker *gcmarker, JSRope *rope)
 {
-    uintptr_t *savedTos = gcmarker->stack.tos;
+    ptrdiff_t savedPos = gcmarker->stack.position();
     for (;;) {
         JS_ASSERT(GetGCThingTraceKind(rope) == JSTRACE_STRING);
         JS_ASSERT(rope->JSString::isRope());
         JS_COMPARTMENT_ASSERT_STR(gcmarker->runtime, rope);
         JS_ASSERT(rope->isMarked());
         JSRope *next = NULL;
 
         JSString *right = rope->rightChild();
@@ -570,24 +585,24 @@ ScanRope(GCMarker *gcmarker, JSRope *rop
                  */
                 if (next && !gcmarker->stack.push(reinterpret_cast<uintptr_t>(next)))
                     gcmarker->delayMarkingChildren(next);
                 next = &left->asRope();
             }
         }
         if (next) {
             rope = next;
-        } else if (savedTos != gcmarker->stack.tos) {
-            JS_ASSERT(savedTos < gcmarker->stack.tos);
+        } else if (savedPos != gcmarker->stack.position()) {
+            JS_ASSERT(savedPos < gcmarker->stack.position());
             rope = reinterpret_cast<JSRope *>(gcmarker->stack.pop());
         } else {
             break;
         }
     }
-    JS_ASSERT(savedTos == gcmarker->stack.tos);
+    JS_ASSERT(savedPos == gcmarker->stack.position());
  }
 
 static inline void
 ScanString(GCMarker *gcmarker, JSString *str)
 {
     if (str->isLinear())
         ScanLinearString(gcmarker, &str->asLinear());
     else
@@ -603,38 +618,20 @@ PushMarkStack(GCMarker *gcmarker, JSStri
      * As a string can only refer to other strings, we fully scan its GC graph
      * using the explicit stack when navigating the rope tree to avoid
      * dealing with strings on the stack in drainMarkStack.
      */
     if (str->markIfUnmarked())
         ScanString(gcmarker, str);
 }
 
-static inline void
-PushValueArray(GCMarker *gcmarker, JSObject* obj, HeapValue *start, HeapValue *end)
-{
-    JS_ASSERT(start <= end);
-    uintptr_t tagged = reinterpret_cast<uintptr_t>(obj) | GCMarker::ValueArrayTag;
-    uintptr_t startAddr = reinterpret_cast<uintptr_t>(start);
-    uintptr_t endAddr = reinterpret_cast<uintptr_t>(end);
-
-    /* Push in the reverse order so obj will be on top. */
-    if (!gcmarker->stack.push(endAddr, startAddr, tagged)) {
-        /*
-         * If we cannot push the array, we trigger delay marking for the whole
-         * object.
-         */
-        gcmarker->delayMarkingChildren(obj);
-    }
-}
-
 void
 MarkChildren(JSTracer *trc, JSObject *obj)
 {
-    MarkTypeObject(trc, obj->typeFromGC(), "type");
+    MarkTypeObject(trc, &obj->typeFromGC(), "type");
 
     Shape *shape = obj->lastProperty();
     MarkShapeUnbarriered(trc, shape, "shape");
 
     Class *clasp = shape->getObjectClass();
     if (clasp->trace)
         clasp->trace(trc, obj);
 
@@ -705,22 +702,22 @@ MarkChildren(JSTracer *trc, JSScript *sc
     if (script->types)
         script->types->trace(trc);
 
     if (script->hasAnyBreakpointsOrStepMode())
         script->markTrapClosures(trc);
 }
 
 static void
-MarkChildren(JSTracer *trc, const Shape *shape)
+MarkChildren(JSTracer *trc, Shape *shape)
 {
     MarkBaseShapeUnbarriered(trc, shape->base(), "base");
-    MarkId(trc, shape->maybePropid(), "propid");
+    MarkId(trc, &shape->propidRef(), "propid");
     if (shape->previous())
-        MarkShape(trc, shape->previous(), "parent");
+        MarkShape(trc, &shape->previousRef(), "parent");
 }
 
 static inline void
 MarkBaseShapeGetterSetter(JSTracer *trc, BaseShape *base)
 {
     if (base->hasGetterObject())
         MarkObjectUnbarriered(trc, base->getterObject(), "getter");
     if (base->hasSetterObject())
@@ -771,22 +768,22 @@ MarkCycleCollectorChildren(JSTracer *trc
  * This function is used by the cycle collector to trace through a
  * shape. The cycle collector does not care about shapes or base
  * shapes, so those are not marked. Instead, any shapes or base shapes
  * that are encountered have their children marked. Stack space is
  * bounded. If two shapes in a row have the same parent pointer, the
  * parent pointer will only be marked once.
  */
 void
-MarkCycleCollectorChildren(JSTracer *trc, const Shape *shape)
+MarkCycleCollectorChildren(JSTracer *trc, Shape *shape)
 {
     JSObject *prevParent = NULL;
     do {
         MarkCycleCollectorChildren(trc, shape->base(), &prevParent);
-        MarkId(trc, shape->maybePropid(), "propid");
+        MarkId(trc, &shape->propidRef(), "propid");
         shape = shape->previous();
     } while (shape);
 }
 
 static void
 ScanTypeObject(GCMarker *gcmarker, types::TypeObject *type)
 {
     if (!type->singleton) {
@@ -819,49 +816,200 @@ ScanTypeObject(GCMarker *gcmarker, types
 static void
 MarkChildren(JSTracer *trc, types::TypeObject *type)
 {
     if (!type->singleton) {
         unsigned count = type->getPropertyCount();
         for (unsigned i = 0; i < count; i++) {
             types::Property *prop = type->getProperty(i);
             if (prop)
-                MarkId(trc, prop->id, "type_prop");
+                MarkId(trc, &prop->id, "type_prop");
         }
     }
 
     if (type->proto)
-        MarkObject(trc, type->proto, "type_proto");
+        MarkObject(trc, &type->proto, "type_proto");
 
     if (type->singleton && !type->lazy())
-        MarkObject(trc, type->singleton, "type_singleton");
+        MarkObject(trc, &type->singleton, "type_singleton");
 
     if (type->newScript) {
-        MarkObject(trc, type->newScript->fun, "type_new_function");
-        MarkShape(trc, type->newScript->shape, "type_new_shape");
+        MarkObject(trc, &type->newScript->fun, "type_new_function");
+        MarkShape(trc, &type->newScript->shape, "type_new_shape");
     }
 
     if (type->interpretedFunction)
-        MarkObject(trc, type->interpretedFunction, "type_function");
+        MarkObject(trc, &type->interpretedFunction, "type_function");
 }
 
 #ifdef JS_HAS_XML_SUPPORT
 static void
 MarkChildren(JSTracer *trc, JSXML *xml)
 {
     js_TraceXML(trc, xml);
 }
 #endif
 
+template<typename T>
+void
+PushArenaTyped(GCMarker *gcmarker, ArenaHeader *aheader)
+{
+    for (CellIterUnderGC i(aheader); !i.done(); i.next())
+        PushMarkStack(gcmarker, i.get<T>());
+}
+
+void
+PushArena(GCMarker *gcmarker, ArenaHeader *aheader)
+{
+    switch (MapAllocToTraceKind(aheader->getAllocKind())) {
+      case JSTRACE_OBJECT:
+        PushArenaTyped<JSObject>(gcmarker, aheader);
+        break;
+
+      case JSTRACE_STRING:
+        PushArenaTyped<JSString>(gcmarker, aheader);
+        break;
+
+      case JSTRACE_SCRIPT:
+        PushArenaTyped<JSScript>(gcmarker, aheader);
+        break;
+
+      case JSTRACE_SHAPE:
+        PushArenaTyped<js::Shape>(gcmarker, aheader);
+        break;
+
+      case JSTRACE_BASE_SHAPE:
+        PushArenaTyped<js::BaseShape>(gcmarker, aheader);
+        break;
+
+      case JSTRACE_TYPE_OBJECT:
+        PushArenaTyped<js::types::TypeObject>(gcmarker, aheader);
+        break;
+
+#if JS_HAS_XML_SUPPORT
+      case JSTRACE_XML:
+        PushArenaTyped<JSXML>(gcmarker, aheader);
+        break;
+#endif
+    }
+}
+
 } /* namespace gc */
 
 using namespace js::gc;
 
+struct ValueArrayLayout
+{
+    union {
+        HeapValue *end;
+        js::Class *clasp;
+    };
+    union {
+        HeapValue *start;
+        uintptr_t index;
+    };
+    JSObject *obj;
+
+    static void staticAsserts() {
+        /* This should have the same layout as three mark stack items. */
+        JS_STATIC_ASSERT(sizeof(ValueArrayLayout) == 3 * sizeof(uintptr_t));
+    }
+};
+
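
The struct above is a view over three raw mark-stack words pushed by pushValueArray, so its size has to match exactly; the static assert pins the layout down. The same overlay check in isolation (names hypothetical):

    #include <cstdint>

    /* Hypothetical overlay of three stack words, mirroring ValueArrayLayout. */
    struct ThreeWords {
        uintptr_t first;
        uintptr_t second;
        uintptr_t third;
    };

    /* Compile-time guarantee that reinterpreting three pushed words is safe. */
    static_assert(sizeof(ThreeWords) == 3 * sizeof(uintptr_t),
                  "overlay must cover exactly three mark stack slots");
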
+/*
+ * During incremental GC, we return from drainMarkStack without having processed
+ * the entire stack. At that point, JS code can run and reallocate slot arrays
+ * that are stored on the stack. To prevent this from happening, we replace all
+ * ValueArrayTag stack items with SavedValueArrayTag. In the latter, slots
+ * pointers are replaced with slot indexes.
+ *
+ * We also replace the slot array end pointer (which can be derived from the obj
+ * pointer) with the object's class. During JS execution, array slowification
+ * can cause the layout of slots to change. We can observe that slowification
+ * happened if the class changed; in that case, we completely rescan the array.
+ */
+void
+GCMarker::saveValueRanges()
+{
+    for (uintptr_t *p = stack.tos; p > stack.stack; ) {
+        uintptr_t tag = *--p & StackTagMask;
+        if (tag == ValueArrayTag) {
+            p -= 2;
+            ValueArrayLayout *arr = reinterpret_cast<ValueArrayLayout *>(p);
+            JSObject *obj = arr->obj;
+
+            if (obj->getClass() == &ArrayClass) {
+                HeapValue *vp = obj->getDenseArrayElements();
+                JS_ASSERT(arr->start >= vp &&
+                          arr->end == vp + obj->getDenseArrayInitializedLength());
+                arr->index = arr->start - vp;
+            } else {
+                HeapValue *vp = obj->fixedSlots();
+                unsigned nfixed = obj->numFixedSlots();
+                if (arr->start >= vp && arr->start < vp + nfixed) {
+                    JS_ASSERT(arr->end == vp + Min(nfixed, obj->slotSpan()));
+                    arr->index = arr->start - vp;
+                } else {
+                    JS_ASSERT(arr->start >= obj->slots &&
+                              arr->end == obj->slots + obj->slotSpan() - nfixed);
+                    arr->index = (arr->start - obj->slots) + nfixed;
+                }
+            }
+            arr->clasp = obj->getClass();
+            p[2] |= SavedValueArrayTag;
+        } else if (tag == SavedValueArrayTag) {
+            p -= 2;
+        }
+    }
+}
+
+bool
+GCMarker::restoreValueArray(JSObject *obj, void **vpp, void **endp)
+{
+    uintptr_t start = stack.pop();
+    js::Class *clasp = reinterpret_cast<js::Class *>(stack.pop());
+
+    JS_ASSERT(obj->getClass() == clasp ||
+              (clasp == &ArrayClass && obj->getClass() == &SlowArrayClass));
+
+    if (clasp == &ArrayClass) {
+        if (obj->getClass() != &ArrayClass)
+            return false;
+
+        uint32_t initlen = obj->getDenseArrayInitializedLength();
+        HeapValue *vp = obj->getDenseArrayElements();
+        if (start < initlen) {
+            *vpp = vp + start;
+            *endp = vp + initlen;
+        } else {
+            /* The object shrank, in which case no scanning is needed. */
+            *vpp = *endp = vp;
+        }
+    } else {
+        HeapValue *vp = obj->fixedSlots();
+        unsigned nfixed = obj->numFixedSlots();
+        unsigned nslots = obj->slotSpan();
+        if (start < nfixed) {
+            *vpp = vp + start;
+            *endp = vp + Min(nfixed, nslots);
+        } else if (start < nslots) {
+            *vpp = obj->slots + start - nfixed;
+            *endp = obj->slots + nslots - nfixed;
+        } else {
+            /* The object shrank, in which case no scanning is needed. */
+            *vpp = *endp = obj->slots;
+        }
+    }
+
+    JS_ASSERT(*vpp <= *endp);
+    return true;
+}
+
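
Between them, saveValueRanges and restoreValueArray round-trip a slot cursor through an index, so it stays meaningful even if the slot memory moves or the object shrinks between slices. The idea in miniature, with a hypothetical fixed-slot object:

    #include <cassert>
    #include <cstddef>

    /* Hypothetical object with a fixed slot array. */
    struct Obj {
        enum { N = 8 };
        int slots[N];
    };

    /* Before yielding to JS: reduce the raw cursor to an index. */
    static size_t save(const Obj *obj, const int *cursor) {
        return (size_t) (cursor - obj->slots);
    }

    /* When marking resumes: rebuild the cursor from the index, clamping to
     * the end if the object shrank in the meantime. */
    static int *restore(Obj *obj, size_t index) {
        return obj->slots + (index < (size_t) Obj::N ? index : (size_t) Obj::N);
    }

    int main() {
        Obj before;
        size_t idx = save(&before, before.slots + 3);

        Obj after;  /* pretend the slots were reallocated during the slice */
        assert(restore(&after, idx) == after.slots + 3);
        return 0;
    }
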
 inline void
-GCMarker::processMarkStackTop()
+GCMarker::processMarkStackTop(SliceBudget &budget)
 {
     /*
      * The function uses explicit goto and implements the scanning of the
      * object directly. It allows us to eliminate the tail recursion and
      * significantly improve the marking performance; see bug 641025.
      */
     HeapValue *vp, *end;
     JSObject *obj;
@@ -880,107 +1028,156 @@ GCMarker::processMarkStackTop()
         JS_ASSERT((addr3 - addr2) % sizeof(Value) == 0);
         vp = reinterpret_cast<HeapValue *>(addr2);
         end = reinterpret_cast<HeapValue *>(addr3);
         goto scan_value_array;
     }
 
     if (tag == ObjectTag) {
         obj = reinterpret_cast<JSObject *>(addr);
+        JS_COMPARTMENT_ASSERT(runtime, obj);
         goto scan_obj;
     }
 
     if (tag == TypeTag) {
         ScanTypeObject(this, reinterpret_cast<types::TypeObject *>(addr));
+    } else if (tag == SavedValueArrayTag) {
+        JS_ASSERT(!(addr & Cell::CellMask));
+        obj = reinterpret_cast<JSObject *>(addr);
+        if (restoreValueArray(obj, (void **)&vp, (void **)&end))
+            goto scan_value_array;
+        else
+            goto scan_obj;
     } else {
         JS_ASSERT(tag == XmlTag);
         MarkChildren(this, reinterpret_cast<JSXML *>(addr));
     }
+    budget.step();
     return;
 
   scan_value_array:
     JS_ASSERT(vp <= end);
     while (vp != end) {
+        budget.step();
+        if (budget.isOverBudget()) {
+            pushValueArray(obj, vp, end);
+            return;
+        }
+
         const Value &v = *vp++;
         if (v.isString()) {
             JSString *str = v.toString();
+            JS_COMPARTMENT_ASSERT_STR(runtime, str);
             if (str->markIfUnmarked())
                 ScanString(this, str);
         } else if (v.isObject()) {
             JSObject *obj2 = &v.toObject();
+            JS_COMPARTMENT_ASSERT(runtime, obj2);
             if (obj2->markIfUnmarked(getMarkColor())) {
-                PushValueArray(this, obj, vp, end);
+                pushValueArray(obj, vp, end);
                 obj = obj2;
                 goto scan_obj;
             }
         }
     }
     return;
 
   scan_obj:
     {
+        JS_COMPARTMENT_ASSERT(runtime, obj);
+
+        budget.step();
+        if (budget.isOverBudget()) {
+            pushObject(obj);
+            return;
+        }
+
         types::TypeObject *type = obj->typeFromGC();
         PushMarkStack(this, type);
 
         Shape *shape = obj->lastProperty();
         PushMarkStack(this, shape);
 
         /* Call the trace hook if necessary. */
         Class *clasp = shape->getObjectClass();
         if (clasp->trace) {
             if (clasp == &ArrayClass) {
                 JS_ASSERT(!shape->isNative());
                 vp = obj->getDenseArrayElements();
                 end = vp + obj->getDenseArrayInitializedLength();
                 goto scan_value_array;
+            } else {
+                JS_ASSERT_IF(runtime->gcIncrementalState != NO_INCREMENTAL,
+                             clasp->flags & JSCLASS_IMPLEMENTS_BARRIERS);
             }
             clasp->trace(this, obj);
         }
 
         if (!shape->isNative())
             return;
 
         unsigned nslots = obj->slotSpan();
         vp = obj->fixedSlots();
         if (obj->slots) {
             unsigned nfixed = obj->numFixedSlots();
             if (nslots > nfixed) {
-                PushValueArray(this, obj, vp, vp + nfixed);
+                pushValueArray(obj, vp, vp + nfixed);
                 vp = obj->slots;
                 end = vp + (nslots - nfixed);
                 goto scan_value_array;
             }
         }
         JS_ASSERT(nslots <= obj->numFixedSlots());
         end = vp + nslots;
         goto scan_value_array;
     }
 }
 
-void
-GCMarker::drainMarkStack()
+bool
+GCMarker::drainMarkStack(SliceBudget &budget)
 {
+#ifdef DEBUG
     JSRuntime *rt = runtime;
-    rt->gcCheckCompartment = rt->gcCurrentCompartment;
+
+    struct AutoCheckCompartment {
+        JSRuntime *runtime;
+        AutoCheckCompartment(JSRuntime *rt) : runtime(rt) {
+            runtime->gcCheckCompartment = runtime->gcCurrentCompartment;
+        }
+        ~AutoCheckCompartment() { runtime->gcCheckCompartment = NULL; }
+    } acc(rt);
+#endif
+
+    if (budget.isOverBudget())
+        return false;
 
     for (;;) {
-        while (!stack.isEmpty())
-            processMarkStackTop();
+        while (!stack.isEmpty()) {
+            processMarkStackTop(budget);
+            if (budget.isOverBudget()) {
+                saveValueRanges();
+                return false;
+            }
+        }
+
         if (!hasDelayedChildren())
             break;
 
         /*
          * Mark children of things that caused too deep recursion during the
          * above tracing. Don't do this until we're done with everything
          * else.
          */
-        markDelayedChildren();
+        if (!markDelayedChildren(budget)) {
+            saveValueRanges();
+            return false;
+        }
     }
 
-    rt->gcCheckCompartment = NULL;
+    return true;
 }
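
drainMarkStack now reports whether it finished, and every unit of work is metered through the SliceBudget. A counter-based stand-in for that contract (the real budget may also be time-based; only the step/isOverBudget semantics are assumed here):

    #include <vector>

    /* Minimal stand-in for SliceBudget: a work-unit countdown. */
    struct SliceBudget {
        long counter;
        explicit SliceBudget(long units) : counter(units) {}
        void step() { counter--; }
        bool isOverBudget() const { return counter <= 0; }
    };

    /* Returns false when out of budget; the caller re-enters with a fresh
     * budget each slice until the stack finally drains. */
    static bool drain(std::vector<int> &stack, SliceBudget &budget) {
        while (!stack.empty()) {
            budget.step();
            if (budget.isOverBudget())
                return false;       /* leave the rest for the next slice */
            stack.pop_back();       /* "mark" one item */
        }
        return true;
    }

    int main() {
        std::vector<int> stack(25, 0);
        bool done = false;
        while (!done) {
            SliceBudget budget(10); /* fresh budget per slice */
            done = drain(stack, budget);
        }
        return 0;
    }
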
 
 void
 TraceChildren(JSTracer *trc, void *thing, JSGCTraceKind kind)
 {
     switch (kind) {
       case JSTRACE_OBJECT:
         MarkChildren(trc, static_cast<JSObject *>(thing));
--- a/js/src/jsgcmark.h
+++ b/js/src/jsgcmark.h
@@ -40,18 +40,18 @@ namespace gc {
  *     forgiving, since it doesn't demand a HeapPtr as an argument. Its use
  *     should always be accompanied by a comment explaining how write barriers
  *     are implemented for the given field.
  *
  * Additionally, the functions MarkObjectRange and MarkObjectRootRange are
  * defined for marking arrays of object pointers.
  */
 #define DeclMarker(base, type)                                                                    \
-void Mark##base(JSTracer *trc, const HeapPtr<type> &thing, const char *name);                     \
-void Mark##base##Root(JSTracer *trc, type *thing, const char *name);                              \
+void Mark##base(JSTracer *trc, HeapPtr<type> *thing, const char *name);                           \
+void Mark##base##Root(JSTracer *trc, type **thingp, const char *name);                            \
 void Mark##base##Unbarriered(JSTracer *trc, type *thing, const char *name);                       \
 void Mark##base##Range(JSTracer *trc, size_t len, HeapPtr<type> *thing, const char *name);        \
 void Mark##base##RootRange(JSTracer *trc, size_t len, type **thing, const char *name);
 
 DeclMarker(BaseShape, BaseShape)
 DeclMarker(Object, ArgumentsObject)
 DeclMarker(Object, GlobalObject)
 DeclMarker(Object, JSObject)
@@ -78,20 +78,20 @@ void
 MarkKind(JSTracer *trc, void *thing, JSGCTraceKind kind);
 
 void
 MarkGCThingRoot(JSTracer *trc, void *thing, const char *name);
 
 /*** ID Marking ***/
 
 void
-MarkId(JSTracer *trc, const HeapId &id, const char *name);
+MarkId(JSTracer *trc, HeapId *id, const char *name);
 
 void
-MarkIdRoot(JSTracer *trc, const jsid &id, const char *name);
+MarkIdRoot(JSTracer *trc, jsid *id, const char *name);
 
 void
 MarkIdRange(JSTracer *trc, size_t len, js::HeapId *vec, const char *name);
 
 void
 MarkIdRootRange(JSTracer *trc, size_t len, jsid *vec, const char *name);
 
 /*** Value Marking ***/
@@ -111,20 +111,16 @@ MarkValueRootRange(JSTracer *trc, size_t
 inline void
 MarkValueRootRange(JSTracer *trc, Value *begin, Value *end, const char *name)
 {
     MarkValueRootRange(trc, end - begin, begin, name);
 }
 
 /*** Special Cases ***/
 
-/* TypeNewObject contains a HeapPtr<const Shape> that needs a unique cast. */
-void
-MarkShape(JSTracer *trc, const HeapPtr<const Shape> &thing, const char *name);
-
 /* Direct value access used by the write barriers and the methodjit */
 void
 MarkValueUnbarriered(JSTracer *trc, Value *v, const char *name);
 
 /*
  * Mark a value that may be in a different compartment from the compartment
  * being GC'd. (Although it won't be marked if it's in the wrong compartment.)
  */
@@ -139,38 +135,42 @@ void
 MarkChildren(JSTracer *trc, JSObject *obj);
 
 /*
  * Trace through the shape and any shapes it contains to mark
  * non-shape children. This is exposed to the JS API as
  * JS_TraceShapeCycleCollectorChildren.
  */
 void
-MarkCycleCollectorChildren(JSTracer *trc, const Shape *shape);
+MarkCycleCollectorChildren(JSTracer *trc, Shape *shape);
+
+void
+PushArena(GCMarker *gcmarker, ArenaHeader *aheader);
 
 /*** Generic ***/
+
 /*
  * The generic Mark() interface should only be used by code that must be
  * templated.  Other uses should use the more specific, type-named functions.
  */
 
 inline void
 Mark(JSTracer *trc, HeapValue *v, const char *name)
 {
     MarkValue(trc, v, name);
 }
 
 inline void
-Mark(JSTracer *trc, const HeapPtr<JSObject> &o, const char *name)
+Mark(JSTracer *trc, HeapPtr<JSObject> *o, const char *name)
 {
     MarkObject(trc, o, name);
 }
 
 inline void
-Mark(JSTracer *trc, const HeapPtr<JSXML> &xml, const char *name)
+Mark(JSTracer *trc, HeapPtr<JSXML> *xml, const char *name)
 {
     MarkXML(trc, xml, name);
 }
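
Since the overloads above now take pointers, templated containers can mark whatever heap field they hold without naming its concrete type. A hypothetical caller of this shape:

    #include <cstddef>

    struct JSTracer;

    /* Hypothetical wrapper and overload mirroring the generic Mark() shape. */
    template <typename T> struct HeapPtr { T *ptr; };

    template <typename T>
    void Mark(JSTracer *trc, HeapPtr<T> *thingp, const char *name) {
        (void) trc; (void) thingp; (void) name;  /* would forward to MarkObject etc. */
    }

    /* A templated container marks its elements generically. */
    template <typename T>
    void MarkAll(JSTracer *trc, HeapPtr<T> *vec, size_t len, const char *name) {
        for (size_t i = 0; i < len; i++)
            Mark(trc, &vec[i], name);
    }

    int main() {
        HeapPtr<int> vec[4] = {};
        MarkAll(static_cast<JSTracer *>(NULL), vec, 4, "demo");
        return 0;
    }
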
 
 inline bool
 IsMarked(const Value &v)
 {
     if (v.isMarkable())
--- a/js/src/jsinfer.cpp
+++ b/js/src/jsinfer.cpp
@@ -2190,17 +2190,17 @@ TypeCompartment::nukeTypes(JSContext *cx
 
     /*
      * We may or may not be under the GC. In either case don't allocate, and
      * acquire the GC lock so we can update inferenceEnabled for all contexts.
      */
 
 #ifdef JS_THREADSAFE
     AutoLockGC maybeLock;
-    if (!cx->runtime->gcMarkAndSweep)
+    if (!cx->runtime->gcRunning)
         maybeLock.lock(cx->runtime);
 #endif
 
     inferenceEnabled = false;
 
     /* Update the cached inferenceEnabled bit in all contexts. */
     for (JSCList *cl = cx->runtime->contextList.next;
          cl != &cx->runtime->contextList;
@@ -2613,17 +2613,17 @@ struct types::ObjectTableKey
     jsid *ids;
     uint32_t nslots;
     uint32_t nfixed;
     JSObject *proto;
 
     typedef JSObject * Lookup;
 
     static inline uint32_t hash(JSObject *obj) {
-        return (uint32_t) (JSID_BITS(obj->lastProperty()->propid()) ^
+        return (uint32_t) (JSID_BITS(obj->lastProperty()->propid().get()) ^
                          obj->slotSpan() ^ obj->numFixedSlots() ^
                          ((uint32_t)(size_t)obj->getProto() >> 2));
     }
 
     static inline bool match(const ObjectTableKey &v, JSObject *obj) {
         if (obj->slotSpan() != v.nslots ||
             obj->numFixedSlots() != v.nfixed ||
             obj->getProto() != v.proto) {
--- a/js/src/jsinfer.h
+++ b/js/src/jsinfer.h
@@ -663,17 +663,17 @@ struct TypeNewScript
 
     /* Allocation kind to use for newly constructed objects. */
     gc::AllocKind allocKind;
 
     /*
      * Shape to use for newly constructed objects. Reflects all definite
      * properties the object will have.
      */
-    HeapPtr<const Shape> shape;
+    HeapPtrShape  shape;
 
     /*
      * Order in which properties become initialized. We need this in case a
      * scripted setter is added to one of the object's prototypes while it is
      * in the middle of being initialized, so we can walk the stack looking
      * for in-progress objects and fix up any which were prematurely
      * set with their final shape. Initialization can traverse stack frames,
      * in which case FRAME_PUSH/FRAME_POP are used.
--- a/js/src/jsinferinlines.h
+++ b/js/src/jsinferinlines.h
@@ -736,17 +736,17 @@ TypeScript::SetArgument(JSContext *cx, J
         SetArgument(cx, script, arg, type);
     }
 }
 
 void
 TypeScript::trace(JSTracer *trc)
 {
     if (hasScope() && global)
-        gc::MarkObject(trc, global, "script_global");
+        gc::MarkObject(trc, &global, "script_global");
 
     /* Note: nesting does not keep anything alive. */
 }
 
 /////////////////////////////////////////////////////////////////////
 // TypeCompartment
 /////////////////////////////////////////////////////////////////////
 
@@ -1338,17 +1338,17 @@ TypeNewScript::writeBarrierPre(TypeNewSc
 {
 #ifdef JSGC_INCREMENTAL
     if (!newScript)
         return;
 
     JSCompartment *comp = newScript->fun->compartment();
     if (comp->needsBarrier()) {
         MarkObjectUnbarriered(comp->barrierTracer(), newScript->fun, "write barrier");
-        MarkShape(comp->barrierTracer(), newScript->shape, "write barrier");
+        MarkShape(comp->barrierTracer(), &newScript->shape, "write barrier");
     }
 #endif
 }
 
 inline void
 TypeNewScript::writeBarrierPost(TypeNewScript *newScript, void *addr)
 {
 }
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -1142,17 +1142,17 @@ inline InterpreterFrames::~InterpreterFr
 void
 js::AssertValidPropertyCacheHit(JSContext *cx,
                                 JSObject *start, JSObject *found,
                                 PropertyCacheEntry *entry)
 {
     jsbytecode *pc;
     cx->stack.currentScript(&pc);
 
-    uint32_t sample = cx->runtime->gcNumber;
+    uint64_t sample = cx->runtime->gcNumber;
     PropertyCacheEntry savedEntry = *entry;
 
     PropertyName *name = GetNameFromBytecode(cx, pc, JSOp(*pc), js_CodeSpec[*pc]);
 
     JSObject *obj, *pobj;
     JSProperty *prop;
     JSBool ok;
 
@@ -1249,17 +1249,17 @@ TypeCheckNextBytecode(JSContext *cx, JSS
 #endif
 }
 
 JS_NEVER_INLINE bool
 js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
 {
     JSAutoResolveFlags rf(cx, RESOLVE_INFER);
 
-    gc::VerifyBarriers(cx, true);
+    gc::MaybeVerifyBarriers(cx, true);
 
     JS_ASSERT(!cx->compartment->activeAnalysis);
 
 #if JS_THREADED_INTERP
 #define CHECK_PCCOUNT_INTERRUPTS() JS_ASSERT_IF(script->pcCounters, jumpTable == interruptJumpTable)
 #else
 #define CHECK_PCCOUNT_INTERRUPTS() JS_ASSERT_IF(script->pcCounters, switchMask == -1)
 #endif
@@ -1284,17 +1284,17 @@ js::Interpret(JSContext *cx, StackFrame 
 
     register void * const *jumpTable = normalJumpTable;
 
     typedef GenericInterruptEnabler<void * const *> InterruptEnabler;
     InterruptEnabler interruptEnabler(&jumpTable, interruptJumpTable);
 
 # define DO_OP()            JS_BEGIN_MACRO                                    \
                                 CHECK_PCCOUNT_INTERRUPTS();                   \
-                                js::gc::VerifyBarriers(cx);                   \
+                                js::gc::MaybeVerifyBarriers(cx);              \
                                 JS_EXTENSION_(goto *jumpTable[op]);           \
                             JS_END_MACRO
 # define DO_NEXT_OP(n)      JS_BEGIN_MACRO                                    \
                                 TypeCheckNextBytecode(cx, script, n, regs);   \
                                 op = (JSOp) *(regs.pc += (n));                \
                                 DO_OP();                                      \
                             JS_END_MACRO
 
@@ -1561,17 +1561,17 @@ js::Interpret(JSContext *cx, StackFrame 
         JS_ASSERT(js_CodeSpec[op].length == 1);
         len = 1;
       advance_pc:
         regs.pc += len;
         op = (JSOp) *regs.pc;
 
       do_op:
         CHECK_PCCOUNT_INTERRUPTS();
-        js::gc::VerifyBarriers(cx);
+        js::gc::MaybeVerifyBarriers(cx);
         switchOp = intN(op) | switchMask;
       do_switch:
         switch (switchOp) {
 #endif
 
 #if JS_THREADED_INTERP
   interrupt:
 #else /* !JS_THREADED_INTERP */
@@ -4419,11 +4419,11 @@ END_CASE(JSOP_ARRAYPUSH)
 #ifdef JS_METHODJIT
     /*
      * This path is used when it's guaranteed the method can be finished
      * inside the JIT.
      */
   leave_on_safe_point:
 #endif
 
-    gc::VerifyBarriers(cx, true);
+    gc::MaybeVerifyBarriers(cx, true);
     return interpReturnOK;
 }
--- a/js/src/jsiter.cpp
+++ b/js/src/jsiter.cpp
@@ -84,17 +84,17 @@ using namespace js;
 using namespace js::gc;
 
 static void iterator_finalize(JSContext *cx, JSObject *obj);
 static void iterator_trace(JSTracer *trc, JSObject *obj);
 static JSObject *iterator_iterator(JSContext *cx, JSObject *obj, JSBool keysonly);
 
 Class js::IteratorClass = {
     "Iterator",
-    JSCLASS_HAS_PRIVATE |
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_CACHED_PROTO(JSProto_Iterator),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
     JS_ConvertStub,
@@ -143,19 +143,19 @@ Class js::ElementIteratorClass = {
 };
 
 static const gc::AllocKind ITERATOR_FINALIZE_KIND = gc::FINALIZE_OBJECT2;
 
 void
 NativeIterator::mark(JSTracer *trc)
 {
     for (HeapPtr<JSFlatString> *str = begin(); str < end(); str++)
-        MarkString(trc, *str, "prop");
+        MarkString(trc, str, "prop");
     if (obj)
-        MarkObject(trc, obj, "obj");
+        MarkObject(trc, &obj, "obj");
 }
 
 static void
 iterator_finalize(JSContext *cx, JSObject *obj)
 {
     JS_ASSERT(obj->isIterator());
 
     NativeIterator *ni = obj->getNativeIterator();
@@ -1414,17 +1414,17 @@ generator_trace(JSTracer *trc, JSObject 
         return;
 
     JS_ASSERT(gen->liveFrame() == gen->floatingFrame());
     MarkGenerator(trc, gen);
 }
 
 Class js::GeneratorClass = {
     "Generator",
-    JSCLASS_HAS_PRIVATE,
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS,
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
     JS_ConvertStub,
     generator_finalize,
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -409,18 +409,21 @@ js_TraceSharpMap(JSTracer *trc, JSSharpO
      * confusing js_EnterSharpObject. So to address the problem we simply
      * mark all objects from map->table.
      *
      * An alternative "proper" solution is to use JSTempValueRooter in
      * MarkSharpObjects with code to remove during finalization entries
      * with otherwise unreachable objects. But this is way too complex
      * to justify spending efforts.
      */
-    for (JSSharpTable::Range r = map->table.all(); !r.empty(); r.popFront())
-        MarkObjectRoot(trc, r.front().key, "sharp table entry");
+    for (JSSharpTable::Range r = map->table.all(); !r.empty(); r.popFront()) {
+        JSObject *tmp = r.front().key;
+        MarkObjectRoot(trc, &tmp, "sharp table entry");
+        JS_ASSERT(tmp == r.front().key);
+    }
 }
 
 #if JS_HAS_TOSOURCE
 static JSBool
 obj_toSource(JSContext *cx, uintN argc, Value *vp)
 {
     bool comma = false;
     const jschar *vchars;
@@ -2755,16 +2758,23 @@ NewObject(JSContext *cx, Class *clasp, t
     HeapValue *slots;
     if (!PreallocateObjectDynamicSlots(cx, shape, &slots))
         return NULL;
 
     JSObject *obj = JSObject::create(cx, kind, shape, typeRoot, slots);
     if (!obj)
         return NULL;
 
+    /*
+     * This will cancel an already-running incremental GC from doing any more
+     * slices, and it will prevent any future incremental GCs.
+     */
+    if (clasp->trace && !(clasp->flags & JSCLASS_IMPLEMENTS_BARRIERS))
+        cx->runtime->gcIncrementalEnabled = false;
+
     Probes::createObject(cx, obj);
     return obj;
 }
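
The check added above is the enforcement point for JSCLASS_IMPLEMENTS_BARRIERS: a class with a custom trace hook that has not been audited for barriers makes incremental marking unsound, so incremental GC is switched off for good. The shape of the check, with stand-in types (the flag's bit value here is an assumption):

    #include <cstdint>

    /* Hypothetical flag bit standing in for JSCLASS_IMPLEMENTS_BARRIERS. */
    static const uint32_t IMPLEMENTS_BARRIERS = 1u << 5;

    struct Class {
        uint32_t flags;
        void (*trace)(void *trc, void *obj);
    };

    struct Runtime {
        bool gcIncrementalEnabled;
    };

    /* Mirrors the NewObject check: traced but unaudited classes disable
     * incremental GC permanently rather than risk a missed barrier. */
    static void checkBarrierSupport(Runtime *rt, const Class *clasp) {
        if (clasp->trace && !(clasp->flags & IMPLEMENTS_BARRIERS))
            rt->gcIncrementalEnabled = false;
    }

    static void legacyTrace(void *, void *) {}

    int main() {
        Runtime rt = { true };
        Class legacy = { 0, legacyTrace };  /* trace hook, no barrier flag */
        checkBarrierSupport(&rt, &legacy);
        return rt.gcIncrementalEnabled ? 1 : 0;
    }
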
 
 JSObject *
 js::NewObjectWithGivenProto(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent,
                             gc::AllocKind kind)
 {
@@ -3467,27 +3477,27 @@ JSObject::TradeGuts(JSContext *cx, JSObj
         if (a->isNative())
             a->shape_->setNumFixedSlots(reserved.newafixed);
         else
             a->shape_ = reserved.newashape;
 
         a->slots = reserved.newaslots;
         a->initSlotRange(0, reserved.bvals.begin(), bcap);
         if (a->hasPrivate())
-            a->setPrivate(bpriv);
+            a->initPrivate(bpriv);
 
         if (b->isNative())
             b->shape_->setNumFixedSlots(reserved.newbfixed);
         else
             b->shape_ = reserved.newbshape;
 
         b->slots = reserved.newbslots;
         b->initSlotRange(0, reserved.avals.begin(), acap);
         if (b->hasPrivate())
-            b->setPrivate(apriv);
+            b->initPrivate(apriv);
 
         /* Make sure the destructor for reserved doesn't free the slots. */
         reserved.newaslots = NULL;
         reserved.newbslots = NULL;
     }
 
 #ifdef JSGC_GENERATIONAL
     Shape::writeBarrierPost(a->shape_, &a->shape_);
--- a/js/src/jsobj.h
+++ b/js/src/jsobj.h
@@ -863,17 +863,17 @@ struct JSObject : js::gc::Cell
 
     inline js::types::TypeObject *getType(JSContext *cx);
 
     js::types::TypeObject *type() const {
         JS_ASSERT(!hasLazyType());
         return type_;
     }
 
-    const js::HeapPtr<js::types::TypeObject> &typeFromGC() const {
+    js::HeapPtr<js::types::TypeObject> &typeFromGC() {
         /* Direct field access for use by GC. */
         return type_;
     }
 
     static inline size_t offsetOfType() { return offsetof(JSObject, type_); }
     inline js::HeapPtrTypeObject *addressOfType() { return &type_; }
 
     inline void setType(js::types::TypeObject *newType);
@@ -949,16 +949,17 @@ struct JSObject : js::gc::Cell
 
     inline js::GlobalObject &global() const;
 
     /* Private data accessors. */
 
     inline bool hasPrivate() const;
     inline void *getPrivate() const;
     inline void setPrivate(void *data);
+    inline void initPrivate(void *data);
 
     /* Access private data for an object with a known number of fixed slots. */
     inline void *getPrivate(size_t nfixed) const;
 
     /* N.B. Infallible: NULL means 'no principal', not an error. */
     inline JSPrincipals *principals(JSContext *cx);
 
     /* Remove the type (and prototype) or parent from a new object. */
@@ -1350,16 +1351,17 @@ struct JSObject : js::gc::Cell
     static bool thisObject(JSContext *cx, const js::Value &v, js::Value *vp);
 
     bool swap(JSContext *cx, JSObject *other);
 
     inline void initArrayClass();
 
     static inline void writeBarrierPre(JSObject *obj);
     static inline void writeBarrierPost(JSObject *obj, void *addr);
+    static inline void readBarrier(JSObject *obj);
     inline void privateWriteBarrierPre(void **oldval);
     inline void privateWriteBarrierPost(void **oldval);
 
     /*
      * In addition to the generic object interface provided by JSObject,
      * specific types of objects may provide additional operations. To access,
      * these addition operations, callers should use the pattern:
      *
--- a/js/src/jsobjinlines.h
+++ b/js/src/jsobjinlines.h
@@ -114,16 +114,22 @@ JSObject::setPrivate(void *data)
 {
     void **pprivate = &privateRef(numFixedSlots());
 
     privateWriteBarrierPre(pprivate);
     *pprivate = data;
     privateWriteBarrierPost(pprivate);
 }
 
+inline void
+JSObject::initPrivate(void *data)
+{
+    privateRef(numFixedSlots()) = data;
+}
+
 inline bool
 JSObject::enumerate(JSContext *cx, JSIterateOp iterop, js::Value *statep, jsid *idp)
 {
     JSNewEnumerateOp op = getOps()->enumerate;
     return (op ? op : JS_EnumerateState)(cx, this, iterop, statep, idp);
 }
 
 inline bool
@@ -597,30 +603,42 @@ JSObject::initDenseArrayElements(uintN d
 
 inline void
 JSObject::moveDenseArrayElements(uintN dstStart, uintN srcStart, uintN count)
 {
     JS_ASSERT(dstStart + count <= getDenseArrayCapacity());
     JS_ASSERT(srcStart + count <= getDenseArrayInitializedLength());
 
     /*
-     * Use a custom write barrier here since it's performance sensitive. We
-     * only want to barrier the elements that are being overwritten.
-     */
-    uintN markStart, markEnd;
-    if (dstStart > srcStart) {
-        markStart = js::Max(srcStart + count, dstStart);
-        markEnd = dstStart + count;
+     * Using memmove here would skip write barriers. Also, we need to consider
+     * an array containing [A, B, C], in the following situation:
+     *
+     * 1. Incremental GC marks slot 0 of array (i.e., A), then returns to JS code.
+     * 2. JS code moves slots 1..2 into slots 0..1, so it contains [B, C, C].
+     * 3. Incremental GC finishes by marking slots 1 and 2 (i.e., C).
+     *
+     * Since normal marking never happens on B, it is very important that the
+     * write barrier is invoked here on B, despite the fact that it exists in
+     * the array before and after the move.
+     */
+    if (compartment()->needsBarrier()) {
+        if (dstStart < srcStart) {
+            js::HeapValue *dst = elements + dstStart;
+            js::HeapValue *src = elements + srcStart;
+            for (unsigned i = 0; i < count; i++, dst++, src++)
+                *dst = *src;
+        } else {
+            js::HeapValue *dst = elements + dstStart + count - 1;
+            js::HeapValue *src = elements + srcStart + count - 1;
+            for (unsigned i = 0; i < count; i++, dst--, src--)
+                *dst = *src;
+        }
     } else {
-        markStart = dstStart;
-        markEnd = js::Min(dstStart + count, srcStart);
+        memmove(elements + dstStart, elements + srcStart, count * sizeof(js::Value));
     }
-    prepareElementRangeForOverwrite(markStart, markEnd);
-
-    memmove(elements + dstStart, elements + srcStart, count * sizeof(js::Value));
 }
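
The element-wise copy above exists so that every overwrite runs the HeapValue assignment operator, whose pre-write barrier is what catches B in the [A, B, C] scenario from the comment. A sketch with a counting stand-in for that barrier:

    #include <cassert>
    #include <cstddef>

    static int barrierHits = 0;

    /* Hypothetical barriered slot: assignment reports the overwritten value
     * to the marker, which a raw memmove would silently skip. */
    struct HeapSlot {
        int v;
        HeapSlot &operator=(const HeapSlot &other) {
            barrierHits++;      /* stand-in for the pre-write barrier on v */
            v = other.v;
            return *this;
        }
    };

    /* Overlap-safe element-wise move, choosing the copy direction as
     * moveDenseArrayElements does when the compartment needs barriers. */
    static void moveSlots(HeapSlot *elems, size_t dst, size_t src, size_t n) {
        if (dst < src) {
            for (size_t i = 0; i < n; i++)
                elems[dst + i] = elems[src + i];
        } else {
            for (size_t i = n; i-- > 0; )
                elems[dst + i] = elems[src + i];
        }
    }

    int main() {
        HeapSlot a[3] = { {1}, {2}, {3} };  /* [A, B, C] */
        moveSlots(a, 0, 1, 2);              /* -> [B, C, C] */
        assert(a[0].v == 2 && a[1].v == 3 && a[2].v == 3);
        assert(barrierHits == 2);           /* the barrier saw A and B */
        return 0;
    }
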
 
 inline void
 JSObject::moveDenseArrayElementsUnbarriered(uintN dstStart, uintN srcStart, uintN count)
 {
     JS_ASSERT(!compartment()->needsBarrier());
 
     JS_ASSERT(dstStart + count <= getDenseArrayCapacity());
@@ -2122,13 +2140,25 @@ JSObject::writeBarrierPre(JSObject *obj)
     if (comp->needsBarrier()) {
         JS_ASSERT(!comp->rt->gcRunning);
         MarkObjectUnbarriered(comp->barrierTracer(), obj, "write barrier");
     }
 #endif
 }
 
 inline void
+JSObject::readBarrier(JSObject *obj)
+{
+#ifdef JSGC_INCREMENTAL
+    JSCompartment *comp = obj->compartment();
+    if (comp->needsBarrier()) {
+        JS_ASSERT(!comp->rt->gcRunning);
+        MarkObjectUnbarriered(comp->barrierTracer(), obj, "read barrier");
+    }
+#endif
+}
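
readBarrier is the read-side counterpart of the pre-write barrier: if JS code can fish an unmarked object out of a structure the marker treats as weak (weak maps are the canonical case) while an incremental mark is in progress, the object must be greyed as it escapes or it could be swept while still reachable. In miniature:

    #include <cassert>

    struct Obj {
        bool marked;
    };

    /* Hypothetical incremental-marking state with a read barrier. */
    struct GC {
        bool incrementalMarking;
        Obj *readBarrier(Obj *obj) {
            if (incrementalMarking && !obj->marked)
                obj->marked = true;   /* grey it before it escapes */
            return obj;
        }
    };

    int main() {
        GC gc = { true };
        Obj weaklyHeld = { false };
        Obj *escaped = gc.readBarrier(&weaklyHeld);
        assert(escaped->marked);      /* now safe to store anywhere */
        return 0;
    }
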
+
+inline void
 JSObject::writeBarrierPost(JSObject *obj, void *addr)
 {
 }
 
 #endif /* jsobjinlines_h___ */
--- a/js/src/jspropertycache.cpp
+++ b/js/src/jspropertycache.cpp
@@ -277,17 +277,17 @@ PropertyCache::purge(JSContext *cx)
   { static FILE *fp;
     if (!fp)
         fp = fopen("/tmp/propcache.stats", "w");
     if (fp) {
         fputs("Property cache stats for ", fp);
 #ifdef JS_THREADSAFE
         fprintf(fp, "thread %lu, ", (unsigned long) cx->thread->id);
 #endif
-        fprintf(fp, "GC %u\n", cx->runtime->gcNumber);
+        fprintf(fp, "GC %lu\n", (unsigned long)cx->runtime->gcNumber);
 
 # define P(mem) fprintf(fp, "%11s %10lu\n", #mem, (unsigned long)mem)
         P(fills);
         P(nofills);
         P(rofills);
         P(disfills);
         P(oddfills);
         P(add2dictfills);
--- a/js/src/jsproxy.cpp
+++ b/js/src/jsproxy.cpp
@@ -1306,17 +1306,17 @@ static JSType
 proxy_TypeOf(JSContext *cx, JSObject *proxy)
 {
     JS_ASSERT(proxy->isProxy());
     return Proxy::typeOf(cx, proxy);
 }
 
 JS_FRIEND_DATA(Class) js::ObjectProxyClass = {
     "Proxy",
-    Class::NON_NATIVE | JSCLASS_HAS_RESERVED_SLOTS(4),
+    Class::NON_NATIVE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(4),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
     proxy_Convert,
     proxy_Finalize,          /* finalize    */
@@ -1362,17 +1362,17 @@ JS_FRIEND_DATA(Class) js::ObjectProxyCla
         proxy_Fix,           /* fix             */
         NULL,                /* thisObject      */
         NULL,                /* clear           */
     }
 };
 
 JS_FRIEND_DATA(Class) js::OuterWindowProxyClass = {
     "Proxy",
-    Class::NON_NATIVE | JSCLASS_HAS_RESERVED_SLOTS(4),
+    Class::NON_NATIVE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(4),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
     JS_ConvertStub,
     proxy_Finalize,          /* finalize    */
@@ -1440,17 +1440,17 @@ proxy_Construct(JSContext *cx, uintN arg
     JSObject *proxy = &JS_CALLEE(cx, vp).toObject();
     JS_ASSERT(proxy->isProxy());
     bool ok = Proxy::construct(cx, proxy, argc, JS_ARGV(cx, vp), vp);
     return ok;
 }
 
 JS_FRIEND_DATA(Class) js::FunctionProxyClass = {
     "Proxy",
-    Class::NON_NATIVE | JSCLASS_HAS_RESERVED_SLOTS(6),
+    Class::NON_NATIVE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_HAS_RESERVED_SLOTS(6),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
     JS_ConvertStub,
     NULL,                    /* finalize */
--- a/js/src/jsscope.cpp
+++ b/js/src/jsscope.cpp
@@ -1057,17 +1057,17 @@ JSObject::rollbackProperties(JSContext *
     }
 }
 
 Shape *
 JSObject::replaceWithNewEquivalentShape(JSContext *cx, Shape *oldShape, Shape *newShape)
 {
     JS_ASSERT_IF(oldShape != lastProperty(),
                  inDictionaryMode() &&
-                 nativeLookup(cx, oldShape->maybePropid()) == oldShape);
+                 nativeLookup(cx, oldShape->propidRef()) == oldShape);
 
     JSObject *self = this;
 
     if (!inDictionaryMode()) {
         RootObject selfRoot(cx, &self);
         RootShape newRoot(cx, &newShape);
         if (!toDictionaryMode(cx))
             return NULL;
@@ -1081,17 +1081,17 @@ JSObject::replaceWithNewEquivalentShape(
         if (!newShape)
             return NULL;
         new (newShape) Shape(oldShape->base()->unowned(), 0);
     }
 
     PropertyTable &table = self->lastProperty()->table();
     Shape **spp = oldShape->isEmptyShape()
                   ? NULL
-                  : table.search(oldShape->maybePropid(), false);
+                  : table.search(oldShape->propidRef(), false);
 
     /*
      * Splice the new shape into the same position as the old shape, preserving
      * enumeration order (see bug 601399).
      */
     StackShape nshape(oldShape);
     newShape->initDictionaryShape(nshape, self->numFixedSlots(), oldShape->listp);
 
--- a/js/src/jsscope.h
+++ b/js/src/jsscope.h
@@ -557,16 +557,20 @@ struct Shape : public js::gc::Cell
         JS_ASSERT(!(flags & NON_NATIVE) == getObjectClass()->isNative());
         return !(flags & NON_NATIVE);
     }
 
     const HeapPtrShape &previous() const {
         return parent;
     }
 
+    HeapPtrShape &previousRef() {
+        return parent;
+    }
+
     class Range {
       protected:
         friend struct Shape;
         const Shape *cursor;
 
       public:
         Range(const Shape *shape) : cursor(shape) { }
 
@@ -765,18 +769,22 @@ struct Shape : public js::gc::Cell
 
     void incrementNumLinearSearches() {
         uint32_t count = numLinearSearches();
         JS_ASSERT(count < LINEAR_SEARCHES_MAX);
         slotInfo = slotInfo & ~LINEAR_SEARCHES_MASK;
         slotInfo = slotInfo | ((count + 1) << LINEAR_SEARCHES_SHIFT);
     }
 
-    jsid propid() const { JS_ASSERT(!isEmptyShape()); return maybePropid(); }
-    const HeapId &maybePropid() const { JS_ASSERT(!JSID_IS_VOID(propid_)); return propid_; }
+    const HeapId &propid() const {
+        JS_ASSERT(!isEmptyShape());
+        JS_ASSERT(!JSID_IS_VOID(propid_));
+        return propid_;
+    }
+    HeapId &propidRef() { JS_ASSERT(!JSID_IS_VOID(propid_)); return propid_; }
 
     int16_t shortid() const { JS_ASSERT(hasShortID()); return maybeShortid(); }
     int16_t maybeShortid() const { return shortid_; }
 
     /*
      * If SHORTID is set in shape->flags, we use shape->shortid rather
      * than id when calling shape's getter or setter.
      */
@@ -990,17 +998,17 @@ struct StackShape
     {
         JS_ASSERT(base);
         JS_ASSERT(!JSID_IS_VOID(propid));
         JS_ASSERT(slot <= SHAPE_INVALID_SLOT);
     }
 
     StackShape(const Shape *shape)
       : base(shape->base()->unowned()),
-        propid(shape->maybePropid()),
+        propid(const_cast<Shape *>(shape)->propidRef()),
         slot_(shape->slotInfo & Shape::SLOT_MASK),
         attrs(shape->attrs),
         flags(shape->flags),
         shortid(shape->shortid_)
     {}
 
     bool hasSlot() const { return (attrs & JSPROP_SHARED) == 0; }
     bool hasMissingSlot() const { return maybeSlot() == SHAPE_INVALID_SLOT; }
@@ -1076,27 +1084,27 @@ Shape::search(JSContext *cx, Shape *star
         if (start->isBigEnoughForAPropertyTable()) {
             RootShape startRoot(cx, &start);
             RootId idRoot(cx, &id);
             if (start->hashify(cx)) {
                 Shape **spp = start->table().search(id, adding);
                 return SHAPE_FETCH(spp);
             }
         }
-        /* 
+        /*
          * No table built -- there weren't enough entries, or OOM occurred.
          * Don't increment numLinearSearches, to keep hasTable() false.
          */
         JS_ASSERT(!start->hasTable());
     } else {
         start->incrementNumLinearSearches();
     }
 
     for (Shape *shape = start; shape; shape = shape->parent) {
-        if (shape->maybePropid() == id)
+        if (shape->propidRef() == id)
             return shape;
     }
 
     return NULL;
 }
 
 } // namespace js
 
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -314,17 +314,17 @@ Bindings::makeImmutable()
     JS_ASSERT(lastBinding);
     JS_ASSERT(!lastBinding->inDictionary());
 }
 
 void
 Bindings::trace(JSTracer *trc)
 {
     if (lastBinding)
-        MarkShape(trc, lastBinding, "shape");
+        MarkShape(trc, &lastBinding, "shape");
 }
 
 #ifdef JS_CRASH_DIAGNOSTICS
 
 void
 CheckScript(JSScript *script, JSScript *prev)
 {
     if (script->cookie1[0] != JS_SCRIPT_COOKIE || script->cookie2[0] != JS_SCRIPT_COOKIE) {
--- a/js/src/jstypedarray.cpp
+++ b/js/src/jstypedarray.cpp
@@ -2175,16 +2175,17 @@ Class ArrayBuffer::slowClass = {
     JS_ResolveStub,
     JS_ConvertStub,
     JS_FinalizeStub
 };
 
 Class js::ArrayBufferClass = {
     "ArrayBuffer",
     JSCLASS_HAS_PRIVATE |
+    JSCLASS_IMPLEMENTS_BARRIERS |
     Class::NON_NATIVE |
     JSCLASS_HAS_RESERVED_SLOTS(ARRAYBUFFER_RESERVED_SLOTS) |
     JSCLASS_HAS_CACHED_PROTO(JSProto_ArrayBuffer),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
@@ -2293,17 +2294,17 @@ JSFunctionSpec _typedArray::jsfuncs[] = 
     JS_ConvertStub,                                                            \
     JS_FinalizeStub                                                            \
 }
 
 #define IMPL_TYPED_ARRAY_FAST_CLASS(_typedArray)                               \
 {                                                                              \
     #_typedArray,                                                              \
     JSCLASS_HAS_RESERVED_SLOTS(TypedArray::FIELD_MAX) |                        \
-    JSCLASS_HAS_PRIVATE |                                                      \
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |                        \
     JSCLASS_FOR_OF_ITERATION |                                                 \
     Class::NON_NATIVE,                                                         \
     JS_PropertyStub,         /* addProperty */                                 \
     JS_PropertyStub,         /* delProperty */                                 \
     JS_PropertyStub,         /* getProperty */                                 \
     JS_StrictPropertyStub,   /* setProperty */                                 \
     JS_EnumerateStub,                                                          \
     JS_ResolveStub,                                                            \
--- a/js/src/jswatchpoint.cpp
+++ b/js/src/jswatchpoint.cpp
@@ -197,45 +197,53 @@ bool
 WatchpointMap::markIteratively(JSTracer *trc)
 {
     bool marked = false;
     for (Map::Range r = map.all(); !r.empty(); r.popFront()) {
         Map::Entry &e = r.front();
         bool objectIsLive = !IsAboutToBeFinalized(e.key.object);
         if (objectIsLive || e.value.held) {
             if (!objectIsLive) {
-                MarkObject(trc, e.key.object, "held Watchpoint object");
+                HeapPtrObject tmp(e.key.object);
+                MarkObject(trc, &tmp, "held Watchpoint object");
+                JS_ASSERT(tmp == e.key.object);
                 marked = true;
             }
 
             const HeapId &id = e.key.id;
             JS_ASSERT(JSID_IS_STRING(id) || JSID_IS_INT(id));
-            MarkId(trc, id, "WatchKey::id");
+            HeapId tmp(id.get());
+            MarkId(trc, &tmp, "WatchKey::id");
+            JS_ASSERT(tmp.get() == id.get());
 
             if (e.value.closure && IsAboutToBeFinalized(e.value.closure)) {
-                MarkObject(trc, e.value.closure, "Watchpoint::closure");
+                MarkObject(trc, &e.value.closure, "Watchpoint::closure");
                 marked = true;
             }
         }
     }
     return marked;
 }
 
 void
 WatchpointMap::markAll(JSTracer *trc)
 {
     for (Map::Range r = map.all(); !r.empty(); r.popFront()) {
         Map::Entry &e = r.front();
-        MarkObject(trc, e.key.object, "held Watchpoint object");
+        HeapPtrObject tmpObj(e.key.object);
+        MarkObject(trc, &tmpObj, "held Watchpoint object");
+        JS_ASSERT(tmpObj == e.key.object);
 
         const HeapId &id = e.key.id;
         JS_ASSERT(JSID_IS_STRING(id) || JSID_IS_INT(id));
-        MarkId(trc, id, "WatchKey::id");
+        HeapId tmpId(id.get());
+        MarkId(trc, &tmpId, "WatchKey::id");
+        JS_ASSERT(tmpId.get() == id.get());
 
-        MarkObject(trc, e.value.closure, "Watchpoint::closure");
+        MarkObject(trc, &e.value.closure, "Watchpoint::closure");
     }
 }
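
Both loops above mark hash-table keys through a temporary plus an assertion: the pointer-taking Mark* functions are allowed to update their argument, but a table key cannot be rewritten in place without rehashing, so a copy absorbs any update and the assert documents that nothing actually moves yet. The pattern in isolation, with a hypothetical marker:

    #include <cassert>

    struct JSTracer;
    struct JSObject;

    /* Hypothetical pointer-taking marker; a moving GC could update *objp. */
    static void MarkObjectStandin(JSTracer *trc, JSObject **objp) {
        (void) trc; (void) objp;
    }

    /* Mark a table key through a copy; assert the copy was not moved. */
    static void markKey(JSTracer *trc, JSObject *const &key) {
        JSObject *tmp = key;
        MarkObjectStandin(trc, &tmp);
        assert(tmp == key);
    }

    int main() {
        JSObject *key = 0;   /* placeholder key */
        markKey(0, key);
        return 0;
    }
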
 
 void
 WatchpointMap::sweepAll(JSRuntime *rt)
 {
     if (rt->gcCurrentCompartment) {
         if (WatchpointMap *wpmap = rt->gcCurrentCompartment->watchpointMap)
--- a/js/src/jsweakmap.cpp
+++ b/js/src/jsweakmap.cpp
@@ -57,28 +57,28 @@
 using namespace js;
 
 namespace js {
 
 bool
 WeakMapBase::markAllIteratively(JSTracer *tracer)
 {
     bool markedAny = false;
-    JSRuntime *rt = tracer->context->runtime;
+    JSRuntime *rt = tracer->runtime;
     for (WeakMapBase *m = rt->gcWeakMapList; m; m = m->next) {
         if (m->markIteratively(tracer))
             markedAny = true;
     }
     return markedAny;
 }
 
 void
 WeakMapBase::sweepAll(JSTracer *tracer)
 {
-    JSRuntime *rt = tracer->context->runtime;
+    JSRuntime *rt = tracer->runtime;
     for (WeakMapBase *m = rt->gcWeakMapList; m; m = m->next)
         m->sweep(tracer);
 }
 
 void
 WeakMapBase::traceAllMappings(WeakMapTracer *tracer)
 {
     JSRuntime *rt = tracer->runtime;
@@ -309,34 +309,42 @@ WeakMap_mark(JSTracer *trc, JSObject *ob
 {
     if (ObjectValueMap *map = GetObjectMap(obj))
         map->trace(trc);
 }
 
 static void
 WeakMap_finalize(JSContext *cx, JSObject *obj)
 {
-    ObjectValueMap *map = GetObjectMap(obj);
-    cx->delete_(map);
+    if (ObjectValueMap *map = GetObjectMap(obj)) {
+        map->check();
+#ifdef DEBUG
+        map->~ObjectValueMap();
+        memset(map, 0xdc, sizeof(ObjectValueMap));
+        cx->free_(map);
+#else
+        cx->delete_(map);
+#endif
+    }
 }
 
 static JSBool
 WeakMap_construct(JSContext *cx, uintN argc, Value *vp)
 {
     JSObject *obj = NewBuiltinClassInstance(cx, &WeakMapClass);
     if (!obj)
         return false;
 
     vp->setObject(*obj);
     return true;
 }
 
 Class js::WeakMapClass = {
     "WeakMap",
-    JSCLASS_HAS_PRIVATE |
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_CACHED_PROTO(JSProto_WeakMap),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
     JS_ConvertStub,
--- a/js/src/jsweakmap.h
+++ b/js/src/jsweakmap.h
@@ -122,17 +122,17 @@ class WeakMapBase {
             // many keys as possible have been marked, and add ourselves to the list of
             // known-live WeakMaps to be scanned in the iterative marking phase, by
             // markAllIteratively.
             JS_ASSERT(!tracer->eagerlyTraceWeakMaps);
 
             // Add ourselves to the list if we are not already in the list. We can already
             // be in the list if the weak map is marked more than once due to delayed marking.
             if (next == WeakMapNotInList) {
-                JSRuntime *rt = tracer->context->runtime;
+                JSRuntime *rt = tracer->runtime;
                 next = rt->gcWeakMapList;
                 rt->gcWeakMapList = this;
             }
         } else {
             // If we're not actually doing garbage collection, the keys won't be marked
             // nicely as needed by the true ephemeral marking algorithm --- custom tracers
             // such as the cycle collector must use their own means for cycle detection.
             // So here we do a conservative approximation: pretend all keys are live.
@@ -151,16 +151,18 @@ class WeakMapBase {
 
     // Remove entries whose keys are dead from all weak maps marked as live in this
     // garbage collection.
     static void sweepAll(JSTracer *tracer);
 
     // Trace all delayed weak map bindings. Used by the cycle collector.
     static void traceAllMappings(WeakMapTracer *tracer);
 
+    void check() { JS_ASSERT(next == WeakMapNotInList); }
+
     // Remove everything from the live weak map list.
     static void resetWeakMapList(JSRuntime *rt);
 
   protected:
     // Instance member functions called by the above. Instantiations of WeakMap override
     // these with definitions appropriate for their Key and Value types.
     virtual void nonMarkingTrace(JSTracer *tracer) = 0;
     virtual bool markIteratively(JSTracer *tracer) = 0;
@@ -199,29 +201,29 @@ class WeakMap : public HashMap<Key, Valu
     Range nondeterministicAll() {
         return Base::all();
     }
 
   private:
     void nonMarkingTrace(JSTracer *trc) {
         ValueMarkPolicy vp(trc);
         for (Range r = Base::all(); !r.empty(); r.popFront())
-            vp.mark(r.front().value);
+            vp.mark(&r.front().value);
     }
 
     bool markIteratively(JSTracer *trc) {
         KeyMarkPolicy kp(trc);
         ValueMarkPolicy vp(trc);
         bool markedAny = false;
         for (Range r = Base::all(); !r.empty(); r.popFront()) {
             const Key &k = r.front().key;
             Value &v = r.front().value;
             /* If the entry is live, ensure its key and value are marked. */
             if (kp.isMarked(k)) {
-                markedAny |= vp.mark(v);
+                markedAny |= vp.mark(&v);
             }
             JS_ASSERT_IF(kp.isMarked(k), vp.isMarked(v));
         }
         return markedAny;
     }
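
markIteratively is one pass of an ephemeron fixpoint: a value is marked only once its key is, and since marking one value can make another entry's key live, the caller keeps invoking it until no map marks anything new. The fixpoint in miniature:

    #include <map>
    #include <set>

    /* Hypothetical cells: a weak map keeps a value alive only while its key
     * is alive, so marking must iterate to a fixpoint. */
    typedef std::set<int> Marked;
    typedef std::map<int, int> WeakMap;   /* key cell -> value cell */

    static bool markIteratively(const WeakMap &map, Marked &marked) {
        bool markedAny = false;
        for (WeakMap::const_iterator it = map.begin(); it != map.end(); ++it) {
            if (marked.count(it->first) && !marked.count(it->second)) {
                marked.insert(it->second);
                markedAny = true;
            }
        }
        return markedAny;
    }

    int main() {
        WeakMap m;
        m[1] = 2;                 /* value 2 is live only if key 1 is */
        m[2] = 3;                 /* chain: marking 2 makes 3 live */
        Marked marked;
        marked.insert(1);
        while (markIteratively(m, marked)) {}   /* run to fixpoint */
        return marked.count(3) ? 0 : 1;
    }
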
 
     void sweep(JSTracer *trc) {
         KeyMarkPolicy kp(trc);
@@ -259,52 +261,52 @@ class DefaultMarkPolicy<HeapValue> {
     JSTracer *tracer;
   public:
     DefaultMarkPolicy(JSTracer *t) : tracer(t) { }
     bool isMarked(const HeapValue &x) {
         if (x.isMarkable())
             return !IsAboutToBeFinalized(x);
         return true;
     }
-    bool mark(HeapValue &x) {
-        if (isMarked(x))
+    bool mark(HeapValue *x) {
+        if (isMarked(*x))
             return false;
-        js::gc::MarkValue(tracer, &x, "WeakMap entry");
+        js::gc::MarkValue(tracer, x, "WeakMap entry");
         return true;
     }
 };
 
 template <>
 class DefaultMarkPolicy<HeapPtrObject> {
   private:
     JSTracer *tracer;
   public:
     DefaultMarkPolicy(JSTracer *t) : tracer(t) { }
     bool isMarked(const HeapPtrObject &x) {
         return !IsAboutToBeFinalized(x);
     }
-    bool mark(HeapPtrObject &x) {
-        if (isMarked(x))
+    bool mark(HeapPtrObject *x) {
+        if (isMarked(*x))
             return false;
         js::gc::MarkObject(tracer, x, "WeakMap entry");
         return true;
     }
 };
 
 template <>
 class DefaultMarkPolicy<HeapPtrScript> {
   private:
     JSTracer *tracer;
   public:
     DefaultMarkPolicy(JSTracer *t) : tracer(t) { }
     bool isMarked(const HeapPtrScript &x) {
         return !IsAboutToBeFinalized(x);
     }
-    bool mark(HeapPtrScript &x) {
-        if (isMarked(x))
+    bool mark(HeapPtrScript *x) {
+        if (isMarked(*x))
             return false;
         js::gc::MarkScript(tracer, x, "WeakMap entry");
         return true;
     }
 };
 
 // Default trace policies
 
--- a/js/src/jsxml.cpp
+++ b/js/src/jsxml.cpp
@@ -861,26 +861,26 @@ attr_identity(const JSXML *xmla, const J
     return qname_identity(xmla->name, xmlb->name);
 }
 
 void
 js_XMLArrayCursorTrace(JSTracer *trc, JSXMLArrayCursor<JSXML> *cursor)
 {
     for (; cursor; cursor = cursor->next) {
         if (cursor->root)
-            MarkXML(trc, (const HeapPtr<JSXML> &)cursor->root, "cursor_root");
+            MarkXML(trc, &(HeapPtr<JSXML> &)cursor->root, "cursor_root");
     }
 }
 
 void
 js_XMLArrayCursorTrace(JSTracer *trc, JSXMLArrayCursor<JSObject> *cursor)
 {
     for (; cursor; cursor = cursor->next) {
         if (cursor->root)
-            MarkObject(trc, (const HeapPtr<JSObject> &)cursor->root, "cursor_root");
+            MarkObject(trc, &(HeapPtr<JSObject> &)cursor->root, "cursor_root");
     }
 }
 
 template<class T>
 static HeapPtr<T> *
 ReallocateVector(HeapPtr<T> *vector, size_t count)
 {
 #if JS_BITS_PER_WORD == 32
@@ -5364,17 +5364,17 @@ js_ConcatenateXML(JSContext *cx, JSObjec
     vp->setObject(*listobj);
 out:
     js_LeaveLocalRootScopeWithResult(cx, *vp);
     return ok;
 }
 
 JS_FRIEND_DATA(Class) js::XMLClass = {
     js_XML_str,
-    JSCLASS_HAS_PRIVATE |
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_CACHED_PROTO(JSProto_XML),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
     xml_convert,
@@ -7323,36 +7323,36 @@ void
 JSXML::writeBarrierPost(JSXML *xml, void *addr)
 {
 }
 
 void
 js_TraceXML(JSTracer *trc, JSXML *xml)
 {
     if (xml->object)
-        MarkObject(trc, xml->object, "object");
+        MarkObject(trc, &xml->object, "object");
     if (xml->name)
-        MarkObject(trc, xml->name, "name");
+        MarkObject(trc, &xml->name, "name");
     if (xml->parent)
-        MarkXML(trc, xml->parent, "xml_parent");
+        MarkXML(trc, &xml->parent, "xml_parent");
 
     if (JSXML_HAS_VALUE(xml)) {
         if (xml->xml_value)
-            MarkString(trc, xml->xml_value, "value");
+            MarkString(trc, &xml->xml_value, "value");
         return;
     }
 
     MarkXMLRange(trc, xml->xml_kids.length, xml->xml_kids.vector, "xml_kids");
     js_XMLArrayCursorTrace(trc, xml->xml_kids.cursors);
 
     if (xml->xml_class == JSXML_CLASS_LIST) {
         if (xml->xml_target)
-            MarkXML(trc, xml->xml_target, "target");
+            MarkXML(trc, &xml->xml_target, "target");
         if (xml->xml_targetprop)
-            MarkObject(trc, xml->xml_targetprop, "targetprop");
+            MarkObject(trc, &xml->xml_targetprop, "targetprop");
     } else {
         MarkObjectRange(trc, xml->xml_namespaces.length,
                         xml->xml_namespaces.vector,
                         "xml_namespaces");
         js_XMLArrayCursorTrace(trc, xml->xml_namespaces.cursors);
 
         MarkXMLRange(trc, xml->xml_attrs.length, xml->xml_attrs.vector, "xml_attrs");
         js_XMLArrayCursorTrace(trc, xml->xml_attrs.cursors);
@@ -7893,21 +7893,21 @@ struct JSXMLFilter
 static void
 xmlfilter_trace(JSTracer *trc, JSObject *obj)
 {
     JSXMLFilter *filter = (JSXMLFilter *) obj->getPrivate();
     if (!filter)
         return;
 
     JS_ASSERT(filter->list);
-    MarkXML(trc, filter->list, "list");
+    MarkXML(trc, &filter->list, "list");
     if (filter->result)
-        MarkXML(trc, filter->result, "result");
+        MarkXML(trc, &filter->result, "result");
     if (filter->kid)
-        MarkXML(trc, filter->kid, "kid");
+        MarkXML(trc, &filter->kid, "kid");
 
     /*
      * We do not need to trace the cursor as that would be done when
      * tracing the filter->list.
      */
 }
 
 static void
@@ -7917,17 +7917,17 @@ xmlfilter_finalize(JSContext *cx, JSObje
     if (!filter)
         return;
 
     cx->delete_(filter);
 }
 
 Class js_XMLFilterClass = {
     "XMLFilter",
-    JSCLASS_HAS_PRIVATE | JSCLASS_IS_ANONYMOUS,
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_IS_ANONYMOUS,
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
     JS_ConvertStub,
     xmlfilter_finalize,
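
Note on the pattern running through the jsxml.cpp hunks above: every Mark* call now passes the address of the HeapPtr slot instead of a (const-cast) copy of its value, and the affected classes gain JSCLASS_IMPLEMENTS_BARRIERS. A self-contained sketch of why the address matters; HeapPtrSketch and MarkSketch are illustrative stand-ins, not the engine's types:

    struct JSTracerSketch {};                 // stand-in for JSTracer

    template <typename T>
    struct HeapPtrSketch {                    // stand-in for js::HeapPtr<T>
        T *value;
        T *get() const { return value; }
        void set(T *v) { value = v; }         // a real HeapPtr fires a write barrier
    };

    // Marking through the slot's address lets the tracer update the slot in
    // place, which barriers (and any future moving collector) require.
    template <typename T>
    void MarkSketch(JSTracerSketch *trc, HeapPtrSketch<T> *thingp, const char *name)
    {
        T *thing = thingp->get();             // read the current referent
        (void) trc; (void) name;              // real code would mark `thing` here
        thingp->set(thing);                   // write back through the same location
    }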
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -3919,17 +3919,17 @@ mjit::Compiler::emitStubCall(void *ptr, 
     JaegerSpew(JSpew_Insns, " ---- END STUB CALL ---- \n");
     return cl;
 }
 
 void
 mjit::Compiler::interruptCheckHelper()
 {
     Jump jump;
-    if (cx->runtime->gcZeal() >= js::gc::ZealVerifierThreshold) {
+    if (cx->runtime->gcZeal() == js::gc::ZealVerifierValue) {
         /* For barrier verification, always take the interrupt so we can verify. */
         jump = masm.jump();
     } else {
         void *interrupt = (void*) &cx->runtime->interrupt;
 #if defined(JS_CPU_X86) || defined(JS_CPU_ARM) || defined(JS_CPU_MIPS)
         jump = masm.branch32(Assembler::NotEqual, AbsoluteAddress(interrupt), Imm32(0));
 #else
         /* Handle processors that can't load from absolute addresses. */
@@ -6887,17 +6887,19 @@ mjit::Compiler::jsop_regexp()
     JSObject *obj = script->getRegExp(GET_UINT32_INDEX(PC));
     RegExpStatics *res = globalObj ? globalObj->getRegExpStatics() : NULL;
 
     if (!globalObj ||
         &obj->global() != globalObj ||
         !cx->typeInferenceEnabled() ||
         analysis->localsAliasStack() ||
         types::TypeSet::HasObjectFlags(cx, globalObj->getType(cx),
-                                       types::OBJECT_FLAG_REGEXP_FLAGS_SET)) {
+                                       types::OBJECT_FLAG_REGEXP_FLAGS_SET) ||
+        cx->runtime->gcIncrementalState == gc::MARK)
+    {
         prepareStubCall(Uses(0));
         masm.move(ImmPtr(obj), Registers::ArgReg1);
         INLINE_STUBCALL(stubs::RegExp, REJOIN_FALLTHROUGH);
         frame.pushSynced(JSVAL_TYPE_OBJECT);
         return true;
     }
 
     RegExpObject *reobj = &obj->asRegExp();
@@ -6941,20 +6943,21 @@ mjit::Compiler::jsop_regexp()
                     frame.push(ObjectValue(*obj));
                     return true;
                 }
             }
         }
     }
 
     /*
-     * Force creation of the RegExpShared in the script's RegExpObject
-     * so that we grab it in the getNewObject template copy. Note that
-     * JIT code is discarded on every GC, which permits us to burn in
-     * the pointer to the RegExpShared.
+     * Force creation of the RegExpShared in the script's RegExpObject so that
+     * we grab it in the getNewObject template copy. Note that JIT code is
+     * discarded on every GC, which permits us to burn in the pointer to the
+     * RegExpShared. We don't do this during an incremental
+     * GC, since we don't discard JIT code after every marking slice.
      */
     if (!reobj->getShared(cx))
         return false;
 
     RegisterID result = frame.allocReg();
     Jump emptyFreeList = masm.getNewObject(cx, result, obj);
 
     stubcc.linkExit(emptyFreeList, Uses(0));
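
The jsop_regexp hunk adds one more reason to take the stub path: a raw RegExpShared pointer may only be burned into JIT code while no incremental mark is in progress, because code emitted mid-GC is not discarded at the next slice boundary. Condensed as a predicate; the enum is a stand-in for the GC state, with names following the hunk:

    enum GCIncrementalStateSketch { NO_INCREMENTAL, MARK, SWEEP };

    // Safe to embed ImmPtr(shared) in generated code only outside marking.
    bool canBurnInRegExpShared(GCIncrementalStateSketch gcIncrementalState)
    {
        return gcIncrementalState != MARK;
    }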
--- a/js/src/methodjit/Compiler.h
+++ b/js/src/methodjit/Compiler.h
@@ -479,17 +479,17 @@ private:
     Label argsCheckFallthrough;
     Jump argsCheckJump;
 #endif
     bool debugMode_;
     bool inlining_;
     bool hasGlobalReallocation;
     bool oomInVector;       // True if we have OOM'd appending to a vector. 
     bool overflowICSpace;   // True if we added a constant pool in a reserved space.
-    uint32_t gcNumber;
+    uint64_t gcNumber;
     enum { NoApplyTricks, LazyArgsObj } applyTricks;
     PCLengthEntry *pcLengths;
 
     Compiler *thisFromCtor() { return this; }
 
     friend class CompilerAllocPolicy;
   public:
     Compiler(JSContext *cx, JSScript *outerScript, unsigned chunkIndex, bool isConstructing);
--- a/js/src/methodjit/MethodJIT.h
+++ b/js/src/methodjit/MethodJIT.h
@@ -397,17 +397,17 @@ struct RecompilationMonitor
      * If either inline frame expansion or recompilation occurs, then ICs and
      * stubs should not depend on the frame or JITs being intact. The two are
      * separated for logging.
      */
     unsigned recompilations;
     unsigned frameExpansions;
 
     /* If a GC occurs it may discard jit code on the stack. */
-    unsigned gcNumber;
+    uint64_t gcNumber;
 
     RecompilationMonitor(JSContext *cx)
         : cx(cx),
           recompilations(cx->compartment->types.recompilations),
           frameExpansions(cx->compartment->types.frameExpansions),
           gcNumber(cx->runtime->gcNumber)
     {}
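
Several structures in this changeset widen gcNumber from a 32-bit count to uint64_t. The guard they implement is: snapshot the counter, and later treat any change as "a GC may have discarded JIT code underneath us". With incremental GC the counter plausibly advances per slice rather than per full collection (an inference from this changeset, not stated in it), so the wider type removes any wraparound risk. Minimal sketch:

    #include <stdint.h>

    // Snapshot the GC counter at a safe point; a later mismatch means a GC
    // (or slice) intervened and cached code pointers may be stale.
    struct GCNumberGuardSketch {
        uint64_t before;
        explicit GCNumberGuardSketch(uint64_t current) : before(current) {}
        bool gcOccurred(uint64_t current) const { return current != before; }
    };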
 
--- a/js/src/methodjit/PolyIC.cpp
+++ b/js/src/methodjit/PolyIC.cpp
@@ -97,17 +97,17 @@ class PICLinker : public LinkerHelper
 class PICStubCompiler : public BaseCompiler
 {
   protected:
     const char *type;
     VMFrame &f;
     JSScript *script;
     ic::PICInfo &pic;
     void *stub;
-    uint32_t gcNumber;
+    uint64_t gcNumber;
 
   public:
     bool canCallHook;
 
     PICStubCompiler(const char *type, VMFrame &f, JSScript *script, ic::PICInfo &pic, void *stub)
       : BaseCompiler(f.cx), type(type), f(f), script(script), pic(pic), stub(stub),
         gcNumber(f.cx->runtime->gcNumber), canCallHook(pic.canCallHook)
     { }
--- a/js/src/methodjit/StubCalls.cpp
+++ b/js/src/methodjit/StubCalls.cpp
@@ -873,17 +873,17 @@ stubs::DebuggerStatement(VMFrame &f, jsb
             break;
         }
     }
 }
 
 void JS_FASTCALL
 stubs::Interrupt(VMFrame &f, jsbytecode *pc)
 {
-    gc::VerifyBarriers(f.cx);
+    gc::MaybeVerifyBarriers(f.cx);
 
     if (!js_HandleExecutionInterrupt(f.cx))
         THROW();
 }
 
 void JS_FASTCALL
 stubs::RecompileForInline(VMFrame &f)
 {
--- a/js/src/shell/js.cpp
+++ b/js/src/shell/js.cpp
@@ -1281,16 +1281,17 @@ GC(JSContext *cx, uintN argc, jsval *vp)
 static const struct ParamPair {
     const char      *name;
     JSGCParamKey    param;
 } paramMap[] = {
     {"maxBytes",            JSGC_MAX_BYTES },
     {"maxMallocBytes",      JSGC_MAX_MALLOC_BYTES},
     {"gcBytes",             JSGC_BYTES},
     {"gcNumber",            JSGC_NUMBER},
+    {"sliceTimeBudget",     JSGC_SLICE_TIME_BUDGET}
 };
 
 static JSBool
 GCParameter(JSContext *cx, uintN argc, jsval *vp)
 {
     JSString *str;
     if (argc == 0) {
         str = JS_ValueToString(cx, JSVAL_VOID);
@@ -1422,16 +1423,45 @@ ScheduleGC(JSContext *cx, uintN argc, js
         return JS_FALSE;
     if (argc == 2)
         compartment = js_ValueToBoolean(vp[3]);
 
     JS_ScheduleGC(cx, count, compartment);
     *vp = JSVAL_VOID;
     return JS_TRUE;
 }
+
+static JSBool
+VerifyBarriers(JSContext *cx, uintN argc, jsval *vp)
+{
+    gc::VerifyBarriers(cx);
+    *vp = JSVAL_VOID;
+    return JS_TRUE;
+}
+
+static JSBool
+GCSlice(JSContext *cx, uintN argc, jsval *vp)
+{
+    uint32_t budget;
+
+    if (argc != 1) {
+        JS_ReportErrorNumber(cx, my_GetErrorMessage, NULL,
+                             (argc < 1)
+                             ? JSSMSG_NOT_ENOUGH_ARGS
+                             : JSSMSG_TOO_MANY_ARGS,
+                             "gcslice");
+        return JS_FALSE;
+    }
+    if (!JS_ValueToECMAUint32(cx, vp[2], &budget))
+        return JS_FALSE;
+
+    GCDebugSlice(cx, budget);
+    *vp = JSVAL_VOID;
+    return JS_TRUE;
+}
 #endif /* JS_GC_ZEAL */
 
 typedef struct JSCountHeapNode JSCountHeapNode;
 
 struct JSCountHeapNode {
     void                *thing;
     JSGCTraceKind       kind;
     JSCountHeapNode     *next;
@@ -1468,17 +1498,17 @@ CountHeapNotify(JSTracer *trc, void **th
     if (entry->key)
         return;
     entry->key = thing;
 
     node = countTracer->recycleList;
     if (node) {
         countTracer->recycleList = node->next;
     } else {
-        node = (JSCountHeapNode *) JS_malloc(trc->context, sizeof *node);
+        node = (JSCountHeapNode *) js_malloc(sizeof *node);
         if (!node) {
             countTracer->ok = JS_FALSE;
             return;
         }
     }
     node->thing = thing;
     node->kind = kind;
     node->next = countTracer->traceList;
@@ -1570,17 +1600,17 @@ CountHeap(JSContext *cx, uintN argc, jsv
             counter++;
         countTracer.traceList = node->next;
         node->next = countTracer.recycleList;
         countTracer.recycleList = node;
         JS_TraceChildren(&countTracer.base, node->thing, node->kind);
     }
     while ((node = countTracer.recycleList) != NULL) {
         countTracer.recycleList = node->next;
-        JS_free(cx, node);
+        js_free(node);
     }
     JS_DHashTableFinish(&countTracer.visited);
 
     return countTracer.ok && JS_NewNumberValue(cx, (jsdouble) counter, vp);
 }
 
 static jsrefcount finalizeCount = 0;
 
@@ -3996,16 +4026,18 @@ static JSFunctionSpec shell_functions[] 
     JS_FN("gc",             ::GC,           0,0),
     JS_FN("gcparam",        GCParameter,    2,0),
     JS_FN("countHeap",      CountHeap,      0,0),
     JS_FN("makeFinalizeObserver", MakeFinalizeObserver, 0,0),
     JS_FN("finalizeCount",  FinalizeCount,  0,0),
 #ifdef JS_GC_ZEAL
     JS_FN("gczeal",         GCZeal,         2,0),
     JS_FN("schedulegc",     ScheduleGC,     1,0),
+    JS_FN("verifybarriers", VerifyBarriers, 0,0),
+    JS_FN("gcslice",        GCSlice,        1,0),
 #endif
     JS_FN("internalConst",  InternalConst,  1,0),
     JS_FN("setDebug",       SetDebug,       1,0),
     JS_FN("setDebuggerHandler", SetDebuggerHandler, 1,0),
     JS_FN("setThrowHook",   SetThrowHook,   1,0),
     JS_FN("trap",           Trap,           3,0),
     JS_FN("untrap",         Untrap,         2,0),
     JS_FN("line2pc",        LineToPC,       0,0),
@@ -4109,16 +4141,18 @@ static const char *const shell_help_mess
 "finalizeCount()\n"
 "  return the current value of the finalization counter that is incremented\n"
 "  each time an object returned by the makeFinalizeObserver is finalized",
 #ifdef JS_GC_ZEAL
 "gczeal(level, [freq], [compartmentGC?])\n"
 "                         How zealous the garbage collector should be",
 "schedulegc(num, [compartmentGC?])\n"
 "                         Schedule a GC to happen after num allocations",
+"verifybarriers()         Start or end a run of the write barrier verifier",
+"gcslice(n)               Run an incremental GC slice that marks ~n objects",
 #endif
 "internalConst(name)\n"
 "  Query an internal constant for the engine. See InternalConst source for the\n"
 "  list of constant names",
 "setDebug(debug)          Set debug mode",
 "setDebuggerHandler(f)    Set handler for debugger keyword to f",
 "setThrowHook(f)          Set throw hook to f",
 "trap([fun, [pc,]] exp)   Trap bytecode execution",
@@ -5452,17 +5486,17 @@ main(int argc, char **argv, char **envp)
 
     if (!InitWatchdog(rt))
         return 1;
 
     cx = NewContext(rt);
     if (!cx)
         return 1;
 
-    JS_SetGCParameter(rt, JSGC_MODE, JSGC_MODE_COMPARTMENT);
+    JS_SetGCParameter(rt, JSGC_MODE, JSGC_MODE_INCREMENTAL);
     JS_SetGCParameterForThread(cx, JSGC_MAX_CODE_CACHE_BYTES, 16 * 1024 * 1024);
 
     /* Must be done before creating the global object */
     if (op.getBoolOption('D'))
         JS_ToggleOptions(cx, JSOPTION_PCCOUNT);
 
     result = Shell(cx, &op, envp);
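
The new shell builtins expose manual collector control: verifybarriers() starts or ends a verifier run, and gcslice(n) performs one bounded increment of work through GCDebugSlice. A sketch of driving a collection to completion in slices; only GCDebugSlice comes from this changeset, the completion predicate is hypothetical:

    #include <stdint.h>

    struct JSContext;                                     // opaque, as in the engine
    void GCDebugSlice(JSContext *cx, uint32_t budget);    // entry point from the hunk
    bool IsIncrementalGCInProgressSketch(JSContext *cx);  // hypothetical predicate

    void RunGCInSlicesSketch(JSContext *cx, uint32_t budget)
    {
        do {
            GCDebugSlice(cx, budget);   // ~budget units of GC work, then return
        } while (IsIncrementalGCInProgressSketch(cx));
    }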
 
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -1058,34 +1058,43 @@ Debugger::markKeysInCompartment(JSTracer
      * enumerating WeakMap keys. However in this case we need access, so we
      * make a base-class reference. Range is public in HashMap.
      */
     typedef HashMap<HeapPtrObject, HeapPtrObject, DefaultHasher<HeapPtrObject>, RuntimeAllocPolicy>
         ObjectMap;
     const ObjectMap &objStorage = objects;
     for (ObjectMap::Range r = objStorage.all(); !r.empty(); r.popFront()) {
         const HeapPtrObject &key = r.front().key;
-        if (key->compartment() == comp && IsAboutToBeFinalized(key))
-            gc::MarkObject(tracer, key, "cross-compartment WeakMap key");
+        if (key->compartment() == comp && IsAboutToBeFinalized(key)) {
+            HeapPtrObject tmp(key);
+            gc::MarkObject(tracer, &tmp, "cross-compartment WeakMap key");
+            JS_ASSERT(tmp == key);
+        }
     }
 
     const ObjectMap &envStorage = environments;
     for (ObjectMap::Range r = envStorage.all(); !r.empty(); r.popFront()) {
         const HeapPtrObject &key = r.front().key;
-        if (key->compartment() == comp && IsAboutToBeFinalized(key))
-            js::gc::MarkObject(tracer, key, "cross-compartment WeakMap key");
+        if (key->compartment() == comp && IsAboutToBeFinalized(key)) {
+            HeapPtrObject tmp(key);
+            js::gc::MarkObject(tracer, &tmp, "cross-compartment WeakMap key");
+            JS_ASSERT(tmp == key);
+        }
     }
 
     typedef HashMap<HeapPtrScript, HeapPtrObject, DefaultHasher<HeapPtrScript>, RuntimeAllocPolicy>
         ScriptMap;
     const ScriptMap &scriptStorage = scripts;
     for (ScriptMap::Range r = scriptStorage.all(); !r.empty(); r.popFront()) {
         const HeapPtrScript &key = r.front().key;
-        if (key->compartment() == comp && IsAboutToBeFinalized(key))
-            gc::MarkScript(tracer, key, "cross-compartment WeakMap key");
+        if (key->compartment() == comp && IsAboutToBeFinalized(key)) {
+            HeapPtrScript tmp(key);
+            gc::MarkScript(tracer, &tmp, "cross-compartment WeakMap key");
+            JS_ASSERT(tmp == key);
+        }
     }
 }
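
All three loops above share one idiom: the table owns its keys as const, but MarkObject/MarkScript now need a mutable location, so the key is copied into a temporary, marked through the temporary, and asserted unchanged afterwards. A self-contained sketch of the idiom:

    #include <cassert>

    // Mark a const-owned key through a mutable copy, then check the collector
    // did not relocate it; a moved key would corrupt the hash table.
    template <typename Ptr, typename MarkFn>
    void markConstKeySketch(const Ptr &key, MarkFn mark)
    {
        Ptr tmp(key);
        mark(&tmp);            // the tracer may read and write tmp
        assert(tmp == key);    // non-moving collector: must still be equal
    }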
 
 /*
  * Ordinarily, WeakMap keys and values are marked because at some point it was
  * discovered that the WeakMap was live; that is, some object containing the
  * WeakMap was marked during mark phase.
  *
@@ -1171,42 +1180,41 @@ Debugger::markAllIteratively(GCMarker *t
                 Debugger *dbg = *p;
 
                 /*
                  * dbg is a Debugger with at least one debuggee. Check three things:
                  *   - dbg is actually in a compartment being GC'd
                  *   - it isn't already marked
                  *   - it actually has hooks that might be called
                  */
-                const HeapPtrObject &dbgobj = dbg->toJSObject();
+                HeapPtrObject &dbgobj = dbg->toJSObjectRef();
                 if (comp && comp != dbgobj->compartment())
                     continue;
 
                 bool dbgMarked = !IsAboutToBeFinalized(dbgobj);
                 if (!dbgMarked && dbg->hasAnyLiveHooks()) {
                     /*
                      * obj could be reachable only via its live, enabled
                      * debugger hooks, which may yet be called.
                      */
-                    MarkObject(trc, dbgobj, "enabled Debugger");
+                    MarkObject(trc, &dbgobj, "enabled Debugger");
                     markedAny = true;
                     dbgMarked = true;
                 }
 
                 if (dbgMarked) {
                     /* Search for breakpoints to mark. */
                     for (Breakpoint *bp = dbg->firstBreakpoint(); bp; bp = bp->nextInDebugger()) {
                         if (!IsAboutToBeFinalized(bp->site->script)) {
                             /*
                              * The debugger and the script are both live.
                              * Therefore the breakpoint handler is live.
                              */
-                            const HeapPtrObject &handler = bp->getHandler();
-                            if (IsAboutToBeFinalized(handler)) {
-                                MarkObject(trc, bp->getHandler(), "breakpoint handler");
+                            if (IsAboutToBeFinalized(bp->getHandler())) {
+                                MarkObject(trc, &bp->getHandlerRef(), "breakpoint handler");
                                 markedAny = true;
                             }
                         }
                     }
                 }
             }
         }
     }
@@ -1219,30 +1227,30 @@ Debugger::traceObject(JSTracer *trc, JSO
     if (Debugger *dbg = Debugger::fromJSObject(obj))
         dbg->trace(trc);
 }
 
 void
 Debugger::trace(JSTracer *trc)
 {
     if (uncaughtExceptionHook)
-        MarkObject(trc, uncaughtExceptionHook, "hooks");
+        MarkObject(trc, &uncaughtExceptionHook, "hooks");
 
     /*
      * Mark Debugger.Frame objects. These are all reachable from JS, because the
      * corresponding StackFrames are still on the stack.
      *
      * (Once we support generator frames properly, we will need
      * weakly-referenced Debugger.Frame objects as well, for suspended generator
      * frames.)
      */
     for (FrameMap::Range r = frames.all(); !r.empty(); r.popFront()) {
-        const HeapPtrObject &frameobj = r.front().value;
+        HeapPtrObject &frameobj = r.front().value;
         JS_ASSERT(frameobj->getPrivate());
-        MarkObject(trc, frameobj, "live Debugger.Frame");
+        MarkObject(trc, &frameobj, "live Debugger.Frame");
     }
 
     /* Trace the weak map from JSScript instances to Debugger.Script objects. */
     scripts.trace(trc);
 
     /* Trace the referent -> Debugger.Object weak map. */
     objects.trace(trc);
 
@@ -1310,17 +1318,19 @@ Debugger::finalize(JSContext *cx, JSObje
         JS_ASSERT(cx->runtime->gcCurrentCompartment == dbg->object->compartment());
         for (GlobalObjectSet::Enum e(dbg->debuggees); !e.empty(); e.popFront())
             dbg->removeDebuggeeGlobal(cx, e.front(), NULL, &e);
     }
     cx->delete_(dbg);
 }
 
 Class Debugger::jsclass = {
-    "Debugger", JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUG_COUNT),
+    "Debugger",
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
+    JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUG_COUNT),
     JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
     JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, Debugger::finalize,
     NULL,                 /* reserved0   */
     NULL,                 /* checkAccess */
     NULL,                 /* call        */
     NULL,                 /* construct   */
     NULL,                 /* xdrObject   */
     NULL,                 /* hasInstance */
@@ -1841,17 +1851,19 @@ DebuggerScript_trace(JSTracer *trc, JSOb
         /* This comes from a private pointer, so no barrier needed. */
         if (JSScript *script = GetScriptReferent(obj))
             MarkScriptUnbarriered(trc, script, "Debugger.Script referent");
 
     }
 }
 
 Class DebuggerScript_class = {
-    "Script", JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGSCRIPT_COUNT),
+    "Script",
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
+    JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGSCRIPT_COUNT),
     JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
     JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, NULL,
     NULL,                 /* reserved0   */
     NULL,                 /* checkAccess */
     NULL,                 /* call        */
     NULL,                 /* construct   */
     NULL,                 /* xdrObject   */
     NULL,                 /* hasInstance */
@@ -2943,17 +2955,19 @@ DebuggerObject_trace(JSTracer *trc, JSOb
          * is okay.
          */
         if (JSObject *referent = (JSObject *) obj->getPrivate())
             MarkObjectUnbarriered(trc, referent, "Debugger.Object referent");
     }
 }
 
 Class DebuggerObject_class = {
-    "Object", JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGOBJECT_COUNT),
+    "Object",
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
+    JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGOBJECT_COUNT),
     JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
     JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, NULL,
     NULL,                 /* reserved0   */
     NULL,                 /* checkAccess */
     NULL,                 /* call        */
     NULL,                 /* construct   */
     NULL,                 /* xdrObject   */
     NULL,                 /* hasInstance */
@@ -3585,17 +3599,19 @@ DebuggerEnv_trace(JSTracer *trc, JSObjec
          * is okay.
          */
         if (Env *referent = (JSObject *) obj->getPrivate())
             MarkObjectUnbarriered(trc, referent, "Debugger.Environment referent");
     }
 }
 
 Class DebuggerEnv_class = {
-    "Environment", JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGENV_COUNT),
+    "Environment",
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
+    JSCLASS_HAS_RESERVED_SLOTS(JSSLOT_DEBUGENV_COUNT),
     JS_PropertyStub, JS_PropertyStub, JS_PropertyStub, JS_StrictPropertyStub,
     JS_EnumerateStub, JS_ResolveStub, JS_ConvertStub, NULL,
     NULL,                 /* reserved0   */
     NULL,                 /* checkAccess */
     NULL,                 /* call        */
     NULL,                 /* construct   */
     NULL,                 /* xdrObject   */
     NULL,                 /* hasInstance */
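
Besides adding JSCLASS_IMPLEMENTS_BARRIERS to the Debugger classes, these hunks split each accessor in two: a const getter for ordinary readers and a mutable-reference getter (toJSObjectRef, getHandlerRef) whose address the tracer can take. Sketch of the split; the types are stand-ins:

    struct JSObjectSketch {};
    struct HeapPtrObjectSketch { JSObjectSketch *ptr; };  // stand-in for HeapPtrObject

    class HolderSketch {
        HeapPtrObjectSketch object;
      public:
        // Ordinary readers get a const view...
        const HeapPtrObjectSketch &toJSObject() const { return object; }
        // ...marking code gets a reference it can pass by address.
        HeapPtrObjectSketch &toJSObjectRef() { return object; }
    };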
--- a/js/src/vm/Debugger.h
+++ b/js/src/vm/Debugger.h
@@ -231,16 +231,17 @@ class Debugger {
     inline Breakpoint *firstBreakpoint() const;
 
   public:
     Debugger(JSContext *cx, JSObject *dbg);
     ~Debugger();
 
     bool init(JSContext *cx);
     inline const js::HeapPtrObject &toJSObject() const;
+    inline js::HeapPtrObject &toJSObjectRef();
     static inline Debugger *fromJSObject(JSObject *obj);
     static Debugger *fromChildJSObject(JSObject *obj);
 
     /*********************************** Methods for interaction with the GC. */
 
     /*
      * A Debugger object is live if:
      *   * the Debugger JSObject is live (Debugger::trace handles this case); OR
@@ -426,16 +427,17 @@ class Breakpoint {
   public:
     static Breakpoint *fromDebuggerLinks(JSCList *links);
     static Breakpoint *fromSiteLinks(JSCList *links);
     Breakpoint(Debugger *debugger, BreakpointSite *site, JSObject *handler);
     void destroy(JSContext *cx);
     Breakpoint *nextInDebugger();
     Breakpoint *nextInSite();
     const HeapPtrObject &getHandler() const { return handler; }
+    HeapPtrObject &getHandlerRef() { return handler; }
 };
 
 Debugger *
 Debugger::fromLinks(JSCList *links)
 {
     unsigned char *p = reinterpret_cast<unsigned char *>(links);
     return reinterpret_cast<Debugger *>(p - offsetof(Debugger, link));
 }
@@ -450,16 +452,23 @@ Debugger::firstBreakpoint() const
 
 const js::HeapPtrObject &
 Debugger::toJSObject() const
 {
     JS_ASSERT(object);
     return object;
 }
 
+js::HeapPtrObject &
+Debugger::toJSObjectRef()
+{
+    JS_ASSERT(object);
+    return object;
+}
+
 Debugger *
 Debugger::fromJSObject(JSObject *obj)
 {
     JS_ASSERT(js::GetObjectClass(obj) == &jsclass);
     return (Debugger *) obj->getPrivate();
 }
 
 bool
--- a/js/src/vm/RegExpObject-inl.h
+++ b/js/src/vm/RegExpObject-inl.h
@@ -76,16 +76,24 @@ inline RegExpShared *
 RegExpObject::getShared(JSContext *cx)
 {
     if (RegExpShared *shared = maybeShared())
         return shared;
     return createShared(cx);
 }
 
 inline void
+RegExpObject::setShared(JSContext *cx, RegExpShared *shared)
+{
+    if (shared)
+        shared->prepareForUse(cx);
+    JSObject::setPrivate(shared);
+}
+
+inline void
 RegExpObject::setLastIndex(const Value &v)
 {
     setSlot(LAST_INDEX_SLOT, v);
 }
 
 inline void
 RegExpObject::setLastIndex(double d)
 {
@@ -143,11 +151,17 @@ inline RegExpShared *
 RegExpToShared(JSContext *cx, JSObject &obj)
 {
     JS_ASSERT(ObjectClassIs(obj, ESClass_RegExp, cx));
     if (obj.isRegExp())
         return obj.asRegExp().getShared(cx);
     return Proxy::regexp_toShared(cx, &obj);
 }
 
+inline void
+RegExpShared::prepareForUse(JSContext *cx)
+{
+    gcNumberWhenUsed = cx->runtime->gcNumber;
+}
+
 } /* namespace js */
 
 #endif
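
setShared and prepareForUse stamp a shared with the runtime's current gcNumber each time it is installed into a RegExpObject; the sweeper compares that stamp against the number the GC started with. Self-contained sketch of the bookkeeping:

    #include <stdint.h>
    #include <stddef.h>

    struct RegExpSharedSketch {
        size_t   activeUseCount;     // live C++ stack references (Guard objects)
        uint64_t gcNumberWhenUsed;   // gcNumber when last installed

        // Mirrors prepareForUse(cx): record "used at this GC number".
        void prepareForUse(uint64_t currentGCNumber) {
            gcNumberWhenUsed = currentGCNumber;
        }
    };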
--- a/js/src/vm/RegExpObject.cpp
+++ b/js/src/vm/RegExpObject.cpp
@@ -57,58 +57,58 @@ JS_STATIC_ASSERT(MultilineFlag == JSREG_
 JS_STATIC_ASSERT(StickyFlag == JSREG_STICKY);
 
 /* RegExpObjectBuilder */
 
 RegExpObjectBuilder::RegExpObjectBuilder(JSContext *cx, RegExpObject *reobj)
   : cx(cx), reobj_(reobj)
 {
     if (reobj_)
-        reobj_->setPrivate(NULL);
+        reobj_->setShared(cx, NULL);
 }
 
 bool
 RegExpObjectBuilder::getOrCreate()
 {
     if (reobj_)
         return true;
 
     JSObject *obj = NewBuiltinClassInstance(cx, &RegExpClass);
     if (!obj)
         return false;
-    obj->setPrivate(NULL);
+    obj->initPrivate(NULL);
 
     reobj_ = &obj->asRegExp();
     return true;
 }
 
 bool
 RegExpObjectBuilder::getOrCreateClone(RegExpObject *proto)
 {
     JS_ASSERT(!reobj_);
 
     JSObject *clone = NewObjectWithGivenProto(cx, &RegExpClass, proto, proto->getParent());
     if (!clone)
         return false;
-    clone->setPrivate(NULL);
+    clone->initPrivate(NULL);
 
     reobj_ = &clone->asRegExp();
     return true;
 }
 
 RegExpObject *
 RegExpObjectBuilder::build(JSAtom *source, RegExpShared &shared)
 {
     if (!getOrCreate())
         return NULL;
 
     if (!reobj_->init(cx, source, shared.getFlags()))
         return NULL;
 
-    reobj_->setPrivate(&shared);
+    reobj_->setShared(cx, &shared);
     return reobj_;
 }
 
 RegExpObject *
 RegExpObjectBuilder::build(JSAtom *source, RegExpFlag flags)
 {
     if (!getOrCreate())
         return NULL;
@@ -325,23 +325,28 @@ RegExpCode::execute(JSContext *cx, const
     return RegExpRunStatus_Success;
 }
 
 /* RegExpObject */
 
 static void
 regexp_trace(JSTracer *trc, JSObject *obj)
 {
-    if (trc->runtime->gcRunning)
+    /*
+     * We have to check both conditions, since each one can also hold
+     * outside of GC marking:
+     *   1. During TraceRuntime, gcRunning is set, but we are not marking.
+     *   2. When a write barrier executes, IS_GC_MARKING_TRACER is true,
+     *      but gcRunning is false.
+     */
+    if (trc->runtime->gcRunning && IS_GC_MARKING_TRACER(trc))
         obj->setPrivate(NULL);
 }
 
 Class js::RegExpClass = {
     js_RegExp_str,
-    JSCLASS_HAS_PRIVATE |
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
     JSCLASS_HAS_RESERVED_SLOTS(RegExpObject::RESERVED_SLOTS) |
     JSCLASS_HAS_CACHED_PROTO(JSProto_RegExp),
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,        /* enumerate */
     JS_ResolveStub,
@@ -355,18 +360,18 @@ Class js::RegExpClass = {
     js_XDRRegExpObject,
 #else
     NULL,
 #endif
     NULL,                    /* hasInstance */
     regexp_trace
 };
 
-RegExpShared::RegExpShared(RegExpFlag flags)
-  : parenCount(0), flags(flags), activeUseCount(0)
+RegExpShared::RegExpShared(JSRuntime *rt, RegExpFlag flags)
+  : parenCount(0), flags(flags), activeUseCount(0), gcNumberWhenUsed(rt->gcNumber)
 {}
 
 RegExpObject *
 RegExpObject::create(JSContext *cx, RegExpStatics *res, const jschar *chars, size_t length,
                      RegExpFlag flags, TokenStream *tokenStream)
 {
     RegExpFlag staticsFlags = res->getFlags();
     return createNoStatics(cx, chars, length, RegExpFlag(flags | staticsFlags), tokenStream);
@@ -397,17 +402,17 @@ RegExpObject::createNoStatics(JSContext 
 RegExpShared *
 RegExpObject::createShared(JSContext *cx)
 {
     JS_ASSERT(!maybeShared());
     RegExpShared *shared = cx->compartment->regExps.get(cx, getSource(), getFlags());
     if (!shared)
         return NULL;
 
-    setPrivate(shared);
+    setShared(cx, shared);
     return shared;
 }
 
 Shape *
 RegExpObject::assignInitialShape(JSContext *cx)
 {
     JS_ASSERT(isRegExp());
     JS_ASSERT(nativeEmpty());
@@ -611,38 +616,39 @@ RegExpCompartment::init(JSContext *cx)
         js_ReportOutOfMemory(cx);
         return false;
     }
 
     return true;
 }
 
 void
-RegExpCompartment::purge()
+RegExpCompartment::sweep(JSRuntime *rt)
 {
     for (Map::Enum e(map_); !e.empty(); e.popFront()) {
+        /* See the comment on RegExpShared lifetime in RegExpObject.h. */
         RegExpShared *shared = e.front().value;
-        if (shared->activeUseCount == 0) {
+        if (shared->activeUseCount == 0 && shared->gcNumberWhenUsed < rt->gcStartNumber) {
             Foreground::delete_(shared);
             e.removeFront();
         }
     }
 }
 
 inline RegExpShared *
 RegExpCompartment::get(JSContext *cx, JSAtom *keyAtom, JSAtom *source, RegExpFlag flags, Type type)
 {
-    DebugOnly<size_t> gcNumberBefore = cx->runtime->gcNumber;
+    DebugOnly<uint64_t> gcNumberBefore = cx->runtime->gcNumber;
 
     Key key(keyAtom, flags, type);
     Map::AddPtr p = map_.lookupForAdd(key);
     if (p)
         return p->value;
 
-    RegExpShared *shared = cx->runtime->new_<RegExpShared>(flags);
+    RegExpShared *shared = cx->runtime->new_<RegExpShared>(cx->runtime, flags);
     if (!shared || !shared->compile(cx, source))
         goto error;
 
     /*
      * The compilation path only mallocs so cannot GC. Thus, it is safe to add
      * the regexp directly.
      */
     JS_ASSERT(cx->runtime->gcNumber == gcNumberBefore);
--- a/js/src/vm/RegExpObject.h
+++ b/js/src/vm/RegExpObject.h
@@ -164,16 +164,17 @@ class RegExpObject : public JSObject
     bool ignoreCase() const { return getSlot(IGNORE_CASE_FLAG_SLOT).toBoolean(); }
     bool global() const     { return getSlot(GLOBAL_FLAG_SLOT).toBoolean(); }
     bool multiline() const  { return getSlot(MULTILINE_FLAG_SLOT).toBoolean(); }
     bool sticky() const     { return getSlot(STICKY_FLAG_SLOT).toBoolean(); }
 
     inline RegExpShared &shared() const;
     inline RegExpShared *maybeShared();
     inline RegExpShared *getShared(JSContext *cx);
+    inline void setShared(JSContext *cx, RegExpShared *shared);
 
   private:
     friend class RegExpObjectBuilder;
 
     /*
      * Compute the initial shape to associate with fresh RegExp objects,
      * encoding their initial properties. Return the shape after
      * changing this regular expression object's last property to it.
@@ -185,16 +186,19 @@ class RegExpObject : public JSObject
     /*
      * Precondition: the syntax for |source| has already been validated.
      * Side effect: sets the private field.
      */
     RegExpShared *createShared(JSContext *cx);
 
     RegExpObject() MOZ_DELETE;
     RegExpObject &operator=(const RegExpObject &reo) MOZ_DELETE;
+
+    /* Call setShared in preference to setPrivate. */
+    void setPrivate(void *priv) MOZ_DELETE;
 };
 
 class RegExpObjectBuilder
 {
     JSContext       *cx;
     RegExpObject    *reobj_;
 
     bool getOrCreate();
@@ -288,29 +292,49 @@ class RegExpCode
 
     RegExpRunStatus
     execute(JSContext *cx, const jschar *chars, size_t length, size_t start,
             int *output, size_t outputCount);
 };
 
 }  /* namespace detail */
 
-/* The compiled representation of a regexp. */
+/*
+ * A RegExpShared is the compiled representation of a regexp. A RegExpShared is
+ * pointed to by potentially multiple RegExpObjects. Additionally, C++ code may
+ * have pointers to RegExpShareds on the stack. The RegExpShareds are tracked in
+ * a RegExpCompartment hashtable, and most are destroyed on every GC.
+ *
+ * During a GC, the trace hook for RegExpObject clears any pointers to
+ * RegExpShareds so that there will be no dangling pointers when they are
+ * deleted. However, some RegExpShareds are not deleted:
+ *
+ *   1. Any RegExpShared with pointers from the C++ stack is not deleted.
+ *   2. Any RegExpShared that was installed in a RegExpObject during an
+ *      incremental GC is not deleted. This is because the RegExpObject may have
+ *      been traced through before the new RegExpShared was installed, in which
+ *      case deleting the RegExpShared would turn the RegExpObject's reference
+ *      into a dangling pointer.
+ *
+ * The activeUseCount and gcNumberWhenUsed fields are used to track these two
+ * conditions.
+ */
 class RegExpShared
 {
     friend class RegExpCompartment;
 
     detail::RegExpCode code;
     uintN              parenCount;
     RegExpFlag         flags;
-    size_t             activeUseCount;
+    size_t             activeUseCount;   /* See comment above. */
+    uint64_t           gcNumberWhenUsed; /* See comment above. */
 
     bool compile(JSContext *cx, JSAtom *source);
 
-    RegExpShared(RegExpFlag flags);
+    RegExpShared(JSRuntime *rt, RegExpFlag flags);
     JS_DECLARE_ALLOCATION_FRIENDS_FOR_PRIVATE_CONSTRUCTOR;
 
   public:
     /*
      * Extend the lifetime of a given RegExpShared to at least the lifetime of
      * the Guard object. See Regular Expression comment at the top.
      */
     class Guard {
@@ -333,16 +357,19 @@ class RegExpShared
                 re_->activeUseCount--;
             }
         }
         bool initialized() const { return !!re_; }
         RegExpShared *operator->() { JS_ASSERT(initialized()); return re_; }
         RegExpShared &operator*() { JS_ASSERT(initialized()); return *re_; }
     };
 
+    /* Called when a RegExpShared is installed into a RegExpObject. */
+    inline void prepareForUse(JSContext *cx);
+
     /* Primary interface: run this regular expression on the given string. */
 
     RegExpRunStatus
     execute(JSContext *cx, const jschar *chars, size_t length, size_t *lastIndex,
             MatchPairs **output);
 
     /* Accessors */
 
@@ -383,17 +410,17 @@ class RegExpCompartment
 
     RegExpShared *get(JSContext *cx, JSAtom *key, JSAtom *source, RegExpFlag flags, Type type);
 
   public:
     RegExpCompartment(JSRuntime *rt);
     ~RegExpCompartment();
 
     bool init(JSContext *cx);
-    void purge();
+    void sweep(JSRuntime *rt);
 
     /* Return a regexp corresponding to the given (source, flags) pair. */
     RegExpShared *get(JSContext *cx, JSAtom *source, RegExpFlag flags);
 
     /* Like 'get', but compile 'maybeOpt' (if non-null). */
     RegExpShared *get(JSContext *cx, JSAtom *source, JSString *maybeOpt);
 
     /*
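
The lifetime comment above boils down to a single predicate applied in RegExpCompartment::sweep: a shared may be deleted only when no C++ stack reference holds it and it has not been used since this GC began. Sketch, with gcStartNumber being the runtime's gcNumber recorded at GC start, as in the sweep hunk:

    #include <stdint.h>
    #include <stddef.h>

    // Condition under which sweep may delete a RegExpShared.
    bool canDeleteSharedSketch(size_t activeUseCount,
                               uint64_t gcNumberWhenUsed,
                               uint64_t gcStartNumber)
    {
        return activeUseCount == 0 && gcNumberWhenUsed < gcStartNumber;
    }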
--- a/js/src/vm/RegExpStatics.cpp
+++ b/js/src/vm/RegExpStatics.cpp
@@ -66,17 +66,17 @@ resc_trace(JSTracer *trc, JSObject *obj)
     void *pdata = obj->getPrivate();
     JS_ASSERT(pdata);
     RegExpStatics *res = static_cast<RegExpStatics *>(pdata);
     res->mark(trc);
 }
 
 Class js::RegExpStaticsClass = {
     "RegExpStatics",
-    JSCLASS_HAS_PRIVATE,
+    JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS,
     JS_PropertyStub,         /* addProperty */
     JS_PropertyStub,         /* delProperty */
     JS_PropertyStub,         /* getProperty */
     JS_StrictPropertyStub,   /* setProperty */
     JS_EnumerateStub,
     JS_ResolveStub,
     JS_ConvertStub,
     resc_finalize,
--- a/js/src/vm/RegExpStatics.h
+++ b/js/src/vm/RegExpStatics.h
@@ -103,17 +103,17 @@ class RegExpStatics
                 continue;
             int start = get(i, 0);
             int limit = get(i, 1);
             JS_ASSERT(mpiLen >= size_t(limit) && limit >= start && start >= 0);
         }
 #endif
     }
 
-    /* 
+    /*
      * Since the first pair indicates the whole match, the paren pair
      * numbers have to be in the range [1, pairCount).
      */
     void checkParenNum(size_t pairNum) const {
         JS_ASSERT(1 <= pairNum);
         JS_ASSERT(pairNum < pairCount());
     }
 
@@ -200,21 +200,21 @@ class RegExpStatics
 
     /* Returns whether results for a non-empty match are present. */
     bool matched() const {
         JS_ASSERT(pairCount() > 0);
         JS_ASSERT_IF(get(0, 1) == -1, get(1, 1) == -1);
         return get(0, 1) - get(0, 0) > 0;
     }
 
-    void mark(JSTracer *trc) const {
+    void mark(JSTracer *trc) {
         if (pendingInput)
-            MarkString(trc, pendingInput, "res->pendingInput");
+            MarkString(trc, &pendingInput, "res->pendingInput");
         if (matchPairsInput)
-            MarkString(trc, matchPairsInput, "res->matchPairsInput");
+            MarkString(trc, &matchPairsInput, "res->matchPairsInput");
     }
 
     bool pairIsPresent(size_t pairNum) const {
         return get(pairNum, 0) >= 0;
     }
 
     /* Value creators. */
 
--- a/js/src/vm/Stack.cpp
+++ b/js/src/vm/Stack.cpp
@@ -527,16 +527,25 @@ StackSpace::mark(JSTracer *trc)
             pc = fp->prevpc(&site);
             JS_ASSERT_IF(fp->prev(), !site);
         }
         gc::MarkValueRootRange(trc, seg->slotsBegin(), slotsEnd, "vm_stack");
         nextSegEnd = (Value *)seg;
     }
 }
 
+void
+StackSpace::markActiveCompartments()
+{
+    for (StackSegment *seg = seg_; seg; seg = seg->prevInMemory()) {
+        for (StackFrame *fp = seg->maybefp(); (Value *)fp > (Value *)seg; fp = fp->prev())
+            MarkCompartmentActive(fp);
+    }
+}
+
 JS_FRIEND_API(bool)
 StackSpace::ensureSpaceSlow(JSContext *cx, MaybeReportError report, Value *from, ptrdiff_t nvals,
                             JSCompartment *dest) const
 {
     assertInvariants();
 
     /* See CX_COMPARTMENT comment. */
     if (dest == (JSCompartment *)CX_COMPARTMENT)
--- a/js/src/vm/Stack.h
+++ b/js/src/vm/Stack.h
@@ -1550,16 +1550,19 @@ class StackSpace
      */
     inline Value *getStackLimit(JSContext *cx, MaybeReportError report);
     bool tryBumpLimit(JSContext *cx, Value *from, uintN nvals, Value **limit);
 
     /* Called during GC: mark segments, frames, and slots under firstUnused. */
     void mark(JSTracer *trc);
     void markFrameSlots(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc);
 
+    /* Called during GC: sets active flag on compartments with active frames. */
+    void markActiveCompartments();
+
     /* We only report the committed size;  uncommitted size is uninteresting. */
     JS_FRIEND_API(size_t) sizeOfCommitted();
 };
 
 /*****************************************************************************/
 
 class ContextStack
 {
--- a/js/xpconnect/idl/nsIXPConnect.idl
+++ b/js/xpconnect/idl/nsIXPConnect.idl
@@ -393,17 +393,17 @@ interface nsIXPCFunctionThisTranslator :
 
 enum nsGCType {
     nsGCNormal,
     nsGCShrinking,
     nsGCIncremental
 };
 %}
 
-[uuid(686bb1d0-4711-11e1-b86c-0800200c9a66)]
+[uuid(e92bf5e0-494c-11e1-b86c-0800200c9a66)]
 interface nsIXPConnect : nsISupports
 {
 %{ C++
   NS_DEFINE_STATIC_CID_ACCESSOR(NS_XPCONNECT_CID)
 %}
 
     /**
      * Initializes classes on a global object that has already been created.
@@ -730,16 +730,22 @@ interface nsIXPConnect : nsISupports
     /**
      * Trigger a JS garbage collection.
      * Use a js::gcreason::Reason from jsfriendapi.h for the reason.
      * Use the nsGCType enum for the kind.
      */
     void GarbageCollect(in PRUint32 reason, in PRUint32 kind);
 
     /**
+     * Signals a good place to do an incremental GC slice, because the
+     * browser is drawing a frame.
+     */
+    void NotifyDidPaint();
+
+    /**
      * Define quick stubs on the given object, @a proto.
      *
      * @param cx
      *     A context.  Requires request.
      * @param proto
      *     The (newly created) prototype object for a DOM class.  The JS half
      *     of an XPCWrappedNativeProto.
      * @param flags
--- a/js/xpconnect/src/XPCInlines.h
+++ b/js/xpconnect/src/XPCInlines.h
@@ -599,17 +599,18 @@ inline
 void XPCWrappedNativeTearOff::SetJSObject(JSObject*  JSObj)
 {
         mJSObject = JSObj;
 }
 
 inline
 XPCWrappedNativeTearOff::~XPCWrappedNativeTearOff()
 {
-    NS_ASSERTION(!(GetInterface()||GetNative()||GetJSObjectPreserveColor()), "tearoff not empty in dtor");
+    NS_ASSERTION(!(GetInterface()||GetNative()||GetJSObjectPreserveColor()),
+                 "tearoff not empty in dtor");
 }
 
 /***************************************************************************/
 
 inline JSBool
 XPCWrappedNative::HasInterfaceNoQI(const nsIID& iid)
 {
     return nsnull != GetSet()->FindInterfaceWithIID(iid);
--- a/js/xpconnect/src/XPCJSRuntime.cpp
+++ b/js/xpconnect/src/XPCJSRuntime.cpp
@@ -906,16 +906,18 @@ JSBool XPCJSRuntime::GCCallback(JSContex
 #ifdef XPC_TRACK_DEFERRED_RELEASES
             printf("XPC - Begin deferred Release of %d nsISupports pointers\n",
                    self->mNativesToReleaseArray.Length());
 #endif
             DoDeferredRelease(self->mNativesToReleaseArray);
 #ifdef XPC_TRACK_DEFERRED_RELEASES
             printf("XPC - End deferred Releases\n");
 #endif
+
+            self->GetXPConnect()->ClearGCBeforeCC();
             break;
         }
         default:
             break;
     }
 
     nsTArray<JSGCCallback> callbacks(self->extraGCCallbacks);
     for (PRUint32 i = 0; i < callbacks.Length(); ++i) {
@@ -1885,16 +1887,28 @@ AccumulateTelemetryCallback(int id, uint
         Telemetry::Accumulate(Telemetry::GC_MS, sample);
         break;
       case JS_TELEMETRY_GC_MARK_MS:
         Telemetry::Accumulate(Telemetry::GC_MARK_MS, sample);
         break;
       case JS_TELEMETRY_GC_SWEEP_MS:
         Telemetry::Accumulate(Telemetry::GC_SWEEP_MS, sample);
         break;
+      case JS_TELEMETRY_GC_SLICE_MS:
+        Telemetry::Accumulate(Telemetry::GC_SLICE_MS, sample);
+        break;
+      case JS_TELEMETRY_GC_MMU_50:
+        Telemetry::Accumulate(Telemetry::GC_MMU_50, sample);
+        break;
+      case JS_TELEMETRY_GC_RESET:
+        Telemetry::Accumulate(Telemetry::GC_RESET, sample);
+        break;
+      case JS_TELEMETRY_GC_INCREMENTAL_DISABLED:
+        Telemetry::Accumulate(Telemetry::GC_INCREMENTAL_DISABLED, sample);
+        break;
     }
 }
 
 bool XPCJSRuntime::gNewDOMBindingsEnabled;
 
 bool PreserveWrapper(JSContext *cx, JSObject *obj)
 {
     JS_ASSERT(IS_WRAPPER_CLASS(js::GetObjectClass(obj)));
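
Of the new probes, GC_MMU_50 is the least self-explanatory: minimum mutator utilization, the worst-case fraction of any 50 ms window left to non-GC work. A brute-force sketch of the metric, purely illustrative; the engine computes it internally:

    #include <algorithm>
    #include <utility>
    #include <vector>

    // Worst-case mutator share of any windowMs-long window, given GC pauses
    // as (start, end) millisecond pairs within a run of length totalMs.
    double MMUSketch(const std::vector<std::pair<double, double> > &pauses,
                     double windowMs, double totalMs)
    {
        double worst = 1.0;
        for (double t = 0.0; t + windowMs <= totalMs; t += 1.0) {
            double gcTime = 0.0;
            for (size_t i = 0; i < pauses.size(); i++) {
                double lo = std::max(pauses[i].first, t);
                double hi = std::min(pauses[i].second, t + windowMs);
                if (hi > lo)
                    gcTime += hi - lo;
            }
            worst = std::min(worst, 1.0 - gcTime / windowMs);
        }
        return worst;   // GC_MMU_50 corresponds to windowMs == 50
    }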
--- a/js/xpconnect/src/nsXPConnect.cpp
+++ b/js/xpconnect/src/nsXPConnect.cpp
@@ -401,34 +401,34 @@ nsXPConnect::Collect(PRUint32 reason, PR
     // will already be marked by the JS GC and will thus be colored black
     // themselves. Any C++ objects they hold will have a missing (untraversed)
     // edge from the JS object to the C++ object and so it will be marked black
     // too. This decreases the number of objects that the cycle collector has to
     // deal with.
     // To improve debugging, if DEBUG_CC is defined all JS objects are
     // traversed.
 
-    mNeedGCBeforeCC = false;
-
     XPCCallContext ccx(NATIVE_CALLER);
     if (!ccx.IsValid())
         return;
 
     JSContext *cx = ccx.GetJSContext();
 
     // We want to scan the current thread for GC roots only if it was in a
     // request prior to the Collect call to avoid false positives during the
     // cycle collection. So to compensate for JS_BeginRequest in
     // XPCCallContext::Init we disable the conservative scanner if that call
     // has started the request on this thread.
     js::AutoSkipConservativeScan ascs(cx);
     MOZ_ASSERT(reason < js::gcreason::NUM_REASONS);
     js::gcreason::Reason gcreason = (js::gcreason::Reason)reason;
     if (kind == nsGCShrinking) {
         js::ShrinkingGC(cx, gcreason);
+    } else if (kind == nsGCIncremental) {
+        js::IncrementalGC(cx, gcreason);
     } else {
         MOZ_ASSERT(kind == nsGCNormal);
         js::GCForReason(cx, gcreason);
     }
 }
 
 NS_IMETHODIMP
 nsXPConnect::GarbageCollect(PRUint32 reason, PRUint32 kind)
@@ -2820,16 +2820,33 @@ nsXPConnect::GetTelemetryValue(JSContext
     v = DOUBLE_TO_JSVAL(i);
     if (!JS_DefineProperty(cx, obj, "customIter", v, NULL, NULL, attrs))
         return NS_ERROR_OUT_OF_MEMORY;
 
     *rval = OBJECT_TO_JSVAL(obj);
     return NS_OK;
 }
 
+NS_IMETHODIMP
+nsXPConnect::NotifyDidPaint()
+{
+    JSRuntime *rt = mRuntime->GetJSRuntime();
+    if (!js::WantGCSlice(rt))
+        return NS_OK;
+
+    XPCCallContext ccx(NATIVE_CALLER);
+    if (!ccx.IsValid())
+        return UnexpectedFailure(NS_ERROR_FAILURE);
+
+    JSContext *cx = ccx.GetJSContext();
+
+    js::NotifyDidPaint(cx);
+    return NS_OK;
+}
+
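
NotifyDidPaint gives the engine a low-jank moment to run a slice, and the cheap WantGCSlice test keeps the common no-GC-pending case out of XPConnect's call context machinery. Condensed call sequence; the declarations mirror the hunk, their bodies are not part of this changeset:

    struct JSRuntime;     // opaque
    struct JSContext;     // opaque

    namespace js {
    bool WantGCSlice(JSRuntime *rt);
    void NotifyDidPaint(JSContext *cx);
    }

    // After compositing a frame, offer the engine one incremental GC slice.
    void OnFramePaintedSketch(JSRuntime *rt, JSContext *cx)
    {
        if (!js::WantGCSlice(rt))   // fast out: no incremental GC pending
            return;
        js::NotifyDidPaint(cx);     // may run a slice between paints
    }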
 /* These are here to be callable from a debugger */
 JS_BEGIN_EXTERN_C
 JS_EXPORT_API(void) DumpJSStack()
 {
     nsresult rv;
     nsCOMPtr<nsIXPConnect> xpc(do_GetService(nsIXPConnect::GetCID(), &rv));
     if (NS_SUCCEEDED(rv) && xpc)
         xpc->DebugDumpJSStack(true, true, false);
--- a/js/xpconnect/src/xpcprivate.h
+++ b/js/xpconnect/src/xpcprivate.h
@@ -313,17 +313,18 @@ typedef nsDataHashtable<xpc::PtrAndPrinc
         result = (char*) nsMemory::Clone(src,                                 \
                                          sizeof(char)*(strlen(src)+1));       \
     else                                                                      \
         result = nsnull;                                                      \
     *dest = result;                                                           \
     return (result || !src) ? NS_OK : NS_ERROR_OUT_OF_MEMORY
 
 
-#define WRAPPER_SLOTS (JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(1))
+#define WRAPPER_SLOTS (JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | \
+                       JSCLASS_HAS_RESERVED_SLOTS(1))
 
 #define INVALID_OBJECT ((JSObject *)1)
 
 /***************************************************************************/
 // Auto locking support class...
 
 // We PROMISE to never screw this up.
 #ifdef _MSC_VER
@@ -515,16 +516,17 @@ public:
     // Called by module code on dll shutdown.
     static void ReleaseXPConnectSingleton();
 
     virtual ~nsXPConnect();
 
     JSBool IsShuttingDown() const {return mShuttingDown;}
 
     void EnsureGCBeforeCC() { mNeedGCBeforeCC = true; }
+    void ClearGCBeforeCC() { mNeedGCBeforeCC = false; }
 
     nsresult GetInfoForIID(const nsIID * aIID, nsIInterfaceInfo** info);
     nsresult GetInfoForName(const char * name, nsIInterfaceInfo** info);
 
     // nsCycleCollectionParticipant
     NS_IMETHOD Root(void *p);
     NS_IMETHOD Unlink(void *p);
     NS_IMETHOD Unroot(void *p);
--- a/js/xpconnect/src/xpcpublic.h
+++ b/js/xpconnect/src/xpcpublic.h
@@ -70,17 +70,18 @@ xpc_CreateGlobalObject(JSContext *cx, JS
 
 nsresult
 xpc_CreateMTGlobalObject(JSContext *cx, JSClass *clasp,
                          nsISupports *ptr, JSObject **global,
                          JSCompartment **compartment);
 
 #define XPCONNECT_GLOBAL_FLAGS                                                \
     JSCLASS_XPCONNECT_GLOBAL | JSCLASS_HAS_PRIVATE |                          \
-    JSCLASS_PRIVATE_IS_NSISUPPORTS | JSCLASS_GLOBAL_FLAGS_WITH_SLOTS(1)
+    JSCLASS_PRIVATE_IS_NSISUPPORTS | JSCLASS_IMPLEMENTS_BARRIERS |            \
+    JSCLASS_GLOBAL_FLAGS_WITH_SLOTS(1)
 
 void
 TraceXPCGlobal(JSTracer *trc, JSObject *obj);
 
 // XXX where should this live?
 NS_EXPORT_(void)
 xpc_LocalizeContext(JSContext *cx);
 
@@ -177,18 +178,22 @@ xpc_GCThingIsGrayCCThing(void *thing);
 extern void
 xpc_UnmarkGrayObjectRecursive(JSObject* obj);
 
 // Remove the gray color from the given JSObject and any other objects that can
 // be reached through it.
 inline void
 xpc_UnmarkGrayObject(JSObject *obj)
 {
-    if (obj && xpc_IsGrayGCThing(obj))
-        xpc_UnmarkGrayObjectRecursive(obj);
+    if (obj) {
+        if (xpc_IsGrayGCThing(obj))
+            xpc_UnmarkGrayObjectRecursive(obj);
+        else if (js::IsIncrementalBarrierNeededOnObject(obj))
+            js::IncrementalReferenceBarrier(obj);
+    }
 }
 
 // If aVariant is an XPCVariant, this marks the object to be in aGeneration.
 // This also unmarks the gray JSObject.
 extern void
 xpc_MarkInCCGeneration(nsISupports* aVariant, PRUint32 aGeneration);
 
 // Unmarks aWrappedJS's JSObject.
--- a/layout/base/nsPresShell.cpp
+++ b/layout/base/nsPresShell.cpp
@@ -5423,16 +5423,34 @@ PresShell::ProcessSynthMouseMoveEvent(bo
     shell->DispatchSynthMouseMove(&event, !aFromScroll);
   }
 
   if (!aFromScroll) {
     mSynthMouseMoveEvent.Forget();
   }
 }
 
+class nsAutoNotifyDidPaint
+{
+public:
+  nsAutoNotifyDidPaint(bool aWillSendDidPaint)
+    : mWillSendDidPaint(aWillSendDidPaint)
+  {
+  }
+  ~nsAutoNotifyDidPaint()
+  {
+    if (!mWillSendDidPaint && nsContentUtils::XPConnect()) {
+      nsContentUtils::XPConnect()->NotifyDidPaint();
+    }
+  }
+
+private:
+  bool mWillSendDidPaint;
+};
+
 void
 PresShell::Paint(nsIView*           aViewToPaint,
                  nsIWidget*         aWidgetToPaint,
                  const nsRegion&    aDirtyRegion,
                  const nsIntRegion& aIntDirtyRegion,
                  bool               aWillSendDidPaint)
 {
 #ifdef NS_FUNCTION_TIMER
@@ -5446,16 +5464,18 @@ PresShell::Paint(nsIView*           aVie
                            NSCoordToFloat(bounds__.YMost()));
 #endif
 
   SAMPLE_LABEL("Paint", "PresShell::Paint");
   NS_ASSERTION(!mIsDestroying, "painting a destroyed PresShell");
   NS_ASSERTION(aViewToPaint, "null view");
   NS_ASSERTION(aWidgetToPaint, "Can't paint without a widget");
 
+  nsAutoNotifyDidPaint notifyDidPaint(aWillSendDidPaint);
+
   nsPresContext* presContext = GetPresContext();
   AUTO_LAYOUT_PHASE_ENTRY_POINT(presContext, Paint);
 
   nsIFrame* frame = aViewToPaint->GetFrame();
 
   bool isRetainingManager;
   LayerManager* layerManager =
     aWidgetToPaint->GetLayerManager(&isRetainingManager);
@@ -5739,16 +5759,17 @@ PresShell::RecordMouseLocation(nsGUIEven
     printf("[ps=%p]got mouse exit for %p\n",
            this, aEvent->widget);
     printf("[ps=%p]clearing mouse location\n",
            this);
 #endif
   }
 }
 
+#ifdef MOZ_TOUCH
 static void
 EvictTouchPoint(nsCOMPtr<nsIDOMTouch>& aTouch)
 {
   nsIWidget *widget = nsnull;
   // is there an easier/better way to dig out the widget?
   nsCOMPtr<nsINode> node(do_QueryInterface(aTouch->GetTarget()));
   if (!node) {
     return;
@@ -5786,16 +5807,17 @@ EvictTouchPoint(nsCOMPtr<nsIDOMTouch>& a
 static PLDHashOperator
 AppendToTouchList(const PRUint32& aKey, nsCOMPtr<nsIDOMTouch>& aData, void *aTouchList)
 {
   nsTArray<nsCOMPtr<nsIDOMTouch> > *touches = static_cast<nsTArray<nsCOMPtr<nsIDOMTouch> > *>(aTouchList);
   aData->mChanged = false;
   touches->AppendElement(aData);
   return PL_DHASH_NEXT;
 }
+#endif // MOZ_TOUCH
 
 nsresult
 PresShell::HandleEvent(nsIFrame        *aFrame,
                        nsGUIEvent*     aEvent,
                        bool            aDontRetargetEvents,
                        nsEventStatus*  aEventStatus)
 {
   NS_ASSERTION(aFrame, "null frame");
@@ -7216,16 +7238,20 @@ PresShell::DidPaint()
   NS_ASSERTION(mPresContext->IsRoot(), "Should only call DidPaint on root presshells");
 
   nsRootPresContext* rootPresContext = mPresContext->GetRootPresContext();
   // This should only be called on root presshells, but maybe if a document
   // tree is torn down we might not be a root presshell...
   if (rootPresContext == mPresContext) {
     rootPresContext->UpdatePluginGeometry();
   }
+
+  if (nsContentUtils::XPConnect()) {
+    nsContentUtils::XPConnect()->NotifyDidPaint();
+  }
 }
 
 bool
 PresShell::IsVisible()
 {
   if (!mViewManager)
     return false;
 
--- a/layout/generic/nsBlockFrame.cpp
+++ b/layout/generic/nsBlockFrame.cpp
@@ -1934,20 +1934,21 @@ nsBlockFrame::ReflowDirtyLines(nsBlockRe
       aState.ReconstructMarginAbove(line);
     }
 
     bool reflowedPrevLine = !needToRecoverState;
     if (needToRecoverState) {
       needToRecoverState = false;
 
       // Update aState.mPrevChild as if we had reflowed all of the frames in
-      // this line.  This is expensive in some cases, since it requires
-      // walking |GetNextSibling|.
+      // this line.
-      if (line->IsDirty())
-        aState.mPrevChild = line.prev()->LastChild();
+      if (line->IsDirty()) {
+        NS_ASSERTION(line->mFirstChild->GetPrevSibling() ==
+                     line.prev()->LastChild(), "unexpected line frames");
+        aState.mPrevChild = line->mFirstChild->GetPrevSibling();
+      }
     }
 
     // Now repair the line and update |aState.mY| by calling
     // |ReflowLine| or |SlideLine|.
     // If we're going to reflow everything again, then no need to reflow
     // the dirty line ... unless the line has floats, in which case we'd
     // better reflow it now to refresh its float cache, which may contain
     // dangling frame pointers! Ugh! This reflow of the line may be
@@ -2127,19 +2128,21 @@ nsBlockFrame::ReflowDirtyLines(nsBlockRe
     aState.mY = aState.ClearFloats(aState.mY, inlineFloatBreakType);
   }
 
   if (needToRecoverState) {
     // Is this expensive?
     aState.ReconstructMarginAbove(line);
 
     // Update aState.mPrevChild as if we had reflowed all of the frames in
-    // the last line.  This is expensive in some cases, since it requires
-    // walking |GetNextSibling|.
-    aState.mPrevChild = line.prev()->LastChild();
+    // the last line.
+    NS_ASSERTION(line == line_end || line->mFirstChild->GetPrevSibling() ==
+                 line.prev()->LastChild(), "unexpected line frames");
+    aState.mPrevChild =
+      line == line_end ? mFrames.LastChild() : line->mFirstChild->GetPrevSibling();
   }
 
   // Should we really have to do this?
   if (repositionViews)
     ::PlaceFrameView(this);
 
   // We can skip trying to pull up the next line if our height is constrained
   // (so we can report being incomplete) and there is no next in flow or we
@@ -4391,17 +4394,22 @@ nsBlockFrame::PushLines(nsBlockReflowSta
       overflowLines = new nsLineList();
     }
     if (overflowLines) {
       // First, remove the frames we're pushing from mFrames
       nsIFrame* oldLastChild = mFrames.LastChild();
       if (firstLine) {
         mFrames.Clear();
       } else {
-        mFrames.RemoveFramesAfter(aLineBefore->LastChild());
+        nsIFrame* f = overBegin->mFirstChild;
+        nsIFrame* lineBeforeLastFrame =
+          f ? f->GetPrevSibling() : aLineBefore->LastChild();
+        NS_ASSERTION(!f || lineBeforeLastFrame == aLineBefore->LastChild(),
+                     "unexpected line frames");
+        mFrames.RemoveFramesAfter(lineBeforeLastFrame);
       }
       if (!overflowLines->empty()) {
         // XXXbz If we switch overflow lines to nsFrameList, we should
         // change this SetNextSibling call.
         oldLastChild->SetNextSibling(overflowLines->front()->mFirstChild);
       }
       overflowLines->splice(overflowLines->begin(), mLines, overBegin,
                             end_lines());
@@ -4708,17 +4716,19 @@ nsBlockFrame::AppendFrames(ChildListID  
     }
     else {
       NS_ERROR("unexpected child list");
       return NS_ERROR_INVALID_ARG;
     }
   }
 
   // Find the proper last-child for where the append should go
-  nsIFrame* lastKid = mLines.empty() ? nsnull : mLines.back()->LastChild();
+  nsIFrame* lastKid = mFrames.LastChild();
+  NS_ASSERTION((mLines.empty() ? nsnull : mLines.back()->LastChild()) ==
+               lastKid, "out-of-sync mLines / mFrames");
 
   // Add frames after the last child
 #ifdef NOISY_REFLOW_REASON
   ListTag(stdout);
   printf(": append ");
   nsFrame::ListTag(stdout, aFrameList);
   if (lastKid) {
     printf(" after ");
@@ -5389,18 +5399,26 @@ nsBlockFrame::DoRemoveFrame(nsIFrame* aD
 
     if (!(aFlags & FRAMES_ARE_EMPTY)) {
       line->MarkDirty();
       line->SetInvalidateTextRuns(true);
     }
 
     // If the frame being deleted is the last one on the line then
     // optimize away the line->Contains(next-in-flow) call below.
-    bool isLastFrameOnLine = (1 == line->GetChildCount() ||
-                                line->LastChild() == aDeletedFrame);
+    bool isLastFrameOnLine = 1 == line->GetChildCount();
+    if (!isLastFrameOnLine) {
+      line_iterator next = line.next();
+      nsIFrame* lastFrame = next != line_end ?
+        next->mFirstChild->GetPrevSibling() :
+        (searchingOverflowList ? line->LastChild() : mFrames.LastChild());
+      NS_ASSERTION(next == line_end || lastFrame == line->LastChild(),
+                   "unexpected line frames");
+      isLastFrameOnLine = lastFrame == aDeletedFrame;
+    }
 
     // Remove aDeletedFrame from the line
     nsIFrame* nextFrame = aDeletedFrame->GetNextSibling();
     if (line->mFirstChild == aDeletedFrame) {
       // We should be setting this to null if aDeletedFrame
       // is the only frame on the line. HOWEVER in that case
       // we will be removing the line anyway, see below.
       line->mFirstChild = nextFrame;
--- a/layout/generic/nsColumnSetFrame.cpp
+++ b/layout/generic/nsColumnSetFrame.cpp
@@ -361,17 +361,17 @@ nsColumnSetFrame::ChooseColumnStrategy(c
     colHeight = aReflowState.ComputedHeight();
   }
 
   nscoord colGap = GetColumnGap(this, colStyle);
   PRInt32 numColumns = colStyle->mColumnCount;
 
   bool isBalancing = colStyle->mColumnFill == NS_STYLE_COLUMN_FILL_BALANCE;
   if (isBalancing) {
-    const PRUint32 MAX_NESTED_COLUMN_BALANCING = 5;
+    const PRUint32 MAX_NESTED_COLUMN_BALANCING = 2;
     PRUint32 cnt = 1;
     for (const nsHTMLReflowState* rs = aReflowState.parentReflowState;
          rs && cnt < MAX_NESTED_COLUMN_BALANCING;
          rs = rs->parentReflowState) {
       if (rs->mFlags.mIsColumnBalancing) {
         ++cnt;
       }
     }
--- a/layout/reftests/svg/reftest.list
+++ b/layout/reftests/svg/reftest.list
@@ -237,16 +237,17 @@ random-if(winWidget) == text-gradient-02
 # Tests for bug 546813: sanity-check using HTML text, then test SVG behavior.
 fails-if(Android) != text-language-00.xhtml text-language-00-ref.xhtml
 fails-if(Android) random-if(gtk2Widget) != text-language-01.xhtml text-language-01-ref.xhtml # Fails on Linux tryserver due to lack of CJK fonts.
 == text-layout-01.svg text-layout-01-ref.svg
 == text-layout-02.svg text-layout-02-ref.svg
 == text-layout-03.svg text-layout-03-ref.svg
 == text-layout-04.svg text-layout-04-ref.svg
 == text-layout-05.svg text-layout-05-ref.svg
+== text-layout-06.svg text-layout-06-ref.svg
 == text-scale-01.svg text-scale-01-ref.svg
 == text-stroke-scaling-01.svg text-stroke-scaling-01-ref.svg
 == stroke-dasharray-and-pathLength-01.svg pass.svg
 == stroke-dasharray-and-text-01.svg stroke-dasharray-and-text-01-ref.svg 
 == stroke-linecap-square-w-zero-length-segs-01.svg pass.svg
 == stroke-linecap-square-w-zero-length-segs-02.svg pass.svg
 == textPath-01.svg textPath-01-ref.svg
 == textPath-02.svg pass.svg
new file mode 100644
--- /dev/null
+++ b/layout/reftests/svg/text-layout-06-ref.svg
@@ -0,0 +1,10 @@
+<!--
+     Any copyright is dedicated to the Public Domain.
+     http://creativecommons.org/publicdomain/zero/1.0/
+-->
+<svg xmlns="http://www.w3.org/2000/svg">
+  <title>Reference to check fill and stroke handling</title>
+
+  <text x="50" y="80" font-size="80" fill="blue" stroke="none">A B</text>
+  <text x="50" y="80" font-size="80" fill="none" stroke="yellow" stroke-width="2">A B</text>
+</svg>
new file mode 100644
--- /dev/null
+++ b/layout/reftests/svg/text-layout-06.svg
@@ -0,0 +1,9 @@
+<!--
+     Any copyright is dedicated to the Public Domain.
+     http://creativecommons.org/publicdomain/zero/1.0/
+-->
+<svg xmlns="http://www.w3.org/2000/svg">
+  <title>Testcase to check fill and stroke handling</title>
+
+  <text x="50" y="80" font-size="80" fill="blue" stroke="yellow" stroke-width="2">A B</text>
+</svg>
--- a/layout/svg/base/src/nsSVGGlyphFrame.cpp
+++ b/layout/svg/base/src/nsSVGGlyphFrame.cpp
@@ -369,17 +369,17 @@ nsSVGGlyphFrame::PaintSVG(nsSVGRenderSta
   // it so we don't leak them into the next object we draw
   gfx->Save();
   SetupGlobalTransform(gfx);
 
   CharacterIterator iter(this, true);
   iter.SetInitialMatrix(gfx);
 
   nsRefPtr<gfxPattern> strokePattern;
-  DrawMode drawMode = SetupCairoState(gfx, &strokePattern);
+  DrawMode drawMode = SetupCairoState(gfx, getter_AddRefs(strokePattern));
 
   if (drawMode) {
     DrawCharacters(&iter, gfx, drawMode, strokePattern);
   }
   
   gfx->Restore();
 
   return NS_OK;
@@ -879,49 +879,55 @@ nsSVGGlyphFrame::GetBaselineOffset(float
   default:
     NS_WARNING("We don't know about this type of dominant-baseline");
     return 0.0;
   }
   return baselineAppUnits * aMetricsScale;
 }
 
 DrawMode
-nsSVGGlyphFrame::SetupCairoState(gfxContext *context, nsRefPtr<gfxPattern> *strokePattern) {
+nsSVGGlyphFrame::SetupCairoState(gfxContext *aContext, gfxPattern **aStrokePattern)
+{
   DrawMode toDraw = DrawMode(0);
   const nsStyleSVG* style = GetStyleSVG();
 
   if (HasStroke()) {
-    gfxContextMatrixAutoSaveRestore matrixRestore(context);
-    context->IdentityMatrix();
+    gfxContextMatrixAutoSaveRestore matrixRestore(aContext);
+    aContext->IdentityMatrix();
 
     toDraw = DrawMode(toDraw | gfxFont::GLYPH_STROKE);
 
-    SetupCairoStrokeHitGeometry(context);
+    SetupCairoStrokeHitGeometry(aContext);
     float opacity = style->mStrokeOpacity;
     nsSVGPaintServerFrame *ps = GetPaintServer(&style->mStroke,
                                                nsSVGEffects::StrokeProperty());
 
+    nsRefPtr<gfxPattern> strokePattern;
+
     if (ps) {
       // Gradient or Pattern: can get pattern directly from frame
-      *strokePattern = ps->GetPaintServerPattern(this, opacity);
+      strokePattern = ps->GetPaintServerPattern(this, opacity);
+    }
 
-      NS_ASSERTION(*strokePattern, "No pattern returned from paint server");
-    } else {
+    if (!strokePattern) {
       nscolor color;
-      nsSVGUtils::GetFallbackOrPaintColor(context, GetStyleContext(),
+      nsSVGUtils::GetFallbackOrPaintColor(aContext, GetStyleContext(),
                                           &nsStyleSVG::mStroke, &opacity,
                                           &color);
-      *strokePattern = new gfxPattern(gfxRGBA(NS_GET_R(color) / 255.0,
-                                              NS_GET_G(color) / 255.0,
-                                              NS_GET_B(color) / 255.0,
-                                              NS_GET_A(color) / 255.0 * opacity));
+      strokePattern = new gfxPattern(gfxRGBA(NS_GET_R(color) / 255.0,
+                                             NS_GET_G(color) / 255.0,
+                                             NS_GET_B(color) / 255.0,
+                                             NS_GET_A(color) / 255.0 * opacity));
     }
+
+    *aStrokePattern = nsnull;
+    strokePattern.swap(*aStrokePattern);
   }
 
-  if (SetupCairoFill(context)) {
+  if (SetupCairoFill(aContext)) {
     toDraw = DrawMode(toDraw | gfxFont::GLYPH_FILL);
   }
 
   return toDraw;
 }
 
 //----------------------------------------------------------------------
 
--- a/layout/svg/base/src/nsSVGGlyphFrame.h
+++ b/layout/svg/base/src/nsSVGGlyphFrame.h
@@ -263,12 +263,13 @@ protected:
   gfxPoint mPosition;
   // The start index into the position and rotation data
   PRUint32 mStartIndex;
   bool mCompressWhitespace;
   bool mTrimLeadingWhitespace;
   bool mTrimTrailingWhitespace;
 
 private:
-  DrawMode SetupCairoState(gfxContext *context, nsRefPtr<gfxPattern> *strokePattern);
+  DrawMode SetupCairoState(gfxContext *aContext,
+                           gfxPattern **aStrokePattern);
 };
 
 #endif
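
The SetupCairoState rewrite does two things. First, it no longer assumes a
paint server always yields a pattern: if GetPaintServerPattern() returns null
(as with the empty <pattern> in the 725918-1.svg crashtest below), it now
falls through to the solid-color fallback instead of handing callers a null
pattern. Second, the out-parameter becomes a raw gfxPattern** so callers can
use getter_AddRefs(), with the nsnull-then-swap pair transferring ownership
without an extra AddRef/Release round trip. The same idiom in isolation,
using Widget as a stand-in type for this sketch:

    void GetWidget(Widget** aResult)
    {
      nsRefPtr<Widget> widget = CreateWidget();  // holds one reference
      *aResult = nsnull;
      widget.swap(*aResult);  // the reference now belongs to the caller
    }

    // Caller side:
    nsRefPtr<Widget> widget;
    GetWidget(getter_AddRefs(widget));
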
new file mode 100644
--- /dev/null
+++ b/layout/svg/crashtests/725918-1.svg
@@ -0,0 +1,4 @@
+<svg xmlns="http://www.w3.org/2000/svg">
+  <text stroke="url(#p)">t</text>
+  <pattern id="p"/>
+</svg>
--- a/layout/svg/crashtests/crashtests.list
+++ b/layout/svg/crashtests/crashtests.list
@@ -118,8 +118,9 @@ load 669025-2.svg
 load 682411-1.svg
 load 692203-1.svg
 load 692203-2.svg
 load 693424-1.svg
 load 709920-1.svg
 load 709920-2.svg
 load 713413-1.svg
 load 722003-1.svg
+load 725918-1.svg
--- a/modules/libpref/src/init/all.js
+++ b/modules/libpref/src/init/all.js
@@ -651,16 +651,18 @@ pref("javascript.options.jit_hardening",
 pref("javascript.options.typeinference", true);
 // This preference limits the memory usage of javascript.
 // If you want to change these values for your device,
 // please find Bug 417052 comment 17 and Bug 456721
 // Comment 32 and Bug 613551.
 pref("javascript.options.mem.high_water_mark", 128);
 pref("javascript.options.mem.max", -1);
 pref("javascript.options.mem.gc_per_compartment", true);
+pref("javascript.options.mem.gc_incremental", true);
+pref("javascript.options.mem.gc_incremental_slice_ms", 10);
 pref("javascript.options.mem.log", false);
 pref("javascript.options.gc_on_memory_pressure", true);
 
 // advanced prefs
 pref("advanced.mailftp",                    false);
 pref("image.animation_mode",                "normal");
 
 // Same-origin policy for file URIs, "false" is traditional
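
The two new prefs gate incremental GC and its per-slice time budget in
milliseconds. A minimal sketch of how Gecko-side code could read them,
assuming the static mozilla::Preferences helpers; the surrounding function
and the forwarding step are hypothetical:

    #include "mozilla/Preferences.h"

    static void ConfigureIncrementalGC()
    {
      bool enabled = mozilla::Preferences::GetBool(
        "javascript.options.mem.gc_incremental", true);
      PRInt32 sliceMs = mozilla::Preferences::GetInt(
        "javascript.options.mem.gc_incremental_slice_ms", 10);
      // Forward enabled/sliceMs to the JS engine's GC configuration here.
    }
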
--- a/toolkit/components/telemetry/TelemetryHistograms.h
+++ b/toolkit/components/telemetry/TelemetryHistograms.h
@@ -76,16 +76,20 @@ HISTOGRAM(FORGET_SKIPPABLE_MAX, 1, 10000
 /**
  * GC telemetry
  */
 HISTOGRAM(GC_REASON, 1, 20, 20, LINEAR, "Reason (enum value) for initiating a GC")
 HISTOGRAM_BOOLEAN(GC_IS_COMPARTMENTAL, "Is it a compartmental GC?")
 HISTOGRAM(GC_MS, 1, 10000, 50, EXPONENTIAL, "Time spent running JS GC (ms)")
 HISTOGRAM(GC_MARK_MS, 1, 10000, 50, EXPONENTIAL, "Time spent running JS GC mark phase (ms)")
 HISTOGRAM(GC_SWEEP_MS, 1, 10000, 50, EXPONENTIAL, "Time spent running JS GC sweep phase (ms)")
+HISTOGRAM(GC_SLICE_MS, 1, 10000, 50, EXPONENTIAL, "Time spent running a JS GC slice (ms)")
+HISTOGRAM(GC_MMU_50, 1, 100, 20, LINEAR, "Minimum percentage of time spent outside GC over any 50ms window")
+HISTOGRAM_BOOLEAN(GC_RESET, "Was an incremental GC canceled?")
+HISTOGRAM_BOOLEAN(GC_INCREMENTAL_DISABLED, "Is incremental GC permanently disabled?")
 
 HISTOGRAM(TELEMETRY_PING, 1, 3000, 10, EXPONENTIAL, "Time taken to submit telemetry info (ms)")
 HISTOGRAM_BOOLEAN(TELEMETRY_SUCCESS,  "Successful telemetry submission")
 HISTOGRAM(MEMORY_JS_COMPARTMENTS_SYSTEM, 1, 1000, 50, EXPONENTIAL, "Total JavaScript compartments used for add-ons and internals.")
 HISTOGRAM(MEMORY_JS_COMPARTMENTS_USER, 1, 1000, 50, EXPONENTIAL, "Total JavaScript compartments used for web pages")
 HISTOGRAM(MEMORY_JS_GC_HEAP, 1024, 512 * 1024, 50, EXPONENTIAL, "Memory used by the garbage-collected JavaScript heap (KB)")
 HISTOGRAM(MEMORY_RESIDENT, 32 * 1024, 1024 * 1024, 50, EXPONENTIAL, "Resident memory size (KB)")
 HISTOGRAM(MEMORY_STORAGE_SQLITE, 1024, 512 * 1024, 50, EXPONENTIAL, "Memory used by SQLite (KB)")
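
Each HISTOGRAM entry only declares the bucket layout (minimum, maximum,
bucket count, bucket type, description); samples are accumulated separately
from C++. A sketch of reporting a slice time into the new GC_SLICE_MS
histogram, where sliceMs is an assumed input:

    #include "mozilla/Telemetry.h"

    static void RecordGCSliceTime(PRUint32 sliceMs)
    {
      mozilla::Telemetry::Accumulate(mozilla::Telemetry::GC_SLICE_MS, sliceMs);
    }
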
--- a/toolkit/content/aboutSupport.js
+++ b/toolkit/content/aboutSupport.js
@@ -111,16 +111,17 @@ window.onload = function () {
   } catch (e) {
   }
   document.getElementById("version-box").textContent = version;
 
   // Update the other sections.
   populatePreferencesSection();
   populateExtensionsSection();
   populateGraphicsSection();
+  populateJavaScriptSection();
 }
 
 function populateExtensionsSection() {
   AddonManager.getAddonsByTypes(["extension"], function(extensions) {
     extensions.sort(function(a,b) {
       if (a.isActive != b.isActive)
         return b.isActive ? 1 : -1;
       let lc = a.name.localeCompare(b.name);
@@ -377,16 +378,23 @@ function populateGraphicsSection() {
   appendChildren(graphics_tbody, [
     createParentElement("tr", [
       createHeader(bundle.GetStringFromName("acceleratedWindows")),
       createElement("td", msg),
     ])
   ]);
 }
 
+function populateJavaScriptSection() {
+  let enabled = window.QueryInterface(Ci.nsIInterfaceRequestor)
+                      .getInterface(Ci.nsIDOMWindowUtils)
+                      .isIncrementalGCEnabled();
+  document.getElementById("javascript-incremental-gc").textContent = enabled ? "1" : "0";
+}
+
 function getPrefValue(aName) {
   let value = "";
   let type = Services.prefs.getPrefType(aName);
   switch (type) {
     case Ci.nsIPrefBranch.PREF_STRING:
       value = Services.prefs.getComplexValue(aName, Ci.nsISupportsString).data;
       break;
     case Ci.nsIPrefBranch.PREF_BOOL:
--- a/toolkit/content/aboutSupport.xhtml
+++ b/toolkit/content/aboutSupport.xhtml
@@ -238,13 +238,31 @@
 
         <tbody id="graphics-info-properties">
         </tbody>
 
         <tbody id="graphics-failures-tbody">
         </tbody>
       </table>
 
+      <!-- - - - - - - - - - - - - - - - - - - - - -->
+      <h2 class="major-section">
+        &aboutSupport.jsTitle;
+      </h2>
+
+      <table>
+        <tbody>
+          <tr>
+            <th class="column">
+              &aboutSupport.jsIncrementalGC;
+            </th>
+
+            <td id="javascript-incremental-gc">
+            </td>
+          </tr>
+        </tbody>
+      </table>
+
     </div>
 
   </body>
 
 </html>
--- a/toolkit/content/tests/browser/Makefile.in
+++ b/toolkit/content/tests/browser/Makefile.in
@@ -51,13 +51,14 @@ DIRS = \
 include $(topsrcdir)/config/rules.mk
 
 _BROWSER_TEST_FILES = \
   browser_keyevents_during_autoscrolling.js \
   browser_bug295977_autoscroll_overflow.js \
   browser_bug594509.js \
   browser_Geometry.js \
   browser_save_resend_postdata.js \
+  browser_browserDrop.js \
   browser_Services.js \
   $(NULL)
 
 libs:: $(_BROWSER_TEST_FILES)
 	$(INSTALL) $(foreach f,$^,"$f") $(DEPTH)/_tests/testing/mochitest/browser/$(relativesrcdir)
new file mode 100644
--- /dev/null
+++ b/toolkit/content/tests/browser/browser_browserDrop.js
@@ -0,0 +1,61 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+function test() {
+  waitForExplicitFinish();
+
+  let newTab = gBrowser.selectedTab = gBrowser.addTab();
+  registerCleanupFunction(function () {
+    gBrowser.removeTab(newTab);
+  });
+
+  let scriptLoader = Cc["@mozilla.org/moz/jssubscript-loader;1"].
+                     getService(Ci.mozIJSSubScriptLoader);
+  let chromeUtils = {};
+  scriptLoader.loadSubScript("chrome://mochikit/content/tests/SimpleTest/ChromeUtils.js", chromeUtils);
+
+  let browser = gBrowser.selectedBrowser;
+
+  var linkHandlerActivated = 0;
+  // Don't worry about clobbering the droppedLinkHandler, since we're closing
+  // this tab after the test anyway.
+  browser.droppedLinkHandler = function dlh(e, url, name) {
+    linkHandlerActivated++;
+    ok(!/^(javascript|data):/i.test(url),
+       "javascript: and data: URLs should not be dropped");
+  };
+
+  var receivedDropCount = 0;
+  function dropListener() {
+    receivedDropCount++;
+    if (receivedDropCount == triggeredDropCount) {
+      // Wait for the browser's system-phase event handler to run.
+      executeSoon(function () {
+        is(linkHandlerActivated, validDropCount,
+           "link handler was called correct number of times");
+        finish();
+      });
+    }
+  }
+  browser.addEventListener("drop", dropListener, false);
+  registerCleanupFunction(function () {
+    browser.removeEventListener("drop", dropListener, false);
+  });
+
+  var triggeredDropCount = 0;
+  var validDropCount = 0;
+  function drop(text, valid) {
+    triggeredDropCount++;
+    if (valid)
+      validDropCount++;
+    executeSoon(function () {
+      chromeUtils.synthesizeDrop(browser, browser, [[{type: "text/plain", data: text}]], "copy", window, EventUtils);
+    });
+  }
+
+  drop("mochi.test/first", true);
+  drop("javascript:'bad'");
+  drop("jAvascript:'also bad'");
+  drop("mochi.test/second", true);
+  drop("data:text/html,bad");
+  drop("mochi.test/third", true);
+}
--- a/toolkit/content/widgets/browser.xml
+++ b/toolkit/content/widgets/browser.xml
@@ -1294,25 +1294,22 @@
       <![CDATA[
         if (!this.droppedLinkHandler || event.defaultPrevented)
           return;
 
         let name = { };
         let linkHandler = Components.classes["@mozilla.org/content/dropped-link-handler;1"].
                             getService(Components.interfaces.nsIDroppedLinkHandler);
         try {
-          var uri = linkHandler.dropLink(event, name);
+          // Pass true to prevent the dropping of javascript:/data: URIs
+          var uri = linkHandler.dropLink(event, name, true);
         } catch (ex) {
           return;
         }
 
-        // don't allow dropping javascript or data urls
-        if (/^\s*(javascript|data):/.test(uri))
-          return;
-
         if (uri) {
           this.droppedLinkHandler(event, uri, name.value);
         }
       ]]>
       </handler>
     </handlers>
 
   </binding>
--- a/toolkit/locales/en-US/chrome/global/aboutSupport.dtd
+++ b/toolkit/locales/en-US/chrome/global/aboutSupport.dtd
@@ -39,12 +39,15 @@ variant of aboutSupport.showDir.label. -
 <!ENTITY aboutSupport.showWin.label "Show Folder">
 
 <!ENTITY aboutSupport.modifiedKeyPrefsTitle "Important Modified Preferences">
 <!ENTITY aboutSupport.modifiedPrefsName "Name">
 <!ENTITY aboutSupport.modifiedPrefsValue "Value">
 
 <!ENTITY aboutSupport.graphicsTitle "Graphics">
 
+<!ENTITY aboutSupport.jsTitle "JavaScript">
+<!ENTITY aboutSupport.jsIncrementalGC "Incremental GC">
+
 <!ENTITY aboutSupport.installationHistoryTitle "Installation History">
 <!ENTITY aboutSupport.updateHistoryTitle "Update History">
 
 <!ENTITY aboutSupport.copyToClipboard.label "Copy all to clipboard">