Merge autoland to mozilla-central r=merge a=merge
author: Tiberius Oros <toros@mozilla.com>
Fri, 05 Jan 2018 11:56:24 +0200
changeset 449660 df1519b33fe0e809f40a155c8a490b2f07d66cfe
parent 449562 3acb14b949150529ec761f845f9a3d61ee341dac (current diff)
parent 449659 96c9efcb4a31b996cb92cf8fc56a9c3764f91250 (diff)
child 449690 81362f7306fe413b19fdba27cd0e9a5525d902e1
push id: 8527
push user: Callek@gmail.com
push date: Thu, 11 Jan 2018 21:05:50 +0000
treeherder: mozilla-beta@95342d212a7a [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: merge, merge
milestone: 59.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Merge autoland to mozilla-central r=merge a=merge
services/sync/tests/tps/test_addon_sanity.js
--- a/browser/base/content/browser.js
+++ b/browser/base/content/browser.js
@@ -1033,22 +1033,22 @@ function _loadURIWithFlags(browser, uri,
     // createFixupURI throws if it can't create a URI. If that's the case then
     // we still need to pass down the uri because docshell handles this case.
     requiredRemoteType = gMultiProcessBrowser ? E10SUtils.DEFAULT_REMOTE_TYPE
                                               : E10SUtils.NOT_REMOTE;
   }
 
   let mustChangeProcess = requiredRemoteType != currentRemoteType;
   let newFrameloader = false;
-  if (browser.getAttribute("isPreloadBrowser") == "true" && uri != "about:newtab") {
+  if (browser.getAttribute("preloadedState") === "consumed" && uri != "about:newtab") {
     // Leaving about:newtab from a used to be preloaded browser should run the process
     // selecting algorithm again.
     mustChangeProcess = true;
     newFrameloader = true;
-    browser.removeAttribute("isPreloadBrowser");
+    browser.removeAttribute("preloadedState");
   }
 
   // !requiredRemoteType means we're loading in the parent/this process.
   if (!requiredRemoteType) {
     browser.inLoadURI = true;
   }
   try {
     if (!mustChangeProcess) {
@@ -1120,18 +1120,18 @@ function _loadURIWithFlags(browser, uri,
 function LoadInOtherProcess(browser, loadOptions, historyIndex = -1) {
   let tab = gBrowser.getTabForBrowser(browser);
   SessionStore.navigateAndRestore(tab, loadOptions, historyIndex);
 }
 
 // Called when a docshell has attempted to load a page in an incorrect process.
 // This function is responsible for loading the page in the correct process.
 function RedirectLoad({ target: browser, data }) {
-  if (browser.getAttribute("isPreloadBrowser") == "true") {
-    browser.removeAttribute("isPreloadBrowser");
+  if (browser.getAttribute("preloadedState") === "consumed") {
+    browser.removeAttribute("preloadedState");
     data.loadOptions.newFrameloader = true;
   }
 
   if (data.loadOptions.reloadInFreshProcess) {
     // Convert the fresh process load option into a large allocation remote type
     // to use common processing from this point.
     data.loadOptions.remoteType = E10SUtils.LARGE_ALLOCATION_REMOTE_TYPE;
     data.loadOptions.newFrameloader = true;
--- a/browser/base/content/content.js
+++ b/browser/base/content/content.js
@@ -309,17 +309,17 @@ var AboutNetAndCertErrorListener = {
         let now = Date.now();
         let certRange = this._getCertValidityRange();
 
         let approximateDate = now - difference * 1000;
         // If the difference is more than a day, we last fetched the date in the last 5 days,
         // and adjusting the date per the interval would make the cert valid, warn the user:
         if (Math.abs(difference) > 60 * 60 * 24 && (now - lastFetched) <= 60 * 60 * 24 * 5 &&
             certRange.notBefore < approximateDate && certRange.notAfter > approximateDate) {
-          let formatter = Services.intl.createDateTimeFormat(undefined, {
+          let formatter = new Services.intl.DateTimeFormat(undefined, {
             dateStyle: "short"
           });
           let systemDate = formatter.format(new Date());
           // negative difference means local time is behind server time
           approximateDate = formatter.format(new Date(approximateDate));
 
           content.document.getElementById("wrongSystemTime_URL")
             .textContent = content.document.location.hostname;
@@ -345,17 +345,17 @@ var AboutNetAndCertErrorListener = {
           let buildDate = new Date(year, month, day);
           let systemDate = new Date();
 
           // We don't check the notBefore of the cert with the build date,
           // as it is of course almost certain that it is now later than the build date,
           // so we shouldn't exclude the possibility that the cert has become valid
           // since the build date.
           if (buildDate > systemDate && new Date(certRange.notAfter) > buildDate) {
-            let formatter = Services.intl.createDateTimeFormat(undefined, {
+            let formatter = new Services.intl.DateTimeFormat(undefined, {
               dateStyle: "short"
             });
 
             content.document.getElementById("wrongSystemTimeWithoutReference_URL")
               .textContent = content.document.location.hostname;
             content.document.getElementById("wrongSystemTimeWithoutReference_systemDate")
               .textContent = formatter.format(systemDate);
 
--- a/browser/base/content/pageinfo/pageInfo.js
+++ b/browser/base/content/pageinfo/pageInfo.js
@@ -1011,17 +1011,17 @@ function formatNumber(number) {
   return (+number).toLocaleString(); // coerce number to a numeric value before calling toLocaleString()
 }
 
 function formatDate(datestr, unknown) {
   var date = new Date(datestr);
   if (!date.valueOf())
     return unknown;
 
-  const dateTimeFormatter = Services.intl.createDateTimeFormat(undefined, {
+  const dateTimeFormatter = new Services.intl.DateTimeFormat(undefined, {
     dateStyle: "long", timeStyle: "long"
   });
   return dateTimeFormatter.format(date);
 }
 
 function doCopy() {
   if (!gClipboardHelper)
     return;
--- a/browser/base/content/tabbrowser.xml
+++ b/browser/base/content/tabbrowser.xml
@@ -2131,18 +2131,21 @@
             this._preloadedBrowser = null;
 
             // Attach the nsIFormFillController now that we know the browser
             // will be used. If we do that before and the preloaded browser
             // won't be consumed until shutdown then we leak a docShell.
             // Also, we do not need to take care of attaching nsIFormFillControllers
             // in the case that the browser is remote, as remote browsers take
             // care of that themselves.
-            if (browser && this.hasAttribute("autocompletepopup")) {
-              browser.setAttribute("autocompletepopup", this.getAttribute("autocompletepopup"));
+            if (browser) {
+              browser.setAttribute("preloadedState", "consumed");
+              if (this.hasAttribute("autocompletepopup")) {
+                browser.setAttribute("autocompletepopup", this.getAttribute("autocompletepopup"));
+              }
             }
 
             return browser;
           ]]>
         </body>
       </method>
 
       <method name="_isPreloadingEnabled">
@@ -2234,18 +2237,33 @@
               }
               b.presetOpenerWindow(aParams.openerWindow);
             }
 
             if (!aParams.isPreloadBrowser && this.hasAttribute("autocompletepopup")) {
               b.setAttribute("autocompletepopup", this.getAttribute("autocompletepopup"));
             }
 
+            /*
+             * This attribute is meant to describe if the browser is the
+             * preloaded browser. There are 2 defined states: "preloaded" or
+             * "consumed". The order of events goes as follows:
+             *   1. The preloaded browser is created and the 'preloadedState'
+             *      attribute for that browser is set to "preloaded".
+             *   2. When a new tab is opened and it is time to show that
+             *      preloaded browser, the 'preloadedState' attribute for that
+             *      browser is set to "consumed"
+             *   3. When we then navigate away from about:newtab, the "consumed"
+             *      browsers will attempt to switch to a new content process,
+             *      therefore the 'preloadedState' attribute is removed from
+             *      that browser altogether
+             * See more details on Bug 1420285.
+             */
             if (aParams.isPreloadBrowser) {
-              b.setAttribute("isPreloadBrowser", "true");
+              b.setAttribute("preloadedState", "preloaded");
             }
 
             if (this.hasAttribute("selectmenulist"))
               b.setAttribute("selectmenulist", this.getAttribute("selectmenulist"));
 
             if (this.hasAttribute("datetimepicker")) {
               b.setAttribute("datetimepicker", this.getAttribute("datetimepicker"));
             }
--- a/browser/base/content/test/about/browser_aboutCertError.js
+++ b/browser/base/content/test/about/browser_aboutCertError.js
@@ -146,17 +146,17 @@ add_task(async function checkWrongSystem
         text: div.textContent,
         systemDate: systemDateDiv.textContent,
         actualDate: actualDateDiv.textContent,
         learnMoreLink: learnMoreLink.href
       };
     });
   }
 
-  let formatter = Services.intl.createDateTimeFormat(undefined, {
+  let formatter = new Services.intl.DateTimeFormat(undefined, {
     dateStyle: "short"
   });
 
   // pretend we have a positively skewed (ahead) system time
   let serverDate = new Date("2015/10/27");
   let serverDateFmt = formatter.format(serverDate);
   let localDateFmt = formatter.format(new Date());
 
--- a/browser/base/content/test/performance/browser_tabstrip_overflow_underflow_reflows.js
+++ b/browser/base/content/test/performance/browser_tabstrip_overflow_underflow_reflows.js
@@ -45,21 +45,76 @@ add_task(async function() {
     await BrowserTestUtils.waitForEvent(gBrowser.selectedTab, "transitionend",
         false, e => e.propertyName === "max-width");
     await switchDone;
     await BrowserTestUtils.waitForCondition(() => {
       return gBrowser.tabContainer.arrowScrollbox.hasAttribute("scrolledtoend");
     });
   }, EXPECTED_OVERFLOW_REFLOWS, window);
 
-  await withReflowObserver(async function() {
+  Assert.ok(gBrowser.tabContainer.hasAttribute("overflow"),
+            "Tabs should now be overflowed.");
+
+  // Now test that opening and closing a tab while overflowed doesn't cause
+  // us to reflow.
+  await withReflowObserver(async function(dirtyFrame) {
+    let switchDone = BrowserTestUtils.waitForEvent(window, "TabSwitchDone");
+    BrowserOpenTab();
+    await switchDone;
+    await BrowserTestUtils.waitForCondition(() => {
+      return gBrowser.tabContainer.arrowScrollbox.hasAttribute("scrolledtoend");
+    });
+  }, [], window);
+
+  await withReflowObserver(async function(dirtyFrame) {
     let switchDone = BrowserTestUtils.waitForEvent(window, "TabSwitchDone");
-    let transitionPromise =
-      BrowserTestUtils.waitForEvent(gBrowser.selectedTab,
-                                    "transitionend", false,
-                                    e => e.propertyName === "max-width");
     await BrowserTestUtils.removeTab(gBrowser.selectedTab, { animate: true });
-    await transitionPromise;
     await switchDone;
-  }, EXPECTED_UNDERFLOW_REFLOWS, window);
+  }, [], window);
+
+  // At this point, we have an overflowed tab strip, and we've got the last tab
+  // selected. This should mean that the first tab is scrolled out of view.
+  // Let's test that we don't reflow when switching to that first tab.
+  let lastTab = gBrowser.selectedTab;
+  let arrowScrollbox = gBrowser.tabContainer.arrowScrollbox;
+
+  // First, we'll check that the first tab is actually scrolled
+  // at least partially out of view.
+  Assert.ok(arrowScrollbox.scrollPosition > 0,
+            "First tab should be partially scrolled out of view.");
+
+  // Now switch to the first tab. We shouldn't flush layout at all.
+  await withReflowObserver(async function(dirtyFrame) {
+    let firstTab = gBrowser.tabContainer.firstChild;
+    await BrowserTestUtils.switchTab(gBrowser, firstTab);
+    await BrowserTestUtils.waitForCondition(() => {
+      return gBrowser.tabContainer.arrowScrollbox.hasAttribute("scrolledtostart");
+    });
+  }, [], window);
+
+  // Okay, now close the last tab. The tabstrip should stay overflowed, but removing
+  // one more after that should underflow it.
+  await BrowserTestUtils.removeTab(lastTab);
+
+  Assert.ok(gBrowser.tabContainer.hasAttribute("overflow"),
+            "Tabs should still be overflowed.");
+
+  // Depending on the size of the window, it might take one or more tab
+  // removals to put the tab strip out of the overflow state, so we'll just
+  // keep testing removals until that occurs.
+  while (gBrowser.tabContainer.hasAttribute("overflow")) {
+    lastTab = gBrowser.tabContainer.lastElementChild;
+    if (gBrowser.selectedTab !== lastTab) {
+      await BrowserTestUtils.switchTab(gBrowser, lastTab);
+    }
+
+    // ... and make sure we don't flush layout when closing it, and exiting
+    // the overflowed state.
+    await withReflowObserver(async function() {
+      let switchDone = BrowserTestUtils.waitForEvent(window, "TabSwitchDone");
+      await BrowserTestUtils.removeTab(lastTab, { animate: true });
+      await switchDone;
+      await BrowserTestUtils.waitForCondition(() => !lastTab.isConnected);
+    }, EXPECTED_UNDERFLOW_REFLOWS, window);
+  }
 
   await removeAllButFirstTab();
 });
--- a/browser/components/feeds/FeedWriter.js
+++ b/browser/components/feeds/FeedWriter.js
@@ -187,17 +187,17 @@ FeedWriter.prototype = {
 
   __dateFormatter: null,
   get _dateFormatter() {
     if (!this.__dateFormatter) {
       const dtOptions = {
         timeStyle: "short",
         dateStyle: "long"
       };
-      this.__dateFormatter = Services.intl.createDateTimeFormat(undefined, dtOptions);
+      this.__dateFormatter = new Services.intl.DateTimeFormat(undefined, dtOptions);
     }
     return this.__dateFormatter;
   },
 
   /**
    * Returns the feed type.
    */
   __feedType: null,
--- a/browser/components/places/content/places.js
+++ b/browser/components/places/content/places.js
@@ -408,17 +408,17 @@ var PlacesOrganizer = {
    * Populates the restore menu with the dates of the backups available.
    */
   populateRestoreMenu: function PO_populateRestoreMenu() {
     let restorePopup = document.getElementById("fileRestorePopup");
 
     const dtOptions = {
       dateStyle: "long"
     };
-    let dateFormatter = Services.intl.createDateTimeFormat(undefined, dtOptions);
+    let dateFormatter = new Services.intl.DateTimeFormat(undefined, dtOptions);
 
     // Remove existing menu items.  Last item is the restoreFromFile item.
     while (restorePopup.childNodes.length > 1)
       restorePopup.firstChild.remove();
 
     (async function() {
       let backupFiles = await PlacesBackups.getBackupFiles();
       if (backupFiles.length == 0)
--- a/browser/components/places/content/treeView.js
+++ b/browser/components/places/content/treeView.js
@@ -551,29 +551,29 @@ PlacesTreeView.prototype = {
   },
 
   // We use a different formatter for times within the current day,
   // so we cache both a "today" formatter and a general date formatter.
   __todayFormatter: null,
   get _todayFormatter() {
     if (!this.__todayFormatter) {
       const dtOptions = { timeStyle: "short" };
-      this.__todayFormatter = Services.intl.createDateTimeFormat(undefined, dtOptions);
+      this.__todayFormatter = new Services.intl.DateTimeFormat(undefined, dtOptions);
     }
     return this.__todayFormatter;
   },
 
   __dateFormatter: null,
   get _dateFormatter() {
     if (!this.__dateFormatter) {
       const dtOptions = {
         dateStyle: "short",
         timeStyle: "short"
       };
-      this.__dateFormatter = Services.intl.createDateTimeFormat(undefined, dtOptions);
+      this.__dateFormatter = new Services.intl.DateTimeFormat(undefined, dtOptions);
     }
     return this.__dateFormatter;
   },
 
   COLUMN_TYPE_UNKNOWN: 0,
   COLUMN_TYPE_TITLE: 1,
   COLUMN_TYPE_URI: 2,
   COLUMN_TYPE_DATE: 3,
--- a/browser/components/places/tests/chrome/test_treeview_date.xul
+++ b/browser/components/places/tests/chrome/test_treeview_date.xul
@@ -133,17 +133,17 @@
                 if (node.uri == "http://at.midnight.com/" ||
                     node.uri == "http://after.midnight.com/") {
                   dtOptions.dateStyle = undefined;
                 } else if (node.uri != "http://before.midnight.com/") {
                   // Avoid to test spurious uris, due to how the test works
                   // a redirecting uri could be put in the tree while we test.
                   break;
                 }
-                let timeStr = Services.intl.createDateTimeFormat(undefined, dtOptions).format(timeObj);
+                let timeStr = new Services.intl.DateTimeFormat(undefined, dtOptions).format(timeObj);
 
                 is(text, timeStr, "Date format is correct");
                 break;
               case "visitCount":
                 is(text, 1, "Visit count is correct");
                 break;
             }
           }
--- a/browser/components/preferences/cookies.js
+++ b/browser/components/preferences/cookies.js
@@ -486,17 +486,17 @@ var gCookiesWindow = {
         break;
     }
     this._view._rowCount = hostCount.value;
   },
 
   formatExpiresString(aExpires) {
     if (aExpires) {
       var date = new Date(1000 * aExpires);
-      const dateTimeFormatter = Services.intl.createDateTimeFormat(undefined, {
+      const dateTimeFormatter = new Services.intl.DateTimeFormat(undefined, {
         dateStyle: "long", timeStyle: "long"
       });
       return dateTimeFormatter.format(date);
     }
     return this._bundle.getString("expireAtEndOfSession");
   },
 
   _getUserContextString(aUserContextId) {
--- a/browser/extensions/activity-stream/lib/TelemetryFeed.jsm
+++ b/browser/extensions/activity-stream/lib/TelemetryFeed.jsm
@@ -241,17 +241,17 @@ this.TelemetryFeed = class TelemetryFeed
   /**
    * handleNewTabInit - Handle NEW_TAB_INIT, which creates a new session and sets the a flag
    *                    for session.perf based on whether or not this new tab is preloaded
    *
    * @param  {obj} action the Action object
    */
   handleNewTabInit(action) {
     const session = this.addSession(au.getPortIdOfSender(action), action.data.url);
-    session.perf.is_preloaded = action.data.browser.getAttribute("isPreloadBrowser") === "true";
+    session.perf.is_preloaded = action.data.browser.getAttribute("preloadedState") === "preloaded";
   }
 
   /**
    * createPing - Create a ping with common properties
    *
    * @param  {string} id The portID of the session, if a session is relevant (optional)
    * @return {obj}    A telemetry ping
    */
new file mode 100644
--- /dev/null
+++ b/build/debian-packages/python-wheezy.diff
@@ -0,0 +1,149 @@
+diff -u python2.7-2.7.9/debian/changelog python2.7-2.7.9/debian/changelog
+--- python2.7-2.7.9/debian/changelog
++++ python2.7-2.7.9/debian/changelog
+@@ -1,3 +1,17 @@
++python2.7 (2.7.9-2.deb7moz1) wheezy; urgency=medium
++
++  * Mozilla backport for wheezy.
++  * debian/control.in:
++    - Remove gcc 4.9 build dependency, wheezy doesn't have it.
++    - Remove libexpat1-dev dependency from python dev packages. It prevents
++      from co-installing some i386 -dev packages.
++  * debian/rules:
++    - Adapt ar, ranlib and objcopy paths to work on wheezy.
++  * debian/control.in, debian/rules: Don't generate the -doc package, sphinx
++    is too old in wheezy.
++
++ -- Mike Hommey <glandium@mozilla.com>  Fri, 13 Oct 2017 19:20:21 +0900
++
+ python2.7 (2.7.9-2+deb8u1) jessie; urgency=medium
+ 
+   * Backport upstream commit b3ce713fb9beebfff9848cefa0acbd59acc68fe9
+diff -u python2.7-2.7.9/debian/control.in python2.7-2.7.9/debian/control.in
+--- python2.7-2.7.9/debian/control.in
++++ python2.7-2.7.9/debian/control.in
+@@ -3,7 +3,6 @@
+ Priority: optional
+ Maintainer: Matthias Klose <doko@debian.org>
+ Build-Depends: debhelper (>= 5), @bd_dpkgdev@
+-  gcc (>= 4:4.9.2),
+   quilt, autoconf, autotools-dev,
+   lsb-release, sharutils,
+   libreadline-dev, libtinfo-dev, libncursesw5-dev (>= 5.3), @bd_gcc@
+@@ -112,7 +111,7 @@
+ Package: @PVER@-dev
+ Architecture: any
+ Multi-Arch: allowed
+-Depends: @PVER@ (= ${binary:Version}), lib@PVER@-dev (= ${binary:Version}), lib@PVER@ (= ${binary:Version}), libexpat1-dev, ${shlibs:Depends}, ${misc:Depends}
++Depends: @PVER@ (= ${binary:Version}), lib@PVER@-dev (= ${binary:Version}), lib@PVER@ (= ${binary:Version}), ${shlibs:Depends}, ${misc:Depends}
+ Recommends: libc6-dev | libc-dev
+ Replaces: @PVER@ (<< 2.7-3)
+ Description: Header files and a static library for Python (v@VER@)
+@@ -127,7 +126,7 @@
+ Architecture: any
+ Multi-Arch: same
+ Pre-Depends: multiarch-support
+-Depends: lib@PVER@-stdlib (= ${binary:Version}), lib@PVER@ (= ${binary:Version}), libexpat1-dev, ${shlibs:Depends}, ${misc:Depends}
++Depends: lib@PVER@-stdlib (= ${binary:Version}), lib@PVER@ (= ${binary:Version}), ${shlibs:Depends}, ${misc:Depends}
+ Replaces: @PVER@ (<< 2.7-3), @PVER@-dev (<< 2.7.3-10), @PVER@-minimal (<< 2.7.3-10)
+ Recommends: libc6-dev | libc-dev
+ Description: Header files and a static library for Python (v@VER@)
+@@ -161,27 +160,6 @@
+  IDLE is an Integrated Development Environment for Python (v@VER@).
+  IDLE is written using Tkinter and therefore quite platform-independent.
+ 
+-Package: @PVER@-doc
+-Section: doc
+-Architecture: all
+-Depends: libjs-jquery, libjs-underscore, ${misc:Depends}
+-Suggests: @PVER@
+-Description: Documentation for the high-level object-oriented language Python (v@VER@)
+- These is the official set of documentation for the interactive high-level
+- object-oriented language Python (v@VER@). All documents are provided
+- in HTML format. The package consists of ten documents:
+- .
+-   * What's New in Python@VER@
+-   * Tutorial
+-   * Python Library Reference
+-   * Macintosh Module Reference
+-   * Python Language Reference
+-   * Extending and Embedding Python
+-   * Python/C API Reference
+-   * Installing Python Modules
+-   * Documenting Python
+-   * Distributing Python Modules
+-
+ Package: @PVER@-dbg
+ Section: debug
+ Architecture: any
+diff -u python2.7-2.7.9/debian/rules python2.7-2.7.9/debian/rules
+--- python2.7-2.7.9/debian/rules
++++ python2.7-2.7.9/debian/rules
+@@ -121,8 +121,8 @@
+ CC = $(DEB_HOST_GNU_TYPE)-gcc
+ CXX=$(DEB_HOST_GNU_TYPE)-g++
+ 
+-AR=$(DEB_HOST_GNU_TYPE)-ar
+-RANLIB=$(DEB_HOST_GNU_TYPE)-ranlib
++AR=$(DEB_HOST_GNU_TYPE)-gcc-ar-4.7
++RANLIB=$(DEB_HOST_GNU_TYPE)-gcc-ranlib-4.7
+ 
+ DPKG_CFLAGS  := $(shell dpkg-buildflags --get CPPFLAGS; dpkg-buildflags --get CFLAGS)
+ DPKG_LDFLAGS := $(shell dpkg-buildflags --get LDFLAGS)
+@@ -171,8 +171,6 @@
+       LTO_CFLAGS += -ffat-lto-objects
+     endif
+     EXTRA_OPT_CFLAGS += $(LTO_CFLAGS)
+-    AR=$(DEB_HOST_GNU_TYPE)-gcc-ar
+-    RANLIB=$(DEB_HOST_GNU_TYPE)-gcc-ranlib
+ endif
+ 
+ make_build_target = $(if $(with_pgo),profile-opt)
+@@ -628,7 +626,6 @@
+ 
+ stamps/stamp-doc-html:
+ 	dh_testdir
+-	$(MAKE) -C Doc html
+ 	touch stamps/stamp-doc-html
+ 
+ build-doc: stamps/stamp-patch stamps/stamp-build-doc
+@@ -1215,30 +1212,6 @@
+ 	dh_testdir -i
+ 	dh_testroot -i
+ 
+-	: # $(p_doc) package
+-	dh_installdirs -p$(p_doc) \
+-		usr/share/doc/$(p_base) \
+-		usr/share/doc/$(p_doc)
+-	dh_installdocs -p$(p_doc)
+-	cp -a Doc/build/html $(d_doc)/usr/share/doc/$(p_base)/
+-	rm -f $(d_doc)/usr/share/doc/$(p_base)/html/_static/jquery.js
+-	rm -f $(d_doc)/usr/share/doc/$(p_base)/html/_static/underscore.js
+-	dh_link -p$(p_doc) \
+-		/usr/share/doc/$(p_base)/html \
+-			/usr/share/doc/$(p_doc)/html \
+-		/usr/share/javascript/jquery/jquery.js \
+-			/usr/share/doc/$(p_base)/html/_static/jquery.js \
+-		/usr/share/javascript/underscore/underscore.js \
+-			/usr/share/doc/$(p_base)/html/_static/underscore.js
+-
+-	: # devhelp docs
+-	$(buildd_static)/python debian/pyhtml2devhelp.py \
+-		$(d_doc)/usr/share/doc/$(p_base)/html index.html $(VER) \
+-		> $(d_doc)/usr/share/doc/$(p_base)/html/$(PVER).devhelp
+-	gzip -9v $(d_doc)/usr/share/doc/$(p_base)/html/$(PVER).devhelp
+-	dh_link -p$(p_doc) \
+-		/usr/share/doc/$(p_base)/html /usr/share/devhelp/books/$(PVER)
+-
+ 	for i in $(p_ltst); do \
+ 	  rm -rf debian/$$i/usr/share/doc/$$i; \
+ 	  ln -s $(p_lbase) debian/$$i/usr/share/doc/$$i; \
+@@ -1298,7 +1271,7 @@
+ endif
+ 
+ 	find $(d_ldbg) $(d_ldev) -name '*.a' ! -type l \
+-		| xargs -n 1 $(DEB_HOST_GNU_TYPE)-objcopy -p --remove-section=.gnu.lto_.*
++		| xargs -n 1 objcopy -p --remove-section=.gnu.lto_.*
+ 	dh_strip -a -N$(p_dbg) -N$(p_ldbg) -Xdebug -Xdbg --dbg-package=$(p_dbg)
+ 	cp Tools/gdb/libpython.py $(d_dbg)/usr/lib/debug/usr/bin/$(PVER)-gdb.py
+ 	ln -sf $(PVER)-gdb.py $(d_dbg)/usr/lib/debug/usr/bin/$(PVER)-dbg-gdb.py
--- a/devtools/client/aboutdebugging/test/browser_service_workers.js
+++ b/devtools/client/aboutdebugging/test/browser_service_workers.js
@@ -8,17 +8,17 @@
 const SERVICE_WORKER = URL_ROOT + "service-workers/empty-sw.js";
 const TAB_URL = URL_ROOT + "service-workers/empty-sw.html";
 
 add_task(function* () {
   yield enableServiceWorkerDebugging();
 
   let { tab, document } = yield openAboutDebugging("workers");
 
-  let swTab = yield addTab(TAB_URL);
+  let swTab = yield addTab(TAB_URL, { background: true });
 
   let serviceWorkersElement = getServiceWorkerList(document);
 
   yield waitUntil(() => {
     // Check that the service worker appears in the UI
     let names = [...document.querySelectorAll("#service-workers .target-name")];
     names = names.map(element => element.textContent);
     return names.includes(SERVICE_WORKER);
--- a/devtools/client/aboutdebugging/test/browser_service_workers_fetch_flag.js
+++ b/devtools/client/aboutdebugging/test/browser_service_workers_fetch_flag.js
@@ -7,17 +7,17 @@
 // but http:// is ok with dom.serviceWorkers.testing.enabled turned on.
 const EMPTY_SW_TAB_URL = URL_ROOT + "service-workers/empty-sw.html";
 const FETCH_SW_TAB_URL = URL_ROOT + "service-workers/fetch-sw.html";
 
 function* testBody(url, expecting) {
   yield enableServiceWorkerDebugging();
   let { tab, document } = yield openAboutDebugging("workers");
 
-  let swTab = yield addTab(url);
+  let swTab = yield addTab(url, {background: true});
 
   let serviceWorkersElement = getServiceWorkerList(document);
 
   info("Wait for fetch flag.");
   yield waitUntil(() => {
     let fetchFlags =
       [...document.querySelectorAll("#service-workers .service-worker-fetch-flag")];
     fetchFlags = fetchFlags.map(element => element.textContent);
--- a/devtools/client/aboutdebugging/test/browser_service_workers_push.js
+++ b/devtools/client/aboutdebugging/test/browser_service_workers_push.js
@@ -17,17 +17,17 @@ const TAB_URL = URL_ROOT + "service-work
 add_task(function* () {
   yield enableServiceWorkerDebugging();
   let { tab, document } = yield openAboutDebugging("workers");
 
   // Listen for mutations in the service-workers list.
   let serviceWorkersElement = getServiceWorkerList(document);
 
   // Open a tab that registers a push service worker.
-  let swTab = yield addTab(TAB_URL);
+  let swTab = yield addTab(TAB_URL, { background: true });
 
   info("Make the test page notify us when the service worker sends a message.");
 
   yield ContentTask.spawn(swTab.linkedBrowser, {}, function () {
     let win = content.wrappedJSObject;
     win.navigator.serviceWorker.addEventListener("message", function (event) {
       sendAsyncMessage(event.data);
     });
--- a/devtools/client/aboutdebugging/test/browser_service_workers_push_service.js
+++ b/devtools/client/aboutdebugging/test/browser_service_workers_push_service.js
@@ -53,17 +53,17 @@ add_task(function* () {
   };
 
   let { tab, document } = yield openAboutDebugging("workers");
 
   // Listen for mutations in the service-workers list.
   let serviceWorkersElement = document.getElementById("service-workers");
 
   // Open a tab that registers a push service worker.
-  let swTab = yield addTab(TAB_URL);
+  let swTab = yield addTab(TAB_URL, { background: true });
 
   info("Wait until the service worker appears in about:debugging");
   yield waitUntilServiceWorkerContainer(SERVICE_WORKER, document);
 
   yield waitForServiceWorkerActivation(SERVICE_WORKER, document);
 
   // Wait for the service worker details to update.
   let names = [...document.querySelectorAll("#service-workers .target-name")];
--- a/devtools/client/aboutdebugging/test/browser_service_workers_start.js
+++ b/devtools/client/aboutdebugging/test/browser_service_workers_start.js
@@ -20,17 +20,17 @@ add_task(function* () {
   yield pushPref("dom.serviceWorkers.idle_extended_timeout", SW_TIMEOUT);
 
   let { tab, document } = yield openAboutDebugging("workers");
 
   // Listen for mutations in the service-workers list.
   let serviceWorkersElement = getServiceWorkerList(document);
 
   // Open a tab that registers an empty service worker.
-  let swTab = yield addTab(TAB_URL);
+  let swTab = yield addTab(TAB_URL, { background: true });
 
   // Wait for the service-workers list to update.
   info("Wait until the service worker appears in about:debugging");
   yield waitUntilServiceWorkerContainer(SERVICE_WORKER, document);
 
   info("Ensure that the registration resolved before trying to interact with " +
     "the service worker.");
   yield waitForServiceWorkerRegistered(swTab);
--- a/devtools/client/aboutdebugging/test/browser_service_workers_status.js
+++ b/devtools/client/aboutdebugging/test/browser_service_workers_status.js
@@ -16,17 +16,17 @@ add_task(function* () {
   yield pushPref("dom.serviceWorkers.idle_timeout", SW_TIMEOUT);
   yield pushPref("dom.serviceWorkers.idle_extended_timeout", SW_TIMEOUT);
 
   let { tab, document } = yield openAboutDebugging("workers");
 
   // Listen for mutations in the service-workers list.
   let serviceWorkersElement = getServiceWorkerList(document);
 
-  let swTab = yield addTab(TAB_URL);
+  let swTab = yield addTab(TAB_URL, { background: true });
 
   info("Wait until the service worker appears in about:debugging");
   let container = yield waitUntilServiceWorkerContainer(SERVICE_WORKER, document);
 
   // We should ideally check that the service worker registration goes through the
   // "registering" and "running" steps, but it is difficult to workaround race conditions
   // for a test running on a wide variety of platforms. Due to intermittent failures, we
   // simply check that the registration transitions to "stopped".
--- a/devtools/client/aboutdebugging/test/browser_service_workers_timeout.js
+++ b/devtools/client/aboutdebugging/test/browser_service_workers_timeout.js
@@ -15,17 +15,17 @@ add_task(function* () {
   yield enableServiceWorkerDebugging();
   yield pushPref("dom.serviceWorkers.idle_timeout", SW_TIMEOUT);
   yield pushPref("dom.serviceWorkers.idle_extended_timeout", SW_TIMEOUT);
 
   let { tab, document } = yield openAboutDebugging("workers");
 
   let serviceWorkersElement = getServiceWorkerList(document);
 
-  let swTab = yield addTab(TAB_URL);
+  let swTab = yield addTab(TAB_URL, { background: true });
 
   info("Wait until the service worker appears in about:debugging");
   yield waitUntilServiceWorkerContainer(SERVICE_WORKER, document);
 
   // Ensure that the registration resolved before trying to connect to the sw
   yield waitForServiceWorkerRegistered(swTab);
   ok(true, "Service worker registration resolved");
 
--- a/devtools/client/aboutdebugging/test/browser_service_workers_unregister.js
+++ b/devtools/client/aboutdebugging/test/browser_service_workers_unregister.js
@@ -15,17 +15,17 @@ const SERVICE_WORKER = SCOPE + "empty-sw
 const TAB_URL = SCOPE + "empty-sw.html";
 
 add_task(function* () {
   yield enableServiceWorkerDebugging();
 
   let { tab, document } = yield openAboutDebugging("workers");
 
   // Open a tab that registers an empty service worker.
-  let swTab = yield addTab(TAB_URL);
+  let swTab = yield addTab(TAB_URL, { background: true });
 
   info("Wait until the service worker appears in about:debugging");
   yield waitUntilServiceWorkerContainer(SERVICE_WORKER, document);
 
   yield waitForServiceWorkerActivation(SERVICE_WORKER, document);
 
   info("Ensure that the registration resolved before trying to interact with " +
     "the service worker.");
--- a/dom/animation/test/mozilla/file_restyles.html
+++ b/dom/animation/test/mozilla/file_restyles.html
@@ -1138,16 +1138,108 @@ waitForAllPaints(() => {
     var markers = await observeStyling(5);
     is(markers.length, 0,
        'CSS animation on an out-of-view element with pre-transform should be ' +
        'throttled.');
 
     await ensureElementRemoval(scrollDiv);
   });
 
+  add_task(
+    async function throttling_animations_in_out_of_view_position_absolute_element() {
+      if (!offscreenThrottlingEnabled) {
+        return;
+      }
+
+      var parentDiv = addDiv(null,
+                             { style: 'position: absolute; top: -1000px;' });
+      var targetDiv = addDiv(null,
+                             { style: 'animation: background-color 100s;' });
+      parentDiv.appendChild(targetDiv);
+
+      var animation = targetDiv.getAnimations()[0];
+      await animation.ready;
+
+      var markers = await observeStyling(5);
+      is(markers.length, 0,
+         'CSS animation in an out-of-view position absolute element should ' +
+         'be throttled');
+
+      await ensureElementRemoval(parentDiv);
+    }
+  );
+
+  add_task(
+    async function throttling_animations_on_out_of_view_position_absolute_element() {
+      if (!offscreenThrottlingEnabled) {
+        return;
+      }
+
+      var div = addDiv(null,
+                       { style: 'animation: background-color 100s; ' +
+                                'position: absolute; top: -1000px;' });
+
+      var animation = div.getAnimations()[0];
+      await animation.ready;
+
+      var markers = await observeStyling(5);
+      is(markers.length, 0,
+         'CSS animation on an out-of-view position absolute element should ' +
+         'be throttled');
+
+      await ensureElementRemoval(div);
+    }
+  );
+
+  add_task(
+    async function throttling_animations_in_out_of_view_position_fixed_element() {
+      if (!offscreenThrottlingEnabled) {
+        return;
+      }
+
+      var parentDiv = addDiv(null,
+                             { style: 'position: fixed; top: -1000px;' });
+      var targetDiv = addDiv(null,
+                             { style: 'animation: background-color 100s;' });
+      parentDiv.appendChild(targetDiv);
+
+      var animation = targetDiv.getAnimations()[0];
+      await animation.ready;
+
+      var markers = await observeStyling(5);
+      is(markers.length, 0,
+         'CSS animation on an out-of-view position:fixed element should be ' +
+         'throttled');
+
+      await ensureElementRemoval(parentDiv);
+    }
+  );
+
+  add_task(
+    async function throttling_animations_on_out_of_view_position_fixed_element() {
+      if (!offscreenThrottlingEnabled) {
+        return;
+      }
+
+      var div = addDiv(null,
+                       { style: 'animation: background-color 100s; ' +
+                                'position: fixed; top: -1000px;' });
+
+      var animation = div.getAnimations()[0];
+      await animation.ready;
+
+      var markers = await observeStyling(5);
+      is(markers.length, 0,
+         'CSS animation on an out-of-view position:fixed element should be ' +
+         'throttled');
+
+      await ensureElementRemoval(div);
+    }
+  );
+
   add_task_if_omta_enabled(
     async function no_restyling_for_compositor_animation_on_unrelated_style_change() {
       var div = addDiv(null);
       var animation = div.animate({ opacity: [0, 1] }, 100 * MS_PER_SEC);
 
       await animation.ready;
       ok(SpecialPowers.wrap(animation).isRunningOnCompositor,
          'The opacity animation is running on the compositor');
--- a/dom/base/nsGkAtomList.h
+++ b/dom/base/nsGkAtomList.h
@@ -2245,17 +2245,17 @@ GK_ATOM(mozfixed, "-moz-fixed")
 GK_ATOM(Remote, "remote")
 GK_ATOM(RemoteId, "_remote_id")
 GK_ATOM(RemoteType, "remoteType")
 GK_ATOM(DisplayPort, "_displayport")
 GK_ATOM(DisplayPortMargins, "_displayportmargins")
 GK_ATOM(DisplayPortBase, "_displayportbase")
 GK_ATOM(AsyncScrollLayerCreationFailed, "_asyncscrolllayercreationfailed")
 GK_ATOM(forcemessagemanager, "forcemessagemanager")
-GK_ATOM(isPreloadBrowser, "isPreloadBrowser")
+GK_ATOM(preloadedState, "preloadedState")
 
 // Names for system metrics
 GK_ATOM(scrollbar_start_backward, "scrollbar-start-backward")
 GK_ATOM(scrollbar_start_forward, "scrollbar-start-forward")
 GK_ATOM(scrollbar_end_backward, "scrollbar-end-backward")
 GK_ATOM(scrollbar_end_forward, "scrollbar-end-forward")
 GK_ATOM(scrollbar_thumb_proportional, "scrollbar-thumb-proportional")
 GK_ATOM(overlay_scrollbars, "overlay-scrollbars")
--- a/dom/base/test/browser_aboutnewtab_process_selection.js
+++ b/dom/base/test/browser_aboutnewtab_process_selection.js
@@ -1,9 +1,11 @@
 const TEST_URL = "http://www.example.com/browser/dom/base/test/dummy.html";
+const PRELOADED_STATE = "preloaded";
+const CONSUMED_STATE = "consumed";
 
 var ppmm = Services.ppmm;
 
 add_task(async function(){
   // We want to count processes in this test, so let's disable the pre-allocated process manager.
   await SpecialPowers.pushPrefEnv({"set": [
     ["dom.ipc.processPrelaunch.enabled", false],
     ["dom.ipc.processCount", 10],
@@ -72,8 +74,35 @@ add_task(async function(){
 
   // Make sure the preload browser does not keep any of the new processes alive.
   gBrowser.removePreloadedBrowser();
 
   // Since we kept alive all the processes, we can shut down the ones that do
   // not host any tabs reliably.
   ppmm.releaseCachedProcesses();
 });
+
+add_task(async function preloaded_state_attribute() {
+  // Wait for a preloaded browser to exist, use it, and then create another one
+  await ensurePreloaded(gBrowser);
+  let preloadedTabState = gBrowser._preloadedBrowser.getAttribute("preloadedState");
+  is(preloadedTabState, PRELOADED_STATE, "Sanity check that the first preloaded browser has the correct attribute");
+
+  BrowserOpenTab();
+  await ensurePreloaded(gBrowser);
+
+  // Now check that the tabs have the correct browser attributes set
+  let consumedTabState = gBrowser.selectedBrowser.getAttribute("preloadedState");
+  is(consumedTabState, CONSUMED_STATE, "The opened tab consumed the preloaded browser and updated the attribute");
+
+  preloadedTabState = gBrowser._preloadedBrowser.getAttribute("preloadedState");
+  is(preloadedTabState, PRELOADED_STATE, "The preloaded browser has the correct attribute");
+
+  // Navigate away and check that the attribute has been removed altogether
+  gBrowser.selectedBrowser.loadURI(TEST_URL);
+  let navigatedTabHasState = gBrowser.selectedBrowser.hasAttribute("preloadedState");
+  ok(!navigatedTabHasState, "Correctly removed the preloadState attribute when navigating away");
+
+  // Remove tabs and preloaded browsers
+  await BrowserTestUtils.removeTab(gBrowser.selectedTab);
+  gBrowser.removePreloadedBrowser();
+});
+
--- a/dom/ipc/ContentParent.cpp
+++ b/dom/ipc/ContentParent.cpp
@@ -1131,19 +1131,19 @@ ContentParent::CreateBrowser(const TabCo
   nsIDocShell* docShell = GetOpenerDocShellHelper(aFrameElement);
   TabId openerTabId;
   if (docShell) {
     openerTabId = TabParent::GetTabIdFrom(docShell);
   }
 
   bool isPreloadBrowser = false;
   nsAutoString isPreloadBrowserStr;
-  if (aFrameElement->GetAttr(kNameSpaceID_None, nsGkAtoms::isPreloadBrowser,
+  if (aFrameElement->GetAttr(kNameSpaceID_None, nsGkAtoms::preloadedState,
                              isPreloadBrowserStr)) {
-    isPreloadBrowser = isPreloadBrowserStr.EqualsLiteral("true");
+    isPreloadBrowser = isPreloadBrowserStr.EqualsLiteral("preloaded");
   }
 
   RefPtr<nsIContentParent> constructorSender;
   if (isInContentProcess) {
     MOZ_ASSERT(aContext.IsMozBrowserElement() || aContext.IsJSPlugin());
     constructorSender = CreateContentBridgeParent(aContext, initialPriority,
                                                   openerTabId, tabId);
   } else {
--- a/dom/media/MediaCache.cpp
+++ b/dom/media/MediaCache.cpp
@@ -719,26 +719,28 @@ MediaCache::Flush()
     });
   sThread->Dispatch(r.forget());
 }
 
 void
 MediaCache::CloseStreamsForPrivateBrowsing()
 {
   MOZ_ASSERT(NS_IsMainThread());
-  sThread->Dispatch(
-    NS_NewRunnableFunction("MediaCache::CloseStreamsForPrivateBrowsing",
-                           [self = RefPtr<MediaCache>(this)]() {
-                             AutoLock lock(self->mMonitor);
-                             for (MediaCacheStream* s : self->mStreams) {
-                               if (s->mIsPrivateBrowsing) {
-                                 s->CloseInternal(lock);
-                               }
-                             }
-                           }));
+  sThread->Dispatch(NS_NewRunnableFunction(
+    "MediaCache::CloseStreamsForPrivateBrowsing",
+    [self = RefPtr<MediaCache>(this)]() {
+      AutoLock lock(self->mMonitor);
+      // Copy mStreams since CloseInternal() will change the array.
+      nsTArray<MediaCacheStream*> streams(self->mStreams);
+      for (MediaCacheStream* s : streams) {
+        if (s->mIsPrivateBrowsing) {
+          s->CloseInternal(lock);
+        }
+      }
+    }));
 }
 
 /* static */ RefPtr<MediaCache>
 MediaCache::GetMediaCache(int64_t aContentLength)
 {
   NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
 
   if (!sThreadInit) {
--- a/gfx/vr/gfxVROculus.cpp
+++ b/gfx/vr/gfxVROculus.cpp
@@ -385,24 +385,24 @@ VROculusSession::Refresh(bool aForceRefr
       // traversal.
       bInvisible = false;
 
       // While we are waiting for either the timeout or a new presentation,
       // fill the HMD with black / no layers.
       if (mSession && mTextureSet) {
         if (!aForceRefresh) {
           // VROculusSession didn't start submitting frames yet.
-          if (!mSubmitThread) {
+          // Or, the VR thread has been shut down already.
+          if (!mSubmitThread || !mSubmitThread->IsActive()) {
             return;
           }
           // ovr_SubmitFrame is running at VR Submit thread,
           // so we post this task to VR Submit thread and let it paint
           // a black frame.
           mDrawBlack = true;
-          MOZ_ASSERT(mSubmitThread->IsActive());
           mSubmitThread->PostTask(NewRunnableMethod<bool>(
             "gfx::VROculusSession::Refresh",
             this,
             &VROculusSession::Refresh, true));
           return;
         }
         ovrLayerEyeFov layer;
         memset(&layer, 0, sizeof(layer));
--- a/layout/generic/nsBlockFrame.cpp
+++ b/layout/generic/nsBlockFrame.cpp
@@ -447,26 +447,16 @@ nsBlockFrame::List(FILE* out, const char
 
 nsresult
 nsBlockFrame::GetFrameName(nsAString& aResult) const
 {
   return MakeFrameName(NS_LITERAL_STRING("Block"), aResult);
 }
 #endif
 
-#ifdef DEBUG
-nsFrameState
-nsBlockFrame::GetDebugStateBits() const
-{
-  // We don't want to include our cursor flag in the bits the
-  // regression tester looks at
-  return nsContainerFrame::GetDebugStateBits() & ~NS_BLOCK_HAS_LINE_CURSOR;
-}
-#endif
-
 void
 nsBlockFrame::InvalidateFrame(uint32_t aDisplayItemKey)
 {
   if (nsSVGUtils::IsInSVGTextSubtree(this)) {
     NS_ASSERTION(GetParent()->IsSVGTextFrame(),
                  "unexpected block frame in SVG text");
     GetParent()->InvalidateFrame();
     return;
--- a/layout/generic/nsBlockFrame.h
+++ b/layout/generic/nsBlockFrame.h
@@ -152,17 +152,16 @@ public:
   void InvalidateFrameWithRect(const nsRect& aRect, uint32_t aDisplayItemKey = 0) override;
 
 #ifdef DEBUG_FRAME_DUMP
   void List(FILE* out = stderr, const char* aPrefix = "", uint32_t aFlags = 0) const override;
   nsresult GetFrameName(nsAString& aResult) const override;
 #endif
 
 #ifdef DEBUG
-  nsFrameState GetDebugStateBits() const override;
   const char* LineReflowStatusToString(LineReflowStatus aLineReflowStatus) const;
 #endif
 
 #ifdef ACCESSIBILITY
   mozilla::a11y::AccType AccessibleType() override;
 #endif
 
   // Line cursor methods to speed up line searching in which one query
--- a/layout/generic/nsFrame.cpp
+++ b/layout/generic/nsFrame.cpp
@@ -7806,65 +7806,16 @@ nsIFrame::RootFrameList(nsPresContext* a
     nsIFrame* frame = shell->FrameManager()->GetRootFrame();
     if(frame) {
       frame->List(out, aPrefix);
     }
   }
 }
 #endif
 
-#ifdef DEBUG
-nsFrameState
-nsFrame::GetDebugStateBits() const
-{
-  // We'll ignore these flags for the purposes of comparing frame state:
-  //
-  //   NS_FRAME_EXTERNAL_REFERENCE
-  //     because this is set by the event state manager or the
-  //     caret code when a frame is focused. Depending on whether
-  //     or not the regression tests are run as the focused window
-  //     will make this value vary randomly.
-#define IRRELEVANT_FRAME_STATE_FLAGS NS_FRAME_EXTERNAL_REFERENCE
-
-#define FRAME_STATE_MASK (~(IRRELEVANT_FRAME_STATE_FLAGS))
-
-  return GetStateBits() & FRAME_STATE_MASK;
-}
-
-void
-nsFrame::XMLQuote(nsString& aString)
-{
-  int32_t i, len = aString.Length();
-  for (i = 0; i < len; i++) {
-    char16_t ch = aString.CharAt(i);
-    if (ch == '<') {
-      nsAutoString tmp(NS_LITERAL_STRING("&lt;"));
-      aString.Cut(i, 1);
-      aString.Insert(tmp, i);
-      len += 3;
-      i += 3;
-    }
-    else if (ch == '>') {
-      nsAutoString tmp(NS_LITERAL_STRING("&gt;"));
-      aString.Cut(i, 1);
-      aString.Insert(tmp, i);
-      len += 3;
-      i += 3;
-    }
-    else if (ch == '\"') {
-      nsAutoString tmp(NS_LITERAL_STRING("&quot;"));
-      aString.Cut(i, 1);
-      aString.Insert(tmp, i);
-      len += 5;
-      i += 5;
-    }
-  }
-}
-#endif
-
 bool
 nsIFrame::IsVisibleForPainting(nsDisplayListBuilder* aBuilder) {
   if (!StyleVisibility()->IsVisible())
     return false;
   nsISelection* sel = aBuilder->GetBoundingSelection();
   return !sel || IsVisibleInSelection(sel);
 }
 
@@ -11011,16 +10962,17 @@ nsIFrame::GetPseudoElement(CSSPseudoElem
 static bool
 IsFrameScrolledOutOfView(nsIFrame* aTarget,
                          const nsRect& aTargetRect,
                          nsIFrame* aParent)
 {
   nsIScrollableFrame* scrollableFrame =
     nsLayoutUtils::GetNearestScrollableFrame(aParent,
       nsLayoutUtils::SCROLLABLE_SAME_DOC |
+      nsLayoutUtils::SCROLLABLE_FIXEDPOS_FINDS_ROOT |
       nsLayoutUtils::SCROLLABLE_INCLUDE_HIDDEN);
   if (!scrollableFrame) {
     return false;
   }
 
   nsIFrame *scrollableParent = do_QueryFrame(scrollableFrame);
 
   nsRect transformedRect =
--- a/layout/generic/nsFrame.h
+++ b/layout/generic/nsFrame.h
@@ -489,18 +489,16 @@ public:
   void Trace(const char* aMethod, bool aEnter);
   void Trace(const char* aMethod, bool aEnter, const nsReflowStatus& aStatus);
   void TraceMsg(const char* fmt, ...) MOZ_FORMAT_PRINTF(2, 3);
 
   // Helper function that verifies that each frame in the list has the
   // NS_FRAME_IS_DIRTY bit set
   static void VerifyDirtyBitSet(const nsFrameList& aFrameList);
 
-  static void XMLQuote(nsString& aString);
-
   // Display Reflow Debugging
   static void* DisplayReflowEnter(nsPresContext*          aPresContext,
                                   nsIFrame*                aFrame,
                                   const ReflowInput& aReflowInput);
   static void* DisplayLayoutEnter(nsIFrame* aFrame);
   static void* DisplayIntrinsicISizeEnter(nsIFrame* aFrame,
                                           const char* aType);
   static void* DisplayIntrinsicSizeEnter(nsIFrame* aFrame,
@@ -723,21 +721,16 @@ public:
   // Helper function to return the index in parent of the frame's content
   // object. Returns -1 on error or if the frame doesn't have a content object
   static int32_t ContentIndexInContainer(const nsIFrame* aFrame);
 #endif
 
 #ifdef DEBUG
 public:
   /**
-   * Return the state bits that are relevant to regression tests (that
-   * is, those bits which indicate a real difference when they differ
-   */
-  nsFrameState GetDebugStateBits() const override;
-  /**
    * See if style tree verification is enabled. To enable style tree
    * verification add "styleverifytree:1" to your MOZ_LOG
    * environment variable (any non-zero debug level will work). Or,
    * call SetVerifyStyleTreeEnable with true.
    */
   static bool GetVerifyStyleTreeEnable();
 
   /**
--- a/layout/generic/nsIFrame.h
+++ b/layout/generic/nsIFrame.h
@@ -4545,21 +4545,16 @@ public:
    */
   static void RootFrameList(nsPresContext* aPresContext,
                             FILE* out = stderr, const char* aPrefix = "");
   virtual void DumpFrameTree() const;
   void DumpFrameTreeLimited() const;
 
   virtual nsresult  GetFrameName(nsAString& aResult) const = 0;
 #endif
-
-#ifdef DEBUG
-public:
-  virtual nsFrameState  GetDebugStateBits() const = 0;
-#endif
 };
 
 //----------------------------------------------------------------------
 
 /**
  * AutoWeakFrame can be used to keep a reference to a nsIFrame in a safe way.
  * Whenever an nsIFrame object is deleted, the AutoWeakFrames pointing
  * to it will be cleared.  AutoWeakFrame is for variables on the stack or
--- a/layout/generic/nsTextFrame.cpp
+++ b/layout/generic/nsTextFrame.cpp
@@ -10256,26 +10256,16 @@ nsTextFrame::List(FILE* out, const char*
 
   if (IsSelected()) {
     str += " SELECTED";
   }
   fprintf_stderr(out, "%s\n", str.get());
 }
 #endif
 
-#ifdef DEBUG
-nsFrameState
-nsTextFrame::GetDebugStateBits() const
-{
-  // mask out our emptystate flags; those are just caches
-  return nsFrame::GetDebugStateBits() &
-    ~(TEXT_WHITESPACE_FLAGS | TEXT_REFLOW_FLAGS);
-}
-#endif
-
 void
 nsTextFrame::AdjustOffsetsForBidi(int32_t aStart, int32_t aEnd)
 {
   AddStateBits(NS_FRAME_IS_BIDI);
   if (mContent->HasFlag(NS_HAS_FLOWLENGTH_PROPERTY)) {
     mContent->DeleteProperty(nsGkAtoms::flowlength);
     mContent->UnsetFlags(NS_HAS_FLOWLENGTH_PROPERTY);
   }
--- a/layout/generic/nsTextFrame.h
+++ b/layout/generic/nsTextFrame.h
@@ -167,20 +167,16 @@ public:
 #ifdef DEBUG_FRAME_DUMP
   void List(FILE* out = stderr,
             const char* aPrefix = "",
             uint32_t aFlags = 0) const override;
   nsresult GetFrameName(nsAString& aResult) const override;
   void ToCString(nsCString& aBuf, int32_t* aTotalContentLength) const;
 #endif
 
-#ifdef DEBUG
-  nsFrameState GetDebugStateBits() const override;
-#endif
-
   ContentOffsets CalcContentOffsetsFromFramePoint(const nsPoint& aPoint) override;
   ContentOffsets GetCharacterOffsetAtFramePoint(const nsPoint& aPoint);
 
   /**
    * This is called only on the primary text frame. It indicates that
    * the selection state of the given character range has changed.
    * Text in the range is unconditionally invalidated
    * (Selection::Repaint depends on this).
--- a/layout/moz.build
+++ b/layout/moz.build
@@ -32,12 +32,12 @@ if CONFIG['NS_PRINTING']:
     DIRS += ['printing']
 
 TEST_DIRS += [
     'tools/reftest',
 ]
 
 DIRS += ['build', 'media']
 
-if CONFIG['MOZ_DEBUG']:
+if CONFIG['MOZ_DEBUG'] and CONFIG['MOZ_WIDGET_TOOLKIT'] != 'android':
     TEST_DIRS += ['tools/layout-debug']
 
 CRASHTEST_MANIFESTS += ['../testing/crashtest/crashtests.list']
--- a/layout/style/ServoBindings.cpp
+++ b/layout/style/ServoBindings.cpp
@@ -1797,44 +1797,43 @@ Gecko_EnsureImageLayersLength(nsStyleIma
 
   aLayers->mLayers.EnsureLengthAtLeast(aLen);
 
   for (size_t i = oldLength; i < aLen; ++i) {
     aLayers->mLayers[i].Initialize(aLayerType);
   }
 }
 
+template <typename StyleType>
+static void
+EnsureStyleAutoArrayLength(StyleType* aArray, size_t aLen)
+{
+  size_t oldLength = aArray->Length();
+
+  aArray->EnsureLengthAtLeast(aLen);
+
+  for (size_t i = oldLength; i < aLen; ++i) {
+    (*aArray)[i].SetInitialValues();
+  }
+}
+
 void
 Gecko_EnsureStyleAnimationArrayLength(void* aArray, size_t aLen)
 {
   auto base =
     static_cast<nsStyleAutoArray<StyleAnimation>*>(aArray);
-
-  size_t oldLength = base->Length();
-
-  base->EnsureLengthAtLeast(aLen);
-
-  for (size_t i = oldLength; i < aLen; ++i) {
-    (*base)[i].SetInitialValues();
-  }
+  EnsureStyleAutoArrayLength(base, aLen);
 }
 
 void
 Gecko_EnsureStyleTransitionArrayLength(void* aArray, size_t aLen)
 {
   auto base =
     reinterpret_cast<nsStyleAutoArray<StyleTransition>*>(aArray);
-
-  size_t oldLength = base->Length();
-
-  base->EnsureLengthAtLeast(aLen);
-
-  for (size_t i = oldLength; i < aLen; ++i) {
-    (*base)[i].SetInitialValues();
-  }
+  EnsureStyleAutoArrayLength(base, aLen);
 }
 
 void
 Gecko_ClearWillChange(nsStyleDisplay* aDisplay, size_t aLength)
 {
   aDisplay->mWillChange.Clear();
   aDisplay->mWillChange.SetCapacity(aLength);
 }
--- a/layout/style/nsAnimationManager.cpp
+++ b/layout/style/nsAnimationManager.cpp
@@ -417,24 +417,24 @@ class MOZ_STACK_CLASS ServoCSSAnimationB
 public:
   explicit ServoCSSAnimationBuilder(const ServoStyleContext* aStyleContext)
     : mStyleContext(aStyleContext)
   {
     MOZ_ASSERT(aStyleContext);
   }
 
   bool BuildKeyframes(nsPresContext* aPresContext,
-                      const StyleAnimation& aSrc,
+                      nsAtom* aName,
+                      const nsTimingFunction& aTimingFunction,
                       nsTArray<Keyframe>& aKeyframes)
   {
     ServoStyleSet* styleSet = aPresContext->StyleSet()->AsServo();
     MOZ_ASSERT(styleSet);
-    const nsTimingFunction& timingFunction = aSrc.GetTimingFunction();
-    return styleSet->GetKeyframesForName(aSrc.GetName(),
-                                         timingFunction,
+    return styleSet->GetKeyframesForName(aName,
+                                         aTimingFunction,
                                          aKeyframes);
   }
   void SetKeyframes(KeyframeEffectReadOnly& aEffect,
                     nsTArray<Keyframe>&& aKeyframes)
   {
     aEffect.SetKeyframes(Move(aKeyframes), mStyleContext);
   }
 
@@ -488,29 +488,30 @@ public:
     : mStyleContext(aStyleContext)
     , mTarget(aTarget)
   {
     MOZ_ASSERT(aStyleContext);
     MOZ_ASSERT(aTarget.mElement);
   }
 
   bool BuildKeyframes(nsPresContext* aPresContext,
-                      const StyleAnimation& aSrc,
+                      nsAtom* aName,
+                      const nsTimingFunction& aTimingFunction,
                       nsTArray<Keyframe>& aKeyframs);
   void SetKeyframes(KeyframeEffectReadOnly& aEffect,
                     nsTArray<Keyframe>&& aKeyframes)
   {
     aEffect.SetKeyframes(Move(aKeyframes), mStyleContext);
   }
 
   void NotifyNewOrRemovedAnimation(const Animation&) {}
 
 private:
   nsTArray<Keyframe> BuildAnimationFrames(nsPresContext* aPresContext,
-                                          const StyleAnimation& aSrc,
+                                          const nsTimingFunction& aTimingFunction,
                                           const nsCSSKeyframesRule* aRule);
   Maybe<ComputedTimingFunction> GetKeyframeTimingFunction(
     nsPresContext* aPresContext,
     nsCSSKeyframeRule* aKeyframeRule,
     const Maybe<ComputedTimingFunction>& aInheritedTimingFunction);
   nsTArray<PropertyValuePair> GetKeyframePropertyValues(
     nsPresContext* aPresContext,
     nsCSSKeyframeRule* aKeyframeRule,
@@ -584,40 +585,47 @@ UpdateOldAnimationPropertiesWithNew(
 
 // Returns a new animation set up with given StyleAnimation.
 // Or returns an existing animation matching StyleAnimation's name updated
 // with the new StyleAnimation.
 template<class BuilderType>
 static already_AddRefed<CSSAnimation>
 BuildAnimation(nsPresContext* aPresContext,
                const NonOwningAnimationTarget& aTarget,
-               const StyleAnimation& aSrc,
+               const nsStyleDisplay& aStyleDisplay,
+               uint32_t animIdx,
                BuilderType& aBuilder,
                nsAnimationManager::CSSAnimationCollection* aCollection)
 {
   MOZ_ASSERT(aPresContext);
 
+  nsAtom* animationName = aStyleDisplay.GetAnimationName(animIdx);
   nsTArray<Keyframe> keyframes;
-  if (!aBuilder.BuildKeyframes(aPresContext, aSrc, keyframes)) {
+  if (!aBuilder.BuildKeyframes(aPresContext,
+                               animationName,
+                               aStyleDisplay.GetAnimationTimingFunction(animIdx),
+                               keyframes)) {
     return nullptr;
   }
 
-  TimingParams timing = TimingParamsFromCSSParams(aSrc.GetDuration(),
-                                                  aSrc.GetDelay(),
-                                                  aSrc.GetIterationCount(),
-                                                  aSrc.GetDirection(),
-                                                  aSrc.GetFillMode());
+  TimingParams timing =
+    TimingParamsFromCSSParams(aStyleDisplay.GetAnimationDuration(animIdx),
+                              aStyleDisplay.GetAnimationDelay(animIdx),
+                              aStyleDisplay.GetAnimationIterationCount(animIdx),
+                              aStyleDisplay.GetAnimationDirection(animIdx),
+                              aStyleDisplay.GetAnimationFillMode(animIdx));
 
   bool isStylePaused =
-    aSrc.GetPlayState() == NS_STYLE_ANIMATION_PLAY_STATE_PAUSED;
+    aStyleDisplay.GetAnimationPlayState(animIdx) ==
+      NS_STYLE_ANIMATION_PLAY_STATE_PAUSED;
 
   // Find the matching animation with animation name in the old list
   // of animations and remove the matched animation from the list.
   RefPtr<CSSAnimation> oldAnim =
-    PopExistingAnimation(aSrc.GetName(), aCollection);
+    PopExistingAnimation(animationName, aCollection);
 
   if (oldAnim) {
     // Copy over the start times and (if still paused) pause starts
     // for each animation (matching on name only) that was also in the
     // old list of animations.
     // This means that we honor dynamic changes, which isn't what the
     // spec says to do, but WebKit seems to honor at least some of
     // them.  See
@@ -637,18 +645,17 @@ BuildAnimation(nsPresContext* aPresConte
   KeyframeEffectParams effectOptions;
   RefPtr<KeyframeEffectReadOnly> effect =
     new KeyframeEffectReadOnly(aPresContext->Document(), target, timing,
                                effectOptions);
 
   aBuilder.SetKeyframes(*effect, Move(keyframes));
 
   RefPtr<CSSAnimation> animation =
-    new CSSAnimation(aPresContext->Document()->GetScopeObject(),
-                     aSrc.GetName());
+    new CSSAnimation(aPresContext->Document()->GetScopeObject(), animationName);
   animation->SetOwningElement(
     OwningElementRef(*aTarget.mElement, aTarget.mPseudoType));
 
   animation->SetTimelineNoUpdate(aTarget.mElement->OwnerDoc()->Timeline());
   animation->SetEffectNoUpdate(effect);
 
   if (isStylePaused) {
     animation->PauseFromStyle();
@@ -658,37 +665,39 @@ BuildAnimation(nsPresContext* aPresConte
 
   aBuilder.NotifyNewOrRemovedAnimation(*animation);
 
   return animation.forget();
 }
 
 bool
 GeckoCSSAnimationBuilder::BuildKeyframes(nsPresContext* aPresContext,
-                                         const StyleAnimation& aSrc,
+                                         nsAtom* aName,
+                                         const nsTimingFunction& aTimingFunction,
                                          nsTArray<Keyframe>& aKeyframes)
 {
   MOZ_ASSERT(aPresContext);
   MOZ_ASSERT(aPresContext->StyleSet()->IsGecko());
 
   nsCSSKeyframesRule* rule =
-    aPresContext->StyleSet()->AsGecko()->KeyframesRuleForName(aSrc.GetName());
+    aPresContext->StyleSet()->AsGecko()->KeyframesRuleForName(aName);
   if (!rule) {
     return false;
   }
 
-  aKeyframes = BuildAnimationFrames(aPresContext, aSrc, rule);
+  aKeyframes = BuildAnimationFrames(aPresContext, aTimingFunction, rule);
 
   return true;
 }
 
 nsTArray<Keyframe>
-GeckoCSSAnimationBuilder::BuildAnimationFrames(nsPresContext* aPresContext,
-                                               const StyleAnimation& aSrc,
-                                               const nsCSSKeyframesRule* aRule)
+GeckoCSSAnimationBuilder::BuildAnimationFrames(
+  nsPresContext* aPresContext,
+  const nsTimingFunction& aTimingFunction,
+  const nsCSSKeyframesRule* aRule)
 {
   // Ideally we'd like to build up a set of Keyframe objects that more-or-less
   // reflect the keyframes as-specified in the @keyframes rule(s) so that
   // authors get something intuitive when they call anim.effect.getKeyframes().
   //
   // That, however, proves to be difficult because the way CSS declarations are
   // processed differs from how we are able to represent keyframes as
   // JavaScript objects in the Web Animations API.
@@ -728,17 +737,17 @@ GeckoCSSAnimationBuilder::BuildAnimation
   // for that matter) we resolve values on @keyframes down to computed values
   // (thereby expanding shorthands and variable references) and then pick up the
   // last value for each longhand property at each offset.
 
   // FIXME: There is a pending spec change to make multiple @keyframes
   // rules with the same name cascade but we don't support that yet.
 
   Maybe<ComputedTimingFunction> inheritedTimingFunction =
-    ConvertTimingFunction(aSrc.GetTimingFunction());
+    ConvertTimingFunction(aTimingFunction);
 
   // First, make up Keyframe objects for each rule
   nsTArray<Keyframe> keyframes;
   nsCSSPropertyIDSet animatedProperties;
 
   for (auto ruleIdx = 0, ruleEnd = aRule->StyleRuleCount();
        ruleIdx != ruleEnd; ++ruleIdx) {
     css::Rule* cssRule = aRule->GetStyleRuleAt(ruleIdx);
@@ -1004,38 +1013,36 @@ GeckoCSSAnimationBuilder::FillInMissingK
     }
   }
 }
 
 template<class BuilderType>
 static nsAnimationManager::OwningCSSAnimationPtrArray
 BuildAnimations(nsPresContext* aPresContext,
                 const NonOwningAnimationTarget& aTarget,
-                const nsStyleAutoArray<StyleAnimation>& aStyleAnimations,
-                uint32_t aStyleAnimationNameCount,
+                const nsStyleDisplay& aStyleDisplay,
                 BuilderType& aBuilder,
                 nsAnimationManager::CSSAnimationCollection* aCollection)
 {
   nsAnimationManager::OwningCSSAnimationPtrArray result;
 
-  for (size_t animIdx = aStyleAnimationNameCount; animIdx-- != 0;) {
-    const StyleAnimation& src = aStyleAnimations[animIdx];
-
+  for (size_t animIdx = aStyleDisplay.mAnimationNameCount; animIdx-- != 0;) {
     // CSS Animations whose animation-name does not match a @keyframes rule do
     // not generate animation events. This includes when the animation-name is
     // "none" which is represented by an empty name in the StyleAnimation.
     // Since such animations neither affect style nor dispatch events, we do
     // not generate a corresponding CSSAnimation for them.
-    if (src.GetName() == nsGkAtoms::_empty) {
+    if (aStyleDisplay.GetAnimationName(animIdx) == nsGkAtoms::_empty) {
       continue;
     }
 
     RefPtr<CSSAnimation> dest = BuildAnimation(aPresContext,
                                                aTarget,
-                                               src,
+                                               aStyleDisplay,
+                                               animIdx,
                                                aBuilder,
                                                aCollection);
     if (!dest) {
       continue;
     }
 
     dest->SetAnimationIndex(static_cast<uint64_t>(animIdx));
     result.AppendElement(dest);
@@ -1121,18 +1128,17 @@ nsAnimationManager::DoUpdateAnimations(
 
   nsAutoAnimationMutationBatch mb(aTarget.mElement->OwnerDoc());
 
   // Build the updated animations list, extracting matching animations from
   // the existing collection as we go.
   OwningCSSAnimationPtrArray newAnimations;
   newAnimations = BuildAnimations(mPresContext,
                                   aTarget,
-                                  aStyleDisplay.mAnimations,
-                                  aStyleDisplay.mAnimationNameCount,
+                                  aStyleDisplay,
                                   aBuilder,
                                   collection);
 
   if (newAnimations.IsEmpty()) {
     if (collection) {
       collection->Destroy();
     }
     return;
--- a/layout/style/nsStyleStruct.h
+++ b/layout/style/nsStyleStruct.h
@@ -2255,21 +2255,16 @@ struct StyleTransition
   // Delay and Duration are in milliseconds
 
   const nsTimingFunction& GetTimingFunction() const { return mTimingFunction; }
   float GetDelay() const { return mDelay; }
   float GetDuration() const { return mDuration; }
   nsCSSPropertyID GetProperty() const { return mProperty; }
   nsAtom* GetUnknownProperty() const { return mUnknownProperty; }
 
-  float GetCombinedDuration() const {
-    // http://dev.w3.org/csswg/css-transitions/#combined-duration
-    return std::max(mDuration, 0.0f) + mDelay;
-  }
-
   void SetTimingFunction(const nsTimingFunction& aTimingFunction)
     { mTimingFunction = aTimingFunction; }
   void SetDelay(float aDelay) { mDelay = aDelay; }
   void SetDuration(float aDuration) { mDuration = aDuration; }
   void SetProperty(nsCSSPropertyID aProperty)
     {
       NS_ASSERTION(aProperty != eCSSProperty_UNKNOWN &&
                    aProperty != eCSSPropertyExtra_variable,
@@ -2605,29 +2600,86 @@ struct MOZ_NEEDS_MEMMOVABLE_MEMBERS nsSt
 
   // The number of elements in mTransitions that are not from repeating
   // a list due to another property being longer.
   uint32_t mTransitionTimingFunctionCount,
            mTransitionDurationCount,
            mTransitionDelayCount,
            mTransitionPropertyCount;
 
+  nsCSSPropertyID GetTransitionProperty(uint32_t aIndex) const
+  {
+    return mTransitions[aIndex % mTransitionPropertyCount].GetProperty();
+  }
+  float GetTransitionDelay(uint32_t aIndex) const
+  {
+    return mTransitions[aIndex % mTransitionDelayCount].GetDelay();
+  }
+  float GetTransitionDuration(uint32_t aIndex) const
+  {
+    return mTransitions[aIndex % mTransitionDurationCount].GetDuration();
+  }
+  const nsTimingFunction& GetTransitionTimingFunction(uint32_t aIndex) const
+  {
+    return mTransitions[aIndex % mTransitionTimingFunctionCount].GetTimingFunction();
+  }
+  float GetTransitionCombinedDuration(uint32_t aIndex) const
+  {
+    // https://drafts.csswg.org/css-transitions/#transition-combined-duration
+    return
+      std::max(mTransitions[aIndex % mTransitionDurationCount].GetDuration(),
+               0.0f)
+        + mTransitions[aIndex % mTransitionDelayCount].GetDelay();
+  }
+
   nsStyleAutoArray<mozilla::StyleAnimation> mAnimations; // [reset]
 
   // The number of elements in mAnimations that are not from repeating
   // a list due to another property being longer.
   uint32_t mAnimationTimingFunctionCount,
            mAnimationDurationCount,
            mAnimationDelayCount,
            mAnimationNameCount,
            mAnimationDirectionCount,
            mAnimationFillModeCount,
            mAnimationPlayStateCount,
            mAnimationIterationCountCount;
 
+  nsAtom* GetAnimationName(uint32_t aIndex) const
+  {
+    return mAnimations[aIndex % mAnimationNameCount].GetName();
+  }
+  float GetAnimationDelay(uint32_t aIndex) const
+  {
+    return mAnimations[aIndex % mAnimationDelayCount].GetDelay();
+  }
+  float GetAnimationDuration(uint32_t aIndex) const
+  {
+    return mAnimations[aIndex % mAnimationDurationCount].GetDuration();
+  }
+  mozilla::dom::PlaybackDirection GetAnimationDirection(uint32_t aIndex) const
+  {
+    return mAnimations[aIndex % mAnimationDirectionCount].GetDirection();
+  }
+  mozilla::dom::FillMode GetAnimationFillMode(uint32_t aIndex) const
+  {
+    return mAnimations[aIndex % mAnimationFillModeCount].GetFillMode();
+  }
+  uint8_t GetAnimationPlayState(uint32_t aIndex) const
+  {
+    return mAnimations[aIndex % mAnimationPlayStateCount].GetPlayState();
+  }
+  float GetAnimationIterationCount(uint32_t aIndex) const
+  {
+    return mAnimations[aIndex % mAnimationIterationCountCount].GetIterationCount();
+  }
+  const nsTimingFunction& GetAnimationTimingFunction(uint32_t aIndex) const
+  {
+    return mAnimations[aIndex % mAnimationTimingFunctionCount].GetTimingFunction();
+  }
 
   // The threshold used for extracting a shape from shape-outside: <image>.
   float mShapeImageThreshold = 0.0f; // [reset]
 
   mozilla::StyleShapeSource mShapeOutside; // [reset]
 
   bool IsBlockInsideStyle() const {
     return mozilla::StyleDisplay::Block == mDisplay ||
--- a/layout/style/nsTransitionManager.cpp
+++ b/layout/style/nsTransitionManager.cpp
@@ -517,17 +517,17 @@ nsTransitionManager::StyleContextChanged
   }
 
   // NOTE: Things in this function (and ConsiderInitiatingTransition)
   // should never call PeekStyleData because we don't preserve gotten
   // structs across reframes.
 
   // Return sooner (before the startedAny check below) for the most
   // common case: no transitions specified or running.
-  const nsStyleDisplay *disp = newStyleContext->StyleDisplay();
+  const nsStyleDisplay* disp = newStyleContext->StyleDisplay();
   CSSPseudoElementType pseudoType = newStyleContext->GetPseudoType();
   if (pseudoType != CSSPseudoElementType::NotPseudo) {
     if (pseudoType != CSSPseudoElementType::before &&
         pseudoType != CSSPseudoElementType::after) {
       return;
     }
 
     NS_ASSERTION((pseudoType == CSSPseudoElementType::before &&
@@ -540,17 +540,17 @@ nsTransitionManager::StyleContextChanged
     // :before or :after is attached to.
     aElement = aElement->GetParent()->AsElement();
   }
 
   CSSTransitionCollection* collection =
     CSSTransitionCollection::GetAnimationCollection(aElement, pseudoType);
   if (!collection &&
       disp->mTransitionPropertyCount == 1 &&
-      disp->mTransitions[0].GetCombinedDuration() <= 0.0f) {
+      disp->GetTransitionCombinedDuration(0) <= 0.0f) {
     return;
   }
 
   MOZ_ASSERT(mPresContext->RestyleManager()->IsGecko(),
              "ServoRestyleManager should not use nsTransitionManager "
              "for transitions");
   if (collection &&
       collection->mCheckGeneration ==
@@ -593,17 +593,17 @@ nsTransitionManager::StyleContextChanged
   }
 
   nsAutoAnimationMutationBatch mb(aElement->OwnerDoc());
 
   DebugOnly<bool> startedAny = false;
   // We don't have to update transitions if display:none, although we will
   // cancel them after restyling.
   if (!afterChangeStyle->IsInDisplayNoneSubtree()) {
-    startedAny = DoUpdateTransitions(disp,
+    startedAny = DoUpdateTransitions(*disp,
                                      aElement,
                                      afterChangeStyle->GetPseudoType(),
                                      collection,
                                      aOldStyleContext->AsGecko(),
                                      afterChangeStyle->AsGecko());
   }
 
   MOZ_ASSERT(!startedAny || collection,
@@ -642,79 +642,77 @@ nsTransitionManager::UpdateTransitions(
 {
   if (!mPresContext->IsDynamic()) {
     // For print or print preview, ignore transitions.
     return false;
   }
 
   CSSTransitionCollection* collection =
     CSSTransitionCollection::GetAnimationCollection(aElement, aPseudoType);
-  const nsStyleDisplay *disp =
+  const nsStyleDisplay* disp =
       aNewStyle->ComputedData()->GetStyleDisplay();
-  return DoUpdateTransitions(disp,
+  return DoUpdateTransitions(*disp,
                              aElement, aPseudoType,
                              collection,
                              aOldStyle, aNewStyle);
 }
 
 template<typename StyleType>
 bool
 nsTransitionManager::DoUpdateTransitions(
-  const nsStyleDisplay* aDisp,
+  const nsStyleDisplay& aDisp,
   dom::Element* aElement,
   CSSPseudoElementType aPseudoType,
   CSSTransitionCollection*& aElementTransitions,
   StyleType aOldStyle,
   StyleType aNewStyle)
 {
-  MOZ_ASSERT(aDisp, "Null nsStyleDisplay");
   MOZ_ASSERT(!aElementTransitions ||
              aElementTransitions->mElement == aElement, "Element mismatch");
 
   // Per http://lists.w3.org/Archives/Public/www-style/2009Aug/0109.html
   // I'll consider only the transitions from the number of items in
   // 'transition-property' on down, and later ones will override earlier
   // ones (tracked using |whichStarted|).
   bool startedAny = false;
   nsCSSPropertyIDSet whichStarted;
-  for (uint32_t i = aDisp->mTransitionPropertyCount; i-- != 0; ) {
-    const StyleTransition& t = aDisp->mTransitions[i];
+  for (uint32_t i = aDisp.mTransitionPropertyCount; i-- != 0; ) {
     // Check the combined duration (combination of delay and duration)
     // first, since it defaults to zero, which means we can ignore the
     // transition.
-    if (t.GetCombinedDuration() > 0.0f) {
+    if (aDisp.GetTransitionCombinedDuration(i) > 0.0f) {
       // We might have something to transition.  See if any of the
       // properties in question changed and are animatable.
       // FIXME: Would be good to find a way to share code between this
       // interpretation of transition-property and the one below.
-      nsCSSPropertyID property = t.GetProperty();
+      nsCSSPropertyID property = aDisp.GetTransitionProperty(i);
       if (property == eCSSPropertyExtra_no_properties ||
           property == eCSSPropertyExtra_variable ||
           property == eCSSProperty_UNKNOWN) {
         // Nothing to do, but need to exclude this from cases below.
       } else if (property == eCSSPropertyExtra_all_properties) {
         for (nsCSSPropertyID p = nsCSSPropertyID(0);
              p < eCSSProperty_COUNT_no_shorthands;
              p = nsCSSPropertyID(p + 1)) {
-          ConsiderInitiatingTransition(p, t, aElement, aPseudoType,
+          ConsiderInitiatingTransition(p, aDisp, i, aElement, aPseudoType,
                                        aElementTransitions,
                                        aOldStyle, aNewStyle,
                                        &startedAny, &whichStarted);
         }
       } else if (nsCSSProps::IsShorthand(property)) {
         CSSPROPS_FOR_SHORTHAND_SUBPROPERTIES(subprop, property,
                                              CSSEnabledState::eForAllContent)
         {
-          ConsiderInitiatingTransition(*subprop, t, aElement, aPseudoType,
+          ConsiderInitiatingTransition(*subprop, aDisp, i, aElement, aPseudoType,
                                        aElementTransitions,
                                        aOldStyle, aNewStyle,
                                        &startedAny, &whichStarted);
         }
       } else {
-        ConsiderInitiatingTransition(property, t, aElement, aPseudoType,
+        ConsiderInitiatingTransition(property, aDisp, i, aElement, aPseudoType,
                                      aElementTransitions,
                                      aOldStyle, aNewStyle,
                                      &startedAny, &whichStarted);
       }
     }
   }
 
   // Stop any transitions for properties that are no longer in
@@ -723,24 +721,23 @@ nsTransitionManager::DoUpdateTransitions
   // for properties that just changed (and are still in the set of
   // properties to transition), but for which we didn't just start the
   // transition.  This can happen delay and duration are both zero, or
   // because the new value is not interpolable.
   // Note that we also do the latter set of work in
   // nsTransitionManager::PruneCompletedTransitions.
   if (aElementTransitions) {
     bool checkProperties =
-      aDisp->mTransitions[0].GetProperty() != eCSSPropertyExtra_all_properties;
+      aDisp.GetTransitionProperty(0) != eCSSPropertyExtra_all_properties;
     nsCSSPropertyIDSet allTransitionProperties;
     if (checkProperties) {
-      for (uint32_t i = aDisp->mTransitionPropertyCount; i-- != 0; ) {
-        const StyleTransition& t = aDisp->mTransitions[i];
+      for (uint32_t i = aDisp.mTransitionPropertyCount; i-- != 0; ) {
         // FIXME: Would be good to find a way to share code between this
         // interpretation of transition-property and the one above.
-        nsCSSPropertyID property = t.GetProperty();
+        nsCSSPropertyID property = aDisp.GetTransitionProperty(i);
         if (property == eCSSPropertyExtra_no_properties ||
             property == eCSSPropertyExtra_variable ||
             property == eCSSProperty_UNKNOWN) {
           // Nothing to do, but need to exclude this from cases below.
         } else if (property == eCSSPropertyExtra_all_properties) {
           for (nsCSSPropertyID p = nsCSSPropertyID(0);
                p < eCSSProperty_COUNT_no_shorthands;
                p = nsCSSPropertyID(p + 1)) {
@@ -854,17 +851,18 @@ IsTransitionable(nsCSSPropertyID aProper
   // FIXME: This should also exclude discretely-animated properties.
   return nsCSSProps::kAnimTypeTable[aProperty] != eStyleAnimType_None;
 }
 
 template<typename StyleType>
 void
 nsTransitionManager::ConsiderInitiatingTransition(
   nsCSSPropertyID aProperty,
-  const StyleTransition& aTransition,
+  const nsStyleDisplay& aStyleDisplay,
+  uint32_t transitionIdx,
   dom::Element* aElement,
   CSSPseudoElementType aPseudoType,
   CSSTransitionCollection*& aElementTransitions,
   StyleType aOldStyle,
   StyleType aNewStyle,
   bool* aStartedAny,
   nsCSSPropertyIDSet* aWhichStarted)
 {
@@ -967,19 +965,20 @@ nsTransitionManager::ConsiderInitiatingT
         // |aElementTransitions| is now a dangling pointer!
         aElementTransitions = nullptr;
       }
       // GetAnimationRule already called RestyleForAnimation.
     }
     return;
   }
 
-  const nsTimingFunction &tf = aTransition.GetTimingFunction();
-  float delay = aTransition.GetDelay();
-  float duration = aTransition.GetDuration();
+  const nsTimingFunction &tf =
+    aStyleDisplay.GetTransitionTimingFunction(transitionIdx);
+  float delay = aStyleDisplay.GetTransitionDelay(transitionIdx);
+  float duration = aStyleDisplay.GetTransitionDuration(transitionIdx);
   if (duration < 0.0) {
     // The spec says a negative duration is treated as zero.
     duration = 0.0;
   }
 
   AnimationValue startForReversingTest = startValue;
   double reversePortion = 1.0;
 
--- a/layout/style/nsTransitionManager.h
+++ b/layout/style/nsTransitionManager.h
@@ -425,26 +425,27 @@ protected:
     OwningCSSTransitionPtrArray;
 
   // Update transitions. This will start new transitions,
   // replace existing transitions, and stop existing transitions
   // as needed. aDisp and aElement must be non-null.
   // aElementTransitions is the collection of current transitions, and it
   // could be a nullptr if we don't have any transitions.
   template<typename StyleType> bool
-  DoUpdateTransitions(const nsStyleDisplay* aDisp,
+  DoUpdateTransitions(const nsStyleDisplay& aDisp,
                       mozilla::dom::Element* aElement,
                       mozilla::CSSPseudoElementType aPseudoType,
                       CSSTransitionCollection*& aElementTransitions,
                       StyleType aOldStyle,
                       StyleType aNewStyle);
 
   template<typename StyleType> void
   ConsiderInitiatingTransition(nsCSSPropertyID aProperty,
-                               const mozilla::StyleTransition& aTransition,
+                               const nsStyleDisplay& aStyleDisplay,
+                               uint32_t transitionIdx,
                                mozilla::dom::Element* aElement,
                                mozilla::CSSPseudoElementType aPseudoType,
                                CSSTransitionCollection*& aElementTransitions,
                                StyleType aOldStyle,
                                StyleType aNewStyle,
                                bool* aStartedAny,
                                nsCSSPropertyIDSet* aWhichStarted);
 
--- a/mobile/android/app/build.gradle
+++ b/mobile/android/app/build.gradle
@@ -22,16 +22,22 @@ android {
         testApplicationId 'org.mozilla.roboexample.test'
         testInstrumentationRunner 'org.mozilla.gecko.FennecInstrumentationTestRunner'
         // Used by Robolectric based tests; see TestRunner.
         buildConfigField 'String', 'BUILD_DIR', "\"${project.buildDir}\""
 
         vectorDrawables.useSupportLibrary = true
     }
 
+    aaptOptions {
+        // The omnijar is already a compressed file itself and Gecko expects it to be
+        // STORED within the APK rather than DEFLATED.
+        noCompress 'ja'
+    }
+
     compileOptions {
         sourceCompatibility JavaVersion.VERSION_1_7
         targetCompatibility JavaVersion.VERSION_1_7
     }
 
     dexOptions {
         javaMaxHeapSize "2g"
         jumboMode = true
--- a/mobile/android/installer/package-manifest.in
+++ b/mobile/android/installer/package-manifest.in
@@ -169,19 +169,16 @@
 @BINPATH@/components/imglib2.xpt
 @BINPATH@/components/inspector.xpt
 @BINPATH@/components/intl.xpt
 @BINPATH@/components/jar.xpt
 @BINPATH@/components/jsdebugger.xpt
 @BINPATH@/components/jsdownloads.xpt
 @BINPATH@/components/jsinspector.xpt
 @BINPATH@/components/layout_base.xpt
-#ifdef MOZ_DEBUG
-@BINPATH@/components/layout_debug.xpt
-#endif
 #ifdef NS_PRINTING
 @BINPATH@/components/layout_printing.xpt
 #endif
 @BINPATH@/components/layout_xul_tree.xpt
 @BINPATH@/components/layout_xul.xpt
 @BINPATH@/components/locale.xpt
 @BINPATH@/components/mimetype.xpt
 @BINPATH@/components/mozfind.xpt
--- a/mobile/locales/search/list.json
+++ b/mobile/locales/search/list.json
@@ -176,26 +176,16 @@
       }
     },
     "en-US": {
       "default": {
         "visibleDefaultEngines": [
           "google", "yahoo", "bing", "amazondotcom", "ddg", "twitter", "wikipedia"
         ]
       },
-      "US": {
-        "visibleDefaultEngines": [
-          "yahoo", "google-nocodes", "bing", "amazondotcom", "ddg", "twitter", "wikipedia"
-        ]
-      },
-      "CA": {
-        "visibleDefaultEngines": [
-          "google-nocodes", "yahoo", "bing", "amazondotcom", "ddg", "twitter", "wikipedia"
-        ]
-      },
       "experimental-hidden": {
         "visibleDefaultEngines": [
           "amazon-ca", "amazon-au", "google-2018", "duckduckgo"
         ]
       }
     },
     "en-ZA": {
       "default": {
--- a/services/sync/Weave.js
+++ b/services/sync/Weave.js
@@ -9,18 +9,17 @@ const Cu = Components.utils;
 Cu.import("resource://gre/modules/XPCOMUtils.jsm");
 Cu.import("resource://gre/modules/Services.jsm");
 XPCOMUtils.defineLazyModuleGetter(this, "FileUtils",
                                   "resource://gre/modules/FileUtils.jsm");
 XPCOMUtils.defineLazyGetter(this, "Utils", () => {
   return Cu.import("resource://services-sync/util.js", {}).Utils;
 });
 
-const SYNC_PREFS_BRANCH = "services.sync.";
-
+XPCOMUtils.defineLazyPreferenceGetter(this, "syncUsername", "services.sync.username");
 
 /**
  * Sync's XPCOM service.
  *
  * It is named "Weave" for historical reasons.
  *
  * It's worth noting how Sync is lazily loaded. We register a timer that
  * loads Sync a few seconds after app startup. This is so Sync does not
@@ -123,18 +122,17 @@ WeaveService.prototype = {
    * Whether Sync appears to be enabled.
    *
    * This returns true if we have an associated FxA account
    *
    * It does *not* perform a robust check to see if the client is working.
    * For that, you'll want to check Weave.Status.checkSetup().
    */
   get enabled() {
-    let prefs = Services.prefs.getBranch(SYNC_PREFS_BRANCH);
-    return prefs.prefHasUserValue("username");
+    return !!syncUsername;
   }
 };
 
 function AboutWeaveLog() {}
 AboutWeaveLog.prototype = {
   classID: Components.ID("{d28f8a0b-95da-48f4-b712-caf37097be41}"),
 
   QueryInterface: XPCOMUtils.generateQI([Ci.nsIAboutModule,
--- a/services/sync/tests/tps/all_tests.json
+++ b/services/sync/tests/tps/all_tests.json
@@ -16,17 +16,16 @@
     "test_bug575423.js",
     "test_bug546807.js",
     "test_history_collision.js",
     "test_privbrw_passwords.js",
     "test_privbrw_tabs.js",
     "test_bookmarks_in_same_named_folder.js",
     "test_client_wipe.js",
     "test_special_tabs.js",
-    "test_addon_sanity.js",
     "test_addon_restartless_xpi.js",
     "test_addon_nonrestartless_xpi.js",
     "test_addon_reconciling.js",
     "test_addon_wipe.js",
     "test_existing_bookmarks.js"
   ]
 }
 
--- a/services/sync/tests/tps/test_addon_nonrestartless_xpi.js
+++ b/services/sync/tests/tps/test_addon_nonrestartless_xpi.js
@@ -25,29 +25,33 @@ var phases = {
   "phase16": "profile2"
 };
 
 const id = "unsigned-xpi@tests.mozilla.org";
 
 Phase("phase01", [
   [Addons.verifyNot, [id]],
   [Addons.install, [id]],
-  [Sync]
+  [Sync],
+  [Addons.skipValidation] // Validation disabled due to bug 1427835
 ]);
 Phase("phase02", [
   [Addons.verify, [id], STATE_ENABLED],
-  [Sync]
+  [Sync],
+  [Addons.skipValidation] // Validation disabled due to bug 1427835
 ]);
 Phase("phase03", [
   [Addons.verifyNot, [id]],
-  [Sync]
+  [Sync],
+  [Addons.skipValidation] // Validation disabled due to bug 1427835
 ]);
 Phase("phase04", [
   [Addons.verify, [id], STATE_ENABLED],
-  [Sync]
+  [Sync],
+  [Addons.skipValidation] // Validation disabled due to bug 1427835
 ]);
 
 // Now we disable the add-on
 Phase("phase05", [
   [EnsureTracking],
   [Addons.setEnabled, [id], STATE_DISABLED],
   [Sync],
   [Addons.skipValidation] // Validation disabled due to bug 1380472
@@ -67,43 +71,51 @@ Phase("phase08", [
   [Sync],
   [Addons.skipValidation] // Validation disabled due to bug 1380472
 ]);
 
 // Now we re-enable it again.
 Phase("phase09", [
   [EnsureTracking],
   [Addons.setEnabled, [id], STATE_ENABLED],
-  [Sync]
+  [Sync],
+  [Addons.skipValidation] // Validation disabled due to bug 1427835
 ]);
 Phase("phase10", [
   [Addons.verify, [id], STATE_ENABLED],
-  [Sync]
+  [Sync],
+  [Addons.skipValidation] // Validation disabled due to bug 1427835
 ]);
 Phase("phase11", [
   [Addons.verify, [id], STATE_DISABLED],
-  [Sync]
+  [Sync],
+  [Addons.skipValidation] // Validation disabled due to bug 1427835
 ]);
 Phase("phase12", [
   [Addons.verify, [id], STATE_ENABLED],
-  [Sync]
+  [Sync],
+  [Addons.skipValidation] // Validation disabled due to bug 1427835
 ]);
 
 // And we uninstall it
 
 Phase("phase13", [
   [EnsureTracking],
   [Addons.verify, [id], STATE_ENABLED],
   [Addons.uninstall, [id]],
-  [Sync]
+  [Sync],
+  [Addons.skipValidation] // Validation disabled due to bug 1427835
 ]);
 Phase("phase14", [
   [Addons.verifyNot, [id]],
-  [Sync]
+  [Sync],
+  [Addons.skipValidation] // Validation disabled due to bug 1427835
 ]);
 Phase("phase15", [
   [Addons.verify, [id], STATE_ENABLED],
-  [Sync]
+  [Sync],
+  [Addons.skipValidation] // Validation disabled due to bug 1427835
 ]);
 Phase("phase16", [
   [Addons.verifyNot, [id]],
-  [Sync]
+  [Sync],
+  [Addons.skipValidation] // Validation disabled due to bug 1427835
 ]);
deleted file mode 100644
--- a/services/sync/tests/tps/test_addon_sanity.js
+++ /dev/null
@@ -1,30 +0,0 @@
-/* Any copyright is dedicated to the Public Domain.
-   http://creativecommons.org/publicdomain/zero/1.0/ */
-
-/*
- * The list of phases mapped to their corresponding profiles.  The object
- * here must be in strict JSON format, as it will get parsed by the Python
- * testrunner (no single quotes, extra comma's, etc).
- */
-
-EnableEngines(["addons"]);
-
-var phases = { "phase1": "profile1",
-               "phase2": "profile1" };
-
-const id = "unsigned-xpi@tests.mozilla.org";
-
-Phase("phase1", [
-  [Addons.install, [id]],
-  // Non-restartless add-on shouldn't be found after install.
-  [Addons.verifyNot, [id]],
-
-  // But it should be marked for Sync.
-  [Sync]
-]);
-
-Phase("phase2", [
-  // Add-on should be present after restart
-  [Addons.verify, [id], STATE_ENABLED],
-  [Sync] // Sync to ensure everything is initialized enough for the addon validator to run
-]);
--- a/servo/.taskcluster.yml
+++ b/servo/.taskcluster.yml
@@ -6,29 +6,32 @@ metadata:
     and embedded use.
   owner: '{{ event.head.user.email }}'
   source: '{{ event.head.repo.url }}'
 tasks:
   - provisionerId: '{{ taskcluster.docker.provisionerId }}'
     workerType: '{{ taskcluster.docker.workerType }}'
     extra:
       github:
-        events: []
+        events:
+          - pull_request.opened
+          - pull_request.reopened
+          - pull_request.synchronize
     payload:
       maxRunTime: 3600
-      image: servobrowser/servo-linux-dev
+      image: 'servobrowser/servo-linux-dev:servo-linux-build-deps-2017-06-30'
       command:
         - /bin/bash
         - '--login'
         - '-c'
         - '-x'
         - >-
           git clone {{event.head.repo.url}} servo &&
           cd servo &&
           git config advice.detachedHead false &&
           git checkout {{event.head.sha}} &&
           etc/ci/taskcluster-test.sh
     metadata:
       name: linux-tests
       description: Run Linux tests.
       owner: '{{ event.head.user.email }}'
       source: '{{ event.head.repo.url }}'
-
+allowPullRequests: public
--- a/servo/components/gfx/platform/freetype/android/font_list.rs
+++ b/servo/components/gfx/platform/freetype/android/font_list.rs
@@ -198,17 +198,17 @@ impl FontList {
             aliases: aliases
         })
     }
 
     // Fonts expected to exist in Android devices.
     // Only used in the unlikely case where no font xml mapping files are found.
     fn fallback_font_families() -> Vec<FontFamily> {
         let alternatives = [
-            ("san-serif", "Roboto-Regular.ttf"),
+            ("sans-serif", "Roboto-Regular.ttf"),
             ("Droid Sans", "DroidSans.ttf"),
         ];
 
         alternatives.iter().filter(|item| {
             Path::new(&Self::font_absolute_path(item.1)).exists()
         }).map(|item| {
             FontFamily {
                 name: item.0.into(),
--- a/servo/components/style/properties/gecko.mako.rs
+++ b/servo/components/style/properties/gecko.mako.rs
@@ -2943,18 +2943,18 @@ fn static_assert() {
 <%def name="impl_copy_animation_or_transition_value(type, ident, gecko_ffi_name)">
     #[allow(non_snake_case)]
     pub fn copy_${type}_${ident}_from(&mut self, other: &Self) {
         self.gecko.m${type.capitalize()}s.ensure_len(other.gecko.m${type.capitalize()}s.len());
 
         let count = other.gecko.m${type.capitalize()}${gecko_ffi_name}Count;
         self.gecko.m${type.capitalize()}${gecko_ffi_name}Count = count;
 
-        let iter = self.gecko.m${type.capitalize()}s.iter_mut().zip(
-            other.gecko.m${type.capitalize()}s.iter().take(count as usize).cycle()
+        let iter = self.gecko.m${type.capitalize()}s.iter_mut().take(count as usize).zip(
+            other.gecko.m${type.capitalize()}s.iter()
         );
 
         for (ours, others) in iter {
             ours.m${gecko_ffi_name} = others.m${gecko_ffi_name};
         }
     }
 
     #[allow(non_snake_case)]
@@ -2977,17 +2977,17 @@ fn static_assert() {
               I::IntoIter: ExactSizeIterator + Clone
     {
         let v = v.into_iter();
         debug_assert!(v.len() != 0);
         let input_len = v.len();
         self.gecko.m${type.capitalize()}s.ensure_len(input_len);
 
         self.gecko.m${type.capitalize()}${gecko_ffi_name}Count = input_len as u32;
-        for (gecko, servo) in self.gecko.m${type.capitalize()}s.iter_mut().zip(v.cycle()) {
+        for (gecko, servo) in self.gecko.m${type.capitalize()}s.iter_mut().take(input_len as usize).zip(v) {
             gecko.m${gecko_ffi_name} = servo.seconds() * 1000.;
         }
     }
     #[allow(non_snake_case)]
     pub fn ${type}_${ident}_at(&self, index: usize)
         -> longhands::${type}_${ident}::computed_value::SingleComputedValue {
         use values::computed::Time;
         Time::from_seconds(self.gecko.m${type.capitalize()}s[index].m${gecko_ffi_name} / 1000.)
@@ -3002,17 +3002,17 @@ fn static_assert() {
               I::IntoIter: ExactSizeIterator + Clone
     {
         let v = v.into_iter();
         debug_assert!(v.len() != 0);
         let input_len = v.len();
         self.gecko.m${type.capitalize()}s.ensure_len(input_len);
 
         self.gecko.m${type.capitalize()}TimingFunctionCount = input_len as u32;
-        for (gecko, servo) in self.gecko.m${type.capitalize()}s.iter_mut().zip(v.cycle()) {
+        for (gecko, servo) in self.gecko.m${type.capitalize()}s.iter_mut().take(input_len as usize).zip(v) {
             gecko.mTimingFunction = servo.into();
         }
     }
     ${impl_animation_or_transition_count(type, 'timing_function', 'TimingFunction')}
     ${impl_copy_animation_or_transition_value(type, 'timing_function', 'TimingFunction')}
     pub fn ${type}_timing_function_at(&self, index: usize)
         -> longhands::${type}_timing_function::computed_value::SingleComputedValue {
         self.gecko.m${type.capitalize()}s[index].mTimingFunction.into()
@@ -3059,17 +3059,17 @@ fn static_assert() {
         let v = v.into_iter();
 
         debug_assert!(v.len() != 0);
         let input_len = v.len();
         self.gecko.mAnimations.ensure_len(input_len);
 
         self.gecko.mAnimation${gecko_ffi_name}Count = input_len as u32;
 
-        for (gecko, servo) in self.gecko.mAnimations.iter_mut().zip(v.cycle()) {
+        for (gecko, servo) in self.gecko.mAnimations.iter_mut().take(input_len as usize).zip(v) {
             let result = match servo {
                 % for value in keyword.gecko_values():
                     Keyword::${to_camel_case(value)} =>
                         structs::${keyword.gecko_constant(value)} ${keyword.maybe_cast(cast_type)},
                 % endfor
             };
             gecko.m${gecko_ffi_name} = result;
         }
@@ -3293,17 +3293,18 @@ fn static_assert() {
     ${impl_css_url('_moz_binding', 'mBinding')}
 
     ${impl_transition_time_value('delay', 'Delay')}
     ${impl_transition_time_value('duration', 'Duration')}
     ${impl_transition_timing_function()}
 
     pub fn transition_combined_duration_at(&self, index: usize) -> f32 {
         // https://drafts.csswg.org/css-transitions/#transition-combined-duration
-        self.gecko.mTransitions[index].mDuration.max(0.0) + self.gecko.mTransitions[index].mDelay
+        self.gecko.mTransitions[index % self.gecko.mTransitionDurationCount as usize].mDuration.max(0.0)
+            + self.gecko.mTransitions[index % self.gecko.mTransitionDelayCount as usize].mDelay
     }
 
     pub fn set_transition_property<I>(&mut self, v: I)
         where I: IntoIterator<Item = longhands::transition_property::computed_value::single_value::T>,
               I::IntoIter: ExactSizeIterator
     {
         use gecko_bindings::structs::nsCSSPropertyID::eCSSPropertyExtra_no_properties;
 
@@ -3327,17 +3328,17 @@ fn static_assert() {
         }
     }
 
     /// Returns whether there are any transitions specified.
     pub fn specifies_transitions(&self) -> bool {
         use gecko_bindings::structs::nsCSSPropertyID::eCSSPropertyExtra_all_properties;
         if self.gecko.mTransitionPropertyCount == 1 &&
             self.gecko.mTransitions[0].mProperty == eCSSPropertyExtra_all_properties &&
-            self.gecko.mTransitions[0].mDuration.max(0.0) + self.gecko.mTransitions[0].mDelay <= 0.0f32 {
+            self.transition_combined_duration_at(0) <= 0.0f32 {
             return false;
         }
 
         self.gecko.mTransitionPropertyCount > 0
     }
 
     pub fn transition_property_at(&self, index: usize)
         -> longhands::transition_property::computed_value::SingleComputedValue {
@@ -3384,17 +3385,25 @@ fn static_assert() {
 
     pub fn reset_transition_property(&mut self, other: &Self) {
         self.copy_transition_property_from(other)
     }
 
     ${impl_transition_count('property', 'Property')}
 
     pub fn animations_equals(&self, other: &Self) -> bool {
-        unsafe { bindings::Gecko_StyleAnimationsEquals(&self.gecko.mAnimations, &other.gecko.mAnimations) }
+        return self.gecko.mAnimationNameCount == other.gecko.mAnimationNameCount
+            && self.gecko.mAnimationDelayCount == other.gecko.mAnimationDelayCount
+            && self.gecko.mAnimationDirectionCount == other.gecko.mAnimationDirectionCount
+            && self.gecko.mAnimationDurationCount == other.gecko.mAnimationDurationCount
+            && self.gecko.mAnimationFillModeCount == other.gecko.mAnimationFillModeCount
+            && self.gecko.mAnimationIterationCountCount == other.gecko.mAnimationIterationCountCount
+            && self.gecko.mAnimationPlayStateCount == other.gecko.mAnimationPlayStateCount
+            && self.gecko.mAnimationTimingFunctionCount == other.gecko.mAnimationTimingFunctionCount
+            && unsafe { bindings::Gecko_StyleAnimationsEquals(&self.gecko.mAnimations, &other.gecko.mAnimations) }
     }
 
     pub fn set_animation_name<I>(&mut self, v: I)
         where I: IntoIterator<Item = longhands::animation_name::computed_value::single_value::T>,
               I::IntoIter: ExactSizeIterator
     {
         let v = v.into_iter();
         debug_assert!(v.len() != 0);
@@ -3451,17 +3460,17 @@ fn static_assert() {
 
         let v = v.into_iter();
 
         debug_assert_ne!(v.len(), 0);
         let input_len = v.len();
         self.gecko.mAnimations.ensure_len(input_len);
 
         self.gecko.mAnimationIterationCountCount = input_len as u32;
-        for (gecko, servo) in self.gecko.mAnimations.iter_mut().zip(v.cycle()) {
+        for (gecko, servo) in self.gecko.mAnimations.iter_mut().take(input_len as usize).zip(v) {
             match servo {
                 AnimationIterationCount::Number(n) => gecko.mIterationCount = n,
                 AnimationIterationCount::Infinite => gecko.mIterationCount = f32::INFINITY,
             }
         }
     }
 
     pub fn animation_iteration_count_at(
--- a/servo/components/style/properties/properties.mako.rs
+++ b/servo/components/style/properties/properties.mako.rs
@@ -682,24 +682,16 @@ impl LonghandId {
     /// So we classify properties into "early" and "other", such that the only
     /// dependencies can be from "other" to "early".
     ///
     /// Unfortunately, it’s not easy to check that this classification is
     /// correct.
     fn is_early_property(&self) -> bool {
         matches!(*self,
             % if product == 'gecko':
-            // We need to know the number of animations / transition-properties
-            // before setting the rest of the related longhands, see #15923.
-            //
-            // FIXME(emilio): Looks to me that we could just do this in Gecko
-            // instead of making them early properties. Indeed, the spec
-            // mentions _used_ values, not computed values, so this looks wrong.
-            LonghandId::AnimationName |
-            LonghandId::TransitionProperty |
 
             // Needed to properly compute the writing mode, to resolve logical
             // properties, and similar stuff. In this block instead of along
             // `WritingMode` and `Direction` just for convenience, since it's
             // Gecko-only (for now at least).
             //
             // see WritingMode::new.
             LonghandId::TextOrientation |
--- a/servo/components/style/values/specified/box.rs
+++ b/servo/components/style/values/specified/box.rs
@@ -548,28 +548,18 @@ impl Parse for Contain {
         input: &mut Parser<'i, 't>
     ) -> Result<Contain, ParseError<'i>> {
         let mut result = Contain::empty();
         while let Ok(name) = input.try(|i| i.expect_ident_cloned()) {
             let flag = match_ignore_ascii_case! { &name,
                 "layout" => Some(Contain::LAYOUT),
                 "style" => Some(Contain::STYLE),
                 "paint" => Some(Contain::PAINT),
-                "strict" => {
-                    if result.is_empty() {
-                        return Ok(Contain::STRICT | Contain::STRICT_BITS)
-                    }
-                    None
-                },
-                "none" => {
-                    if result.is_empty() {
-                        return Ok(result)
-                    }
-                    None
-                },
+                "strict" if result.is_empty() => return Ok(Contain::STRICT | Contain::STRICT_BITS),
+                "none" if result.is_empty() => return Ok(result),
                 _ => None
             };
 
             let flag = match flag {
                 Some(flag) if !result.contains(flag) => flag,
                 _ => return Err(input.new_custom_error(SelectorParseErrorKind::UnexpectedIdent(name)))
             };
             result.insert(flag);
--- a/taskcluster/ci/config.yml
+++ b/taskcluster/ci/config.yml
@@ -36,16 +36,17 @@ treeherder:
         'tc-L10n-Rpk': 'Localized Repackaged Repacks executed by Taskcluster'
         'tc-BM-L10n': 'Beetmover for locales executed by Taskcluster'
         'tc-BMR-L10n': 'Beetmover repackages for locales executed by Taskcluster'
         'c-Up': 'Balrog submission of complete updates'
         'tc-cs': 'Checksum signing executed by Taskcluster'
         'tc-rs': 'Repackage signing executed by Taskcluster'
         'tc-BMcs': 'Beetmover checksums, executed by Taskcluster'
         'Aries': 'Aries Device Image'
+        'Deb7': 'Packages for Debian 7'
         'Nexus 5-L': 'Nexus 5-L Device Image'
         'I': 'Docker Image Builds'
         'TL': 'Toolchain builds for Linux 64-bits'
         'TM': 'Toolchain builds for OSX'
         'TMW': 'Toolchain builds for Windows MinGW'
         'TW32': 'Toolchain builds for Windows 32-bits'
         'TW64': 'Toolchain builds for Windows 64-bits'
         'Searchfox': 'Searchfox builds'
--- a/taskcluster/ci/docker-image/kind.yml
+++ b/taskcluster/ci/docker-image/kind.yml
@@ -1,30 +1,37 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 loader: taskgraph.loader.transform:loader
 
+kind-dependencies:
+  - packages
+
 transforms:
   - taskgraph.transforms.docker_image:transforms
   - taskgraph.transforms.task:transforms
 
 # make a task for each docker-image we might want.  For the moment, since we
 # write artifacts for each, these are whitelisted, but ideally that will change
 # (to use subdirectory clones of the proper directory), at which point we can
 # generate tasks for every docker image in the directory, secure in the
 # knowledge that unnecessary images will be omitted from the target task graph
 jobs:
   desktop1604-test:
     symbol: I(dt16t)
   desktop-build:
     symbol: I(db)
   valgrind-build:
     symbol: I(vb)
+  toolchain-build:
+    symbol: I(toolchain)
+    packages:
+      - deb7-python
   lint:
     symbol: I(lnt)
   android-build:
     symbol: I(agb)
   index-task:
     symbol: I(idx)
   funsize-update-generator:
     symbol: I(pg)
new file mode 100644
--- /dev/null
+++ b/taskcluster/ci/packages/kind.yml
@@ -0,0 +1,28 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+loader: taskgraph.loader.transform:loader
+
+transforms:
+  - taskgraph.transforms.try_job:transforms
+  - taskgraph.transforms.job:transforms
+  - taskgraph.transforms.task:transforms
+
+jobs:
+  deb7-python:
+    description: "Python backport for Debian wheezy"
+    treeherder:
+      kind: build
+      platform: packages/opt
+      symbol: Deb7(python)
+      tier: 1
+    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
+    worker:
+      max-run-time: 1800
+    run:
+      using: debian-package
+      dsc: "http://snapshot.debian.org/archive/debian/20160813T164221Z/pool/main/p/python2.7/python2.7_2.7.9-2+deb8u1.dsc"
+      dsc-sha256: 274c293e7156edf59cb9f0a9d8cedcd94fa801df35adf39b8a9f3d776a250ead
+      patch: python-wheezy.diff
+      pre-build-command: debian/rules control-file
new file mode 100644
--- /dev/null
+++ b/taskcluster/docker/toolchain-build/Dockerfile
@@ -0,0 +1,102 @@
+FROM debian:wheezy-20171210
+MAINTAINER Mike Hommey <mhommey@mozilla.com>
+
+### Add worker user and setup its workspace.
+RUN mkdir /builds && \
+    groupadd -g 500 worker && \
+    useradd -u 500 -g 500 -d /builds/worker -s /bin/bash -m worker && \
+    mkdir -p /builds/worker/workspace && \
+    chown -R worker:worker /builds
+
+# Declare default working folder
+WORKDIR /builds/worker
+
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
+
+# Set variables normally configured at login by the shell's parent process;
+# these are taken from the GNU su manual
+ENV HOME=/builds/worker \
+    SHELL=/bin/bash \
+    USER=worker \
+    LOGNAME=worker \
+    HOSTNAME=taskcluster-worker \
+    DEBIAN_FRONTEND=noninteractive
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
+
+# Set apt sources list to a snapshot.
+RUN for s in debian_wheezy debian_wheezy-updates debian_wheezy-backports debian-security_wheezy/updates; do \
+      echo "deb http://snapshot.debian.org/archive/${s%_*}/20171210T214726Z/ ${s#*_} main"; \
+    done > /etc/apt/sources.list
+
+RUN apt-get -o Acquire::Check-Valid-Until=false update -q && \
+    apt-get install -yyq --no-install-recommends \
+      apt-transport-https \
+      ca-certificates
+
+# %ARG DOCKER_IMAGE_PACKAGES
+RUN for task in $DOCKER_IMAGE_PACKAGES; do \
+      echo "deb [trusted=yes] https://queue.taskcluster.net/v1/task/$task/runs/0/artifacts/public/build/ debian/" >> /etc/apt/sources.list; \
+    done
+
+RUN dpkg --add-architecture i386
+
+RUN apt-get -o Acquire::Check-Valid-Until=false update -q && \
+    apt-get install -yyq --no-install-recommends \
+      autoconf \
+      automake \
+      bison \
+      build-essential \
+      curl \
+      flex \
+      gawk \
+      gcc-multilib \
+      git \
+      gnupg \
+      libtool \
+      make \
+      p7zip-full \
+      procps \
+      pxz/wheezy-backports \
+      python-dev \
+      python-pip \
+      python-setuptools \
+      python-virtualenv \
+      subversion \
+      tar \
+      unzip \
+      uuid \
+      wget \
+      xz-utils \
+      zip \
+    && \
+    apt-get clean
+
+# %include python/mozbuild/mozbuild/action/tooltool.py
+COPY topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /setup/tooltool.py
+
+# %include testing/mozharness/external_tools/robustcheckout.py
+COPY topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py
+
+# %include taskcluster/docker/recipes/common.sh
+COPY topsrcdir/taskcluster/docker/recipes/common.sh /setup/common.sh
+
+# %include taskcluster/docker/recipes/install-mercurial.sh
+COPY topsrcdir/taskcluster/docker/recipes/install-mercurial.sh /setup/install-mercurial.sh
+
+# %include taskcluster/docker/recipes/debian-build-system-setup.sh
+COPY topsrcdir/taskcluster/docker/recipes/debian-build-system-setup.sh /setup/system-setup.sh
+
+RUN bash /setup/system-setup.sh
+
+# Add pip configuration, among other things.
+# %include taskcluster/docker/recipes/dot-config
+COPY topsrcdir/taskcluster/docker/recipes/dot-config /builds/worker/.config
+
+# %include taskcluster/docker/recipes/run-task
+COPY topsrcdir/taskcluster/docker/recipes/run-task /builds/worker/bin/run-task
+
+RUN chown -R worker:worker /builds/worker/bin && chmod 755 /builds/worker/bin/*
--- a/taskcluster/docs/kinds.rst
+++ b/taskcluster/docs/kinds.rst
@@ -343,8 +343,12 @@ Dummy tasks to consolidate balrog depend
 
 post-beetmover-dummy
 --------------------
 Dummy tasks to consolidate beetmover dependencies to avoid taskcluster limits on number of dependencies per task.
 
 post-beetmover-checksums-dummy
 ------------------------------
 Dummy tasks to consolidate beetmover-checksums dependencies to avoid taskcluster limits on number of dependencies per task.
+
+packages
+--------
+Tasks used to build packages for use in docker images.
--- a/taskcluster/docs/transforms.rst
+++ b/taskcluster/docs/transforms.rst
@@ -139,16 +139,17 @@ following ``run-using`` are available
 
   * ``buildbot``
   * ``hazard``
   * ``mach``
   * ``mozharness``
   * ``mozharness-test``
   * ``run-task``
   * ``spidermonkey`` or ``spidermonkey-package`` or ``spidermonkey-mozjs-crate`` or ``spidermonkey-rust-bindings``
+  * ``debian-package``
   * ``toolchain-script``
 
 
 Task Descriptions
 -----------------
 
 Every kind needs to create tasks, and all of those tasks have some things in
 common.  They all run on one of a small set of worker implementations, each
--- a/taskcluster/taskgraph/transforms/docker_image.py
+++ b/taskcluster/taskgraph/transforms/docker_image.py
@@ -36,34 +36,60 @@ docker_image_schema = Schema({
     Optional('job-from'): basestring,
 
     # Arguments to use for the Dockerfile.
     Optional('args'): {basestring: basestring},
 
     # Name of the docker image definition under taskcluster/docker, when
     # different from the docker image name.
     Optional('definition'): basestring,
+
+    # List of package tasks this docker image depends on.
+    Optional('packages'): [basestring],
 })
 
 
 @transforms.add
 def validate(config, tasks):
     for task in tasks:
         yield validate_schema(
             docker_image_schema, task,
             "In docker image {!r}:".format(task.get('name', 'unknown')))
 
 
 @transforms.add
 def fill_template(config, tasks):
+    available_packages = {}
+    for task in config.kind_dependencies_tasks:
+        if task.kind != 'packages':
+            continue
+        name = task.label.replace('packages-', '')
+        for route in task.task.get('routes', []):
+            if route.startswith('index.') and '.hash.' in route:
+                available_packages[name] = route
+                break
     for task in tasks:
         image_name = task.pop('name')
         job_symbol = task.pop('symbol')
         args = task.pop('args', {})
         definition = task.pop('definition', image_name)
+        packages = task.pop('packages', [])
+
+        for p in packages:
+            if p not in available_packages:
+                raise Exception('Missing package job for {}-{}: {}'.format(
+                    config.kind, image_name, p))
+
+        # Generating the context hash relies on arguments being set, so we
+        # set this now, although it's not the final value (it's a
+        # task-reference value, see further below). We add the package routes
+        # containing a hash to get the overall docker image hash, so changes
+        # to packages will be reflected in the docker image hash.
+        args['DOCKER_IMAGE_PACKAGES'] = ' '.join('<{}>'.format(p)
+                                                 for p in packages)
 
         context_path = os.path.join('taskcluster', 'docker', definition)
         context_hash = generate_context_hash(
             GECKO, context_path, image_name, args)
 
         description = 'Build the docker image {} for use by dependent tasks'.format(
             image_name)
 
@@ -126,18 +152,30 @@ def fill_template(config, tasks):
                 'chain-of-trust': True,
                 'docker-in-docker': True,
                 'taskcluster-proxy': True,
                 'max-run-time': 7200,
             },
         }
 
         for k, v in args.items():
-            taskdesc['worker']['env'][k] = v
+            if k == 'DOCKER_IMAGE_PACKAGES':
+                taskdesc['worker']['env'][k] = {'task-reference': v}
+            else:
+                taskdesc['worker']['env'][k] = v
 
+        if packages:
+            deps = taskdesc.setdefault('dependencies', {})
+            digest_data = [context_hash]
+            for p in sorted(packages):
+                deps[p] = 'packages-{}'.format(p)
+                digest_data.append(available_packages[p])
+            kwargs = {'digest_data': digest_data}
+        else:
+            kwargs = {'digest': context_hash}
         add_optimization(
             config, taskdesc,
             cache_type="docker-images.v1",
             cache_name=image_name,
-            digest=context_hash,
+            **kwargs
         )
 
         yield taskdesc
new file mode 100644
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/debian_package.py
@@ -0,0 +1,138 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running spidermonkey jobs via dedicated scripts
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import os
+
+from taskgraph.util.schema import Schema
+from voluptuous import Optional, Required
+
+from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.job.common import add_public_artifacts
+
+from taskgraph.util.hash import hash_paths
+from taskgraph import GECKO
+from taskgraph.util.cached_tasks import add_optimization
+
+run_schema = Schema({
+    Required('using'): 'debian-package',
+    # Debian distribution
+    Optional('dist'): basestring,
+
+    # Date of the snapshot (from snapshot.debian.org) to use, in the format
+    # YYYYMMDDTHHMMSSZ. The same date is used for the base docker-image name
+    # (only the YYYYMMDD part).
+    Optional('snapshot'): basestring,
+
+    # URL of the source control (.dsc) file to build.
+    Required('dsc'): basestring,
+
+    # SHA256 of the source control (.dsc) file.
+    Required('dsc-sha256'): basestring,
+
+    # Patch to apply to the extracted source.
+    Optional('patch'): basestring,
+
+    # Command to run before dpkg-buildpackage.
+    Optional('pre-build-command'): basestring,
+})
+
+
+@run_job_using("docker-worker", "debian-package", schema=run_schema)
+def docker_worker_debian_package(config, job, taskdesc):
+    run = job['run']
+    run.setdefault('dist', 'wheezy')
+    run.setdefault('snapshot', '20171210T214726Z')
+
+    worker = taskdesc['worker']
+    worker['artifacts'] = []
+    worker['docker-image'] = 'debian:{dist}-{date}'.format(
+        dist=run['dist'],
+        date=run['snapshot'][:8])
+
+    add_public_artifacts(config, job, taskdesc, path='/tmp/artifacts')
+
+    dsc_file = os.path.basename(run['dsc'])
+    package = dsc_file[:dsc_file.index('_')]
+
+    adjust = ''
+    if 'patch' in run:
+        # We can't depend on docker images, so we don't have robustcheckout
+        # or run-task to get a checkout. So for the one file we need from a
+        # checkout, download it directly.
+        adjust += ('curl -sL {head_repo}/raw-file/{head_rev}'
+                   '/build/debian-packages/{patch} | patch -p1 && ').format(
+            head_repo=config.params['head_repository'],
+            head_rev=config.params['head_rev'],
+            patch=run['patch'],
+        )
+    if 'pre-build-command' in run:
+        adjust += run['pre-build-command'] + ' && '
+
+    # We can't depend on docker images (since docker images depend on packages),
+    # so we inline the whole script here.
+    worker['command'] = [
+        'sh',
+        '-x',
+        '-c',
+        # Fill /etc/apt/sources.list with the relevant snapshot repository.
+        'echo "deb http://snapshot.debian.org/archive/debian'
+        '/{snapshot}/ {dist} main" > /etc/apt/sources.list && '
+        'echo "deb http://snapshot.debian.org/archive/debian'
+        '/{snapshot}/ {dist}-updates main" >> /etc/apt/sources.list && '
+        'echo "deb http://snapshot.debian.org/archive/debian-security'
+        '/{snapshot}/ {dist}/updates main" >> /etc/apt/sources.list && '
+        # Install the base utilities required to build debian packages.
+        'apt-get update -o Acquire::Check-Valid-Until=false -q && '
+        'apt-get install -yyq fakeroot build-essential devscripts apt-utils && '
+        'cd /tmp && '
+        # Get, validate and extract the package source.
+        'dget -d -u {dsc} && '
+        'echo "{dsc_sha256}  {dsc_file}" | sha256sum -c && '
+        'dpkg-source -x {dsc_file} {package} && '
+        'cd {package} && '
+        # Optionally apply patch and/or pre-build command.
+        '{adjust}'
+        # Install the necessary build dependencies.
+        'mk-build-deps -i -r debian/control -t "apt-get -yyq --no-install-recommends" && '
+        # Build the package
+        'DEB_BUILD_OPTIONS="parallel=$(nproc) nocheck" dpkg-buildpackage && '
+        # Copy the artifacts
+        'mkdir -p {artifacts}/debian && '
+        'dcmd cp ../{package}_*.changes {artifacts}/debian/ && '
+        'cd {artifacts} && '
+        # Make the artifacts directory usable as an APT repository.
+        'apt-ftparchive sources debian | gzip -c9 > debian/Sources.gz && '
+        'apt-ftparchive packages debian | gzip -c9 > debian/Packages.gz && '
+        'apt-ftparchive release -o APT::FTPArchive::Release::Codename={dist} debian > Release && '
+        'mv Release debian/'
+        .format(
+            package=package,
+            snapshot=run['snapshot'],
+            dist=run['dist'],
+            dsc=run['dsc'],
+            dsc_file=dsc_file,
+            dsc_sha256=run['dsc-sha256'],
+            adjust=adjust,
+            artifacts='/tmp/artifacts',
+        )
+    ]
+
+    name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
+    files = [
+        # This file
+        'taskcluster/taskgraph/transforms/job/debian_package.py',
+    ]
+    if 'patch' in run:
+        files.append('build/debian-packages/{}'.format(run['patch']))
+    data = [hash_paths(GECKO, files)]
+    for k in ('snapshot', 'dist', 'dsc-sha256', 'pre-build-command'):
+        if k in run:
+            data.append(run[k])
+    add_optimization(config, taskdesc, cache_type='packages.v1',
+                     cache_name=name, digest_data=data)
--- a/testing/config/mozbase_requirements.txt
+++ b/testing/config/mozbase_requirements.txt
@@ -1,8 +1,10 @@
+../tools/mozterm
+
 ../mozbase/manifestparser
 ../mozbase/mozcrash
 ../mozbase/mozdebug
 ../mozbase/mozdevice
 ../mozbase/mozfile
 ../mozbase/mozhttpd
 ../mozbase/mozinfo
 ../mozbase/mozinstall
@@ -10,10 +12,8 @@
 ../mozbase/mozlog
 ../mozbase/moznetwork
 ../mozbase/mozprocess
 ../mozbase/mozprofile
 ../mozbase/mozrunner
 ../mozbase/mozscreenshot
 ../mozbase/moztest
 ../mozbase/mozversion
-
-../tools/mozterm
new file mode 100644
--- /dev/null
+++ b/testing/config/mozbase_source_requirements.txt
@@ -0,0 +1,19 @@
+../../python/mozterm
+
+../mozbase/manifestparser
+../mozbase/mozcrash
+../mozbase/mozdebug
+../mozbase/mozdevice
+../mozbase/mozfile
+../mozbase/mozhttpd
+../mozbase/mozinfo
+../mozbase/mozinstall
+../mozbase/mozleak
+../mozbase/mozlog
+../mozbase/moznetwork
+../mozbase/mozprocess
+../mozbase/mozprofile
+../mozbase/mozrunner
+../mozbase/mozscreenshot
+../mozbase/moztest
+../mozbase/mozversion
--- a/testing/mozbase/mozlog/setup.py
+++ b/testing/mozbase/mozlog/setup.py
@@ -2,30 +2,35 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this file,
 # You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import
 
 from setuptools import setup, find_packages
 
 PACKAGE_NAME = 'mozlog'
-PACKAGE_VERSION = '3.6'
+PACKAGE_VERSION = '3.7'
+DEPS = [
+    'blessings>=1.3',
+    'mozterm',
+    'six >= 1.10.0',
+]
+
 
 setup(name=PACKAGE_NAME,
       version=PACKAGE_VERSION,
       description="Robust log handling specialized for logging in the Mozilla universe",
       long_description="see https://firefox-source-docs.mozilla.org/mozbase/index.html",
       author='Mozilla Automation and Testing Team',
       author_email='tools@lists.mozilla.org',
       url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase',
       license='MPL 1.1/GPL 2.0/LGPL 2.1',
       packages=find_packages(),
       zip_safe=False,
-      install_requires=['blessings >= 1.3',
-                        'six >= 1.10.0'],
+      install_requires=DEPS,
       tests_require=['mozfile'],
       platforms=['Any'],
       classifiers=['Development Status :: 4 - Beta',
                    'Environment :: Console',
                    'Intended Audience :: Developers',
                    'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)',
                    'Operating System :: OS Independent',
                    'Programming Language :: Python :: 2.7',
--- a/testing/mozharness/mozharness/mozilla/testing/talos.py
+++ b/testing/mozharness/mozharness/mozilla/testing/talos.py
@@ -366,16 +366,18 @@ class Talos(TestingMixin, MercurialScrip
             options.extend(['--%s' % key, value])
         # configure profiling options
         options.extend(self.query_gecko_profile_options())
         # extra arguments
         if args is not None:
             options += args
         if 'talos_extra_options' in self.config:
             options += self.config['talos_extra_options']
+        if self.config.get('code_coverage', False):
+            options.extend(['--code-coverage'])
         return options
 
     def populate_webroot(self):
         """Populate the production test slaves' webroots"""
         self.talos_path = os.path.join(
             self.query_abs_dirs()['abs_work_dir'], 'tests', 'talos'
         )
 
@@ -573,17 +575,17 @@ class Talos(TestingMixin, MercurialScrip
                 'tests',
                 'config',
                 'mozbase_requirements.txt'
             )
         else:
             mozbase_requirements = os.path.join(
                 os.path.dirname(self.talos_path),
                 'config',
-                'mozbase_requirements.txt'
+                'mozbase_source_requirements.txt'
             )
         self.register_virtualenv_module(
             requirements=[mozbase_requirements],
             two_pass=True,
             editable=True,
         )
         # require pip >= 1.5 so pip will prefer .whl files to install
         super(Talos, self).create_virtualenv(
--- a/testing/talos/talos/cmdline.py
+++ b/testing/talos/talos/cmdline.py
@@ -175,15 +175,21 @@ def create_parser(mach_interface=False):
     debug_options.add_argument('--debug', action='store_true',
                                help='Enable the debugger. Not specifying a --debugger option will'
                                     'result in the default debugger being used.')
     debug_options.add_argument('--debugger', default=None,
                                help='Name of debugger to use.')
     debug_options.add_argument('--debugger-args', default=None, metavar='params',
                                help='Command-line arguments to pass to the debugger itself; split'
                                     'as the Bourne shell would.')
+    add_arg('--code-coverage', action="store_true",
+            dest='code_coverage',
+            help='Remove any existing ccov gcda output files after browser'
+                 ' initialization but before starting the tests. NOTE:'
+                 ' Currently only supported in production.')
+
     add_logging_group(parser)
     return parser
 
 
 def parse_args(argv=None):
     parser = create_parser()
     return parser.parse_args(argv)
--- a/testing/talos/talos/ffsetup.py
+++ b/testing/talos/talos/ffsetup.py
@@ -3,16 +3,17 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 """
 Set up a browser environment before running a test.
 """
 from __future__ import absolute_import, print_function
 
 import os
+import shutil
 import tempfile
 
 import mozfile
 import mozinfo
 import mozrunner
 from mozlog import get_proxy_logger
 from mozprocess import ProcessHandlerMixin
 from mozprofile.profile import Profile
@@ -180,31 +181,113 @@ class FFSetup(object):
                                               self.browser_config,
                                               self.test_config)
             self.gecko_profile.update_env(self.env)
 
     def clean(self):
         try:
             mozfile.remove(self._tmp_dir)
         except Exception as e:
-            print("Exception while removing profile directory: %s" % self._tmp_dir)
-            print(e)
+            LOG.info("Exception while removing profile directory: %s" % self._tmp_dir)
+            LOG.info(e)
 
         if self.gecko_profile:
             self.gecko_profile.clean()
 
+    def collect_or_clean_ccov(self, clean=False):
+        # NOTE: Currently only supported when running in production
+        if not self.browser_config.get('develop', False):
+            # first see if we can find any ccov files at the ccov output dirs
+            if clean:
+                LOG.info("Cleaning ccov files before starting the talos test")
+            else:
+                LOG.info("Collecting ccov files that were generated during the talos test")
+            gcov_prefix = os.getenv('GCOV_PREFIX', None)
+            js_ccov_dir = os.getenv('JS_CODE_COVERAGE_OUTPUT_DIR', None)
+            gcda_archive_folder_name = 'gcda-archive'
+            _gcda_files_found = []
+
+            for _ccov_env in [gcov_prefix, js_ccov_dir]:
+                if _ccov_env is not None:
+                    # ccov output dir env vars exist; now search for gcda files to remove
+                    _ccov_path = os.path.abspath(_ccov_env)
+                    if os.path.exists(_ccov_path):
+                        # now walk through and look for gcda files
+                        LOG.info("Recursive search for gcda files in: %s" % _ccov_path)
+                        for root, dirs, files in os.walk(_ccov_path):
+                            for next_file in files:
+                                if next_file.endswith(".gcda"):
+                                    # don't want to move or delete files in our 'gcda-archive'
+                                    if root.find(gcda_archive_folder_name) == -1:
+                                        _gcda_files_found.append(os.path.join(root, next_file))
+                    else:
+                        LOG.info("The ccov env var path doesn't exist: %s" % str(_ccov_path))
+
+            # now clean or collect gcda files accordingly
+            if clean:
+                # remove ccov data
+                LOG.info("Found %d gcda files to clean. Deleting..." % (len(_gcda_files_found)))
+                for _gcda in _gcda_files_found:
+                    try:
+                        mozfile.remove(_gcda)
+                    except Exception as e:
+                        LOG.info("Exception while removing file: %s" % _gcda)
+                        LOG.info(e)
+                LOG.info("Finished cleaning ccov gcda files")
+            else:
+                # copy gcda files to archive folder to be collected later
+                gcda_archive_top = os.path.join(gcov_prefix,
+                                                gcda_archive_folder_name,
+                                                self.test_config['name'])
+                LOG.info("Found %d gcda files to collect. Moving to gcda archive %s"
+                         % (len(_gcda_files_found), str(gcda_archive_top)))
+                if not os.path.exists(gcda_archive_top):
+                    try:
+                        os.makedirs(gcda_archive_top)
+                    except OSError:
+                        LOG.critical("Unable to make gcda archive folder %s" % gcda_archive_top)
+                for _gcda in _gcda_files_found:
+                    # want to copy the existing directory structure but put it under archive-dir
+                    # need to remove preceding '/' from _gcda file name so can join the path
+                    gcda_archive_file = os.path.join(gcov_prefix,
+                                                     gcda_archive_folder_name,
+                                                     self.test_config['name'],
+                                                     _gcda.strip(gcov_prefix + "//"))
+                    gcda_archive_dest = os.path.dirname(gcda_archive_file)
+
+                    # create archive folder, mirroring structure
+                    if not os.path.exists(gcda_archive_dest):
+                        try:
+                            os.makedirs(gcda_archive_dest)
+                        except OSError:
+                            LOG.critical("Unable to make archive folder %s" % gcda_archive_dest)
+                    # now copy the file there
+                    try:
+                        shutil.copy(_gcda, gcda_archive_dest)
+                    except Exception as e:
+                        LOG.info("Error copying %s to %s" % (str(_gcda), str(gcda_archive_dest)))
+                        LOG.info(e)
+                LOG.info("Finished collecting ccov gcda files. Copied to: %s" % gcda_archive_top)
+
     def __enter__(self):
         LOG.info('Initialising browser for %s test...'
                  % self.test_config['name'])
         self._init_env()
         self._init_profile()
         try:
             if not self.debug_mode and self.test_config['name'] != "damp":
                 self._run_profile()
         except:
             self.clean()
             raise
         self._init_gecko_profile()
         LOG.info('Browser initialized.')
+        # remove ccov files before actual tests start
+        if self.browser_config.get('code_coverage', False):
+            # if the Firefox build was instrumented for ccov, initializing the browser
+            # will have caused ccov to output some gcda files; in order to have valid
+            # ccov data for the talos test we want to remove these files before starting
+            # the actual talos test(s)
+            self.collect_or_clean_ccov(clean=True)
         return self
 
     def __exit__(self, type, value, tb):
         self.clean()
--- a/testing/talos/talos/run_tests.py
+++ b/testing/talos/talos/run_tests.py
@@ -113,16 +113,28 @@ def run_tests(config, browser_config):
     # instance
     if browser_config['develop']:
         browser_config['extra_args'] = '--no-remote'
 
     # Pass subtests filter argument via a preference
     if browser_config['subtests']:
         browser_config['preferences']['talos.subtests'] = browser_config['subtests']
 
+    # If the --code-coverage flag was provided, set a flag in the browser config so
+    # ffsetup knows that it needs to delete any ccov gcda files resulting from
+    # browser initialization.
+    # NOTE: This is only supported in production; local setup of ccov folders and
+    # data collection is not yet supported, so the --code-coverage flag aborts locally.
+    if config.get('code_coverage', False):
+        if browser_config['develop']:
+            raise TalosError('Aborting: talos --code-coverage flag is only '
+                             'supported in production')
+        else:
+            browser_config['code_coverage'] = True
+
     # set defaults
     testdate = config.get('testdate', '')
 
     # get the process name from the path to the browser
     if not browser_config['process']:
         browser_config['process'] = \
             os.path.basename(browser_config['browser_path'])
 
--- a/testing/talos/talos/ttest.py
+++ b/testing/talos/talos/ttest.py
@@ -286,10 +286,15 @@ class TTest(object):
         # include global (cross-cycle) counters
         test_results.all_counter_results.extend(
             [{key: value} for key, value in global_counters.items()]
         )
         for c in test_results.all_counter_results:
             for key, value in c.items():
                 LOG.debug('COUNTER %r: %s' % (key, value))
 
+        # if running against a code-coverage instrumented build, move the
+        # produced gcda files to a folder where they will be collected later
+        if browser_config.get('code_coverage', False):
+            setup.collect_or_clean_ccov()
+
         # return results
         return test_results
--- a/toolkit/components/browser/nsWebBrowser.cpp
+++ b/toolkit/components/browser/nsWebBrowser.cpp
@@ -1749,30 +1749,30 @@ DrawPaintedLayer(PaintedLayer* aLayer,
 
   ColorPattern color(ToDeviceColor(*static_cast<nscolor*>(aCallbackData)));
   nsIntRect dirtyRect = aRegionToDraw.GetBounds();
   aDrawTarget.FillRect(
     Rect(dirtyRect.x, dirtyRect.y, dirtyRect.width, dirtyRect.height), color);
 }
 
 void
-nsWebBrowser::WindowRaised(nsIWidget* aWidget)
+nsWebBrowser::WindowActivated()
 {
 #if defined(DEBUG_smaug)
   nsCOMPtr<nsIDocument> document = mDocShell->GetDocument();
   nsAutoString documentURI;
   document->GetDocumentURI(documentURI);
   printf("nsWebBrowser::NS_ACTIVATE %p %s\n", (void*)this,
          NS_ConvertUTF16toUTF8(documentURI).get());
 #endif
   Activate();
 }
 
 void
-nsWebBrowser::WindowLowered(nsIWidget* aWidget)
+nsWebBrowser::WindowDeactivated()
 {
 #if defined(DEBUG_smaug)
   nsCOMPtr<nsIDocument> document = mDocShell->GetDocument();
   nsAutoString documentURI;
   document->GetDocumentURI(documentURI);
   printf("nsWebBrowser::NS_DEACTIVATE %p %s\n", (void*)this,
          NS_ConvertUTF16toUTF8(documentURI).get());
 #endif
--- a/toolkit/components/browser/nsWebBrowser.h
+++ b/toolkit/components/browser/nsWebBrowser.h
@@ -113,18 +113,18 @@ protected:
   // XXXbz why are these NS_IMETHOD?  They're not interface methods!
   NS_IMETHOD SetDocShell(nsIDocShell* aDocShell);
   NS_IMETHOD EnsureDocShellTreeOwner();
   NS_IMETHOD BindListener(nsISupports* aListener, const nsIID& aIID);
   NS_IMETHOD UnBindListener(nsISupports* aListener, const nsIID& aIID);
   NS_IMETHOD EnableGlobalHistory(bool aEnable);
 
   // nsIWidgetListener
-  virtual void WindowRaised(nsIWidget* aWidget);
-  virtual void WindowLowered(nsIWidget* aWidget);
+  virtual void WindowActivated() override;
+  virtual void WindowDeactivated() override;
   virtual bool PaintWindow(nsIWidget* aWidget,
                            mozilla::LayoutDeviceIntRegion aRegion) override;
 
 protected:
   RefPtr<nsDocShellTreeOwner> mDocShellTreeOwner;
   nsCOMPtr<nsIDocShell> mDocShell;
   nsCOMPtr<nsIInterfaceRequestor> mDocShellAsReq;
   nsCOMPtr<nsIBaseWindow> mDocShellAsWin;
--- a/toolkit/components/mozintl/mozIMozIntl.idl
+++ b/toolkit/components/mozintl/mozIMozIntl.idl
@@ -36,10 +36,10 @@
  */
 [scriptable, uuid(7f63279a-1a29-4ae6-9e7a-dc9684a23530)]
 interface mozIMozIntl : nsISupports
 {
   jsval getCalendarInfo([optional] in jsval locales);
   jsval getDisplayNames([optional] in jsval locales, [optional] in jsval options);
   jsval getLocaleInfo([optional] in jsval locales);
 
-  jsval createDateTimeFormat([optional] in jsval locales, [optional] in jsval options);
+  readonly attribute jsval DateTimeFormat;
 };
--- a/toolkit/components/mozintl/mozIntl.js
+++ b/toolkit/components/mozintl/mozIntl.js
@@ -64,37 +64,41 @@ class MozIntl {
   getLocaleInfo(locales, ...args) {
     if (!this._cache.hasOwnProperty("getLocaleInfo")) {
       mozIntlHelper.addGetLocaleInfo(this._cache);
     }
 
     return this._cache.getLocaleInfo(getLocales(locales), ...args);
   }
 
-  createDateTimeFormat(locales, options, ...args) {
+  get DateTimeFormat() {
     if (!this._cache.hasOwnProperty("DateTimeFormat")) {
       mozIntlHelper.addDateTimeFormatConstructor(this._cache);
     }
 
-    let resolvedLocales =
-      this._cache.DateTimeFormat.supportedLocalesOf(getLocales(locales));
+    let DateTimeFormat = this._cache.DateTimeFormat;
 
-    if (options) {
-      if (options.dateStyle || options.timeStyle) {
-        options.pattern = osPrefs.getDateTimePattern(
-          getDateTimePatternStyle(options.dateStyle),
-          getDateTimePatternStyle(options.timeStyle),
-          resolvedLocales[0]);
-      } else {
-        // make sure that user doesn't pass a pattern explicitly
-        options.pattern = undefined;
+    class MozDateTimeFormat extends this._cache.DateTimeFormat {
+      constructor(locales, options, ...args) {
+        let resolvedLocales = DateTimeFormat.supportedLocalesOf(getLocales(locales));
+        if (options) {
+          if (options.dateStyle || options.timeStyle) {
+            options.pattern = osPrefs.getDateTimePattern(
+              getDateTimePatternStyle(options.dateStyle),
+              getDateTimePatternStyle(options.timeStyle),
+              resolvedLocales[0]);
+          } else {
+            // make sure that user doesn't pass a pattern explicitly
+            options.pattern = undefined;
+          }
+        }
+        super(resolvedLocales, options, ...args);
       }
     }
-
-    return new this._cache.DateTimeFormat(resolvedLocales, options, ...args);
+    return MozDateTimeFormat;
   }
 }
 
 MozIntl.prototype.classID = Components.ID("{35ec195a-e8d0-4300-83af-c8a2cc84b4a3}");
 MozIntl.prototype.QueryInterface = XPCOMUtils.generateQI([Ci.mozIMozIntl, Ci.nsISupports]);
 
 var components = [MozIntl];
 this.NSGetFactory = XPCOMUtils.generateNSGetFactory(components);
--- a/toolkit/components/mozintl/test/test_mozintl.js
+++ b/toolkit/components/mozintl/test/test_mozintl.js
@@ -1,26 +1,48 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 Components.utils.import("resource://gre/modules/Services.jsm");
 
 function run_test() {
   test_methods_presence();
   test_methods_calling();
+  test_constructors();
 
   ok(true);
 }
 
 function test_methods_presence() {
   equal(Services.intl.getCalendarInfo instanceof Function, true);
   equal(Services.intl.getDisplayNames instanceof Function, true);
   equal(Services.intl.getLocaleInfo instanceof Function, true);
-  equal(Services.intl.createDateTimeFormat instanceof Function, true);
+  equal(Services.intl.DateTimeFormat instanceof Object, true);
 }
 
 function test_methods_calling() {
   Services.intl.getCalendarInfo("pl");
   Services.intl.getDisplayNames("ar");
   Services.intl.getLocaleInfo("de");
-  Services.intl.createDateTimeFormat("fr");
+  new Services.intl.DateTimeFormat("fr");
   ok(true);
 }
+
+function test_constructors() {
+  let dtf = new Intl.DateTimeFormat();
+  let dtf2 = new Services.intl.DateTimeFormat();
+
+  equal(typeof dtf, typeof dtf2);
+
+  Assert.throws(() => {
+    // This is an observable difference between Intl and mozIntl.
+    //
+    // Old ECMA402 APIs (editions 1 and 2) allowed constructors to be called
+    // as functions.
+    // Starting from edition 3, all new constructors throw when called without |new|.
+    //
+    // MozIntl APIs do not implement the legacy behavior and always throw
+    // when called without |new|.
+    //
+    // For more information see https://github.com/tc39/ecma402/pull/84 .
+    Services.intl.DateTimeFormat();
+  }, /class constructors must be invoked with |new|/);
+}
--- a/toolkit/components/passwordmgr/LoginManagerContent.jsm
+++ b/toolkit/components/passwordmgr/LoginManagerContent.jsm
@@ -469,34 +469,36 @@ var LoginManagerContent = {
   stateForDocument(document) {
     let loginFormState = this.loginFormStateByDocument.get(document);
     if (!loginFormState) {
       loginFormState = {
         /**
          * Keeps track of filled fields and values.
          */
         fillsByRootElement: new WeakMap(),
-        loginFormRootElements: new Set(),
+        loginFormRootElements: new WeakSet(),
       };
       this.loginFormStateByDocument.set(document, loginFormState);
     }
     return loginFormState;
   },
 
   /**
    * Compute whether there is an insecure login form on any frame of the current page, and
    * notify the parent process. This is used to control whether insecure password UI appears.
    */
   _detectInsecureFormLikes(topWindow) {
     log("_detectInsecureFormLikes", topWindow.location.href);
 
     // Returns true if this window or any subframes have insecure login forms.
     let hasInsecureLoginForms = (thisWindow) => {
       let doc = thisWindow.document;
-      let hasLoginForm = this.stateForDocument(doc).loginFormRootElements.size > 0;
+      let rootElsWeakSet = this.stateForDocument(doc).loginFormRootElements;
+      let hasLoginForm = ChromeUtils.nondeterministicGetWeakSetKeys(rootElsWeakSet)
+                                    .filter(el => el.isConnected).length > 0;
       return (hasLoginForm && !thisWindow.isSecureContext) ||
              Array.some(thisWindow.frames,
                         frame => hasInsecureLoginForms(frame));
     };
 
     let messageManager = messageManagerFromWindow(topWindow);
     messageManager.sendAsyncMessage("RemoteLogins:insecureLoginFormPresent", {
       hasInsecureLoginForms: hasInsecureLoginForms(topWindow),
@@ -875,21 +877,27 @@ var LoginManagerContent = {
    * To avoid multiple notifications for the same FormLike, this currently
    * avoids capturing when dealing with a real <form> which are ideally already
    * using a submit event.
    *
    * @param {Document} document being navigated
    */
   _onNavigation(aDocument) {
     let state = this.stateForDocument(aDocument);
-    let loginFormRootElements = state.loginFormRootElements;
-    log("_onNavigation: state:", state, "loginFormRootElements size:", loginFormRootElements.size,
+    let rootElsWeakSet = state.loginFormRootElements;
+    let weakLoginFormRootElements = ChromeUtils.nondeterministicGetWeakSetKeys(rootElsWeakSet);
+
+    log("_onNavigation: state:", state, "loginFormRootElements approx size:", weakLoginFormRootElements.length,
         "document:", aDocument);
 
-    for (let formRoot of state.loginFormRootElements) {
+    for (let formRoot of weakLoginFormRootElements) {
+      if (!formRoot.isConnected) {
+        continue;
+      }
+
       if (formRoot instanceof Ci.nsIDOMHTMLFormElement) {
         // For now only perform capture upon navigation for FormLike's without
         // a <form> to avoid capture from both an earlyformsubmit and
         // navigation for the same "form".
         log("Ignoring navigation for the form root to avoid multiple prompts " +
             "since it was for a real <form>");
         continue;
       }
@@ -1435,17 +1443,17 @@ function UserAutoCompleteResult(aSearchS
   }
 
   this._showInsecureFieldWarning = (!isSecure && LoginHelper.showInsecureFieldWarning) ? 1 : 0;
   this.searchString = aSearchString;
   this.logins = matchingLogins.sort(loginSort);
   this.matchCount = matchingLogins.length + this._showInsecureFieldWarning;
   this._messageManager = messageManager;
   this._stringBundle = Services.strings.createBundle("chrome://passwordmgr/locale/passwordmgr.properties");
-  this._dateAndTimeFormatter = Services.intl.createDateTimeFormat(undefined, { dateStyle: "medium" });
+  this._dateAndTimeFormatter = new Services.intl.DateTimeFormat(undefined, { dateStyle: "medium" });
 
   this._isPasswordField = isPasswordField;
 
   this._duplicateUsernames = findDuplicates(matchingLogins);
 
   if (this.matchCount > 0) {
     this.searchResult = Ci.nsIAutoCompleteResult.RESULT_SUCCESS;
     this.defaultIndex = 0;
--- a/toolkit/components/passwordmgr/LoginManagerContextMenu.jsm
+++ b/toolkit/components/passwordmgr/LoginManagerContextMenu.jsm
@@ -186,12 +186,12 @@ var LoginManagerContextMenu = {
 };
 
 XPCOMUtils.defineLazyGetter(LoginManagerContextMenu, "_stringBundle", function() {
   return Services.strings.
          createBundle("chrome://passwordmgr/locale/passwordmgr.properties");
 });
 
 XPCOMUtils.defineLazyGetter(LoginManagerContextMenu, "dateAndTimeFormatter", function() {
-  return Services.intl.createDateTimeFormat(undefined, {
+  return new Services.intl.DateTimeFormat(undefined, {
     dateStyle: "medium"
   });
 });
--- a/toolkit/components/passwordmgr/content/passwordManager.js
+++ b/toolkit/components/passwordmgr/content/passwordManager.js
@@ -55,19 +55,19 @@ let signonReloadDisplay = {
           break;
       }
       Services.obs.notifyObservers(null, "passwordmgr-dialog-updated");
     }
   }
 };
 
 // Formatter for localization.
-let dateFormatter = Services.intl.createDateTimeFormat(undefined,
+let dateFormatter = new Services.intl.DateTimeFormat(undefined,
                       { dateStyle: "medium" });
-let dateAndTimeFormatter = Services.intl.createDateTimeFormat(undefined,
+let dateAndTimeFormatter = new Services.intl.DateTimeFormat(undefined,
                              { dateStyle: "medium",
                                timeStyle: "short" });
 
 function Startup() {
   // be prepared to reload the display if anything changes
   Services.obs.addObserver(signonReloadDisplay, "passwordmgr-storage-changed");
 
   signonsTree = document.getElementById("signonsTree");
--- a/toolkit/components/passwordmgr/test/mochitest/test_password_field_autocomplete.html
+++ b/toolkit/components/passwordmgr/test/mochitest/test_password_field_autocomplete.html
@@ -129,17 +129,17 @@ async function reinitializeForm(index) {
   uname = $_(index, "uname");
   pword = $_(index, "pword");
   uname.value = "";
   pword.value = "";
   pword.focus();
 }
 
 function generateDateString(date) {
-  let dateAndTimeFormatter = Services.intl.createDateTimeFormat(undefined,
+  let dateAndTimeFormatter = new Services.intl.DateTimeFormat(undefined,
                              { dateStyle: "medium" });
   return dateAndTimeFormatter.format(date);
 }
 
 const DATE_NOW_STRING = generateDateString(new Date());
 
 // Check for expected username/password in form.
 function checkACFormPasswordField(expectedPassword) {
--- a/toolkit/components/passwordmgr/test/unit/test_context_menu.js
+++ b/toolkit/components/passwordmgr/test/unit/test_context_menu.js
@@ -97,17 +97,17 @@ function checkLoginItems(logins, items) 
         duplicates.add(login.username);
       }
       seen.add(login.username);
     }
     return duplicates;
   }
   let duplicates = findDuplicates(logins);
 
-  let dateAndTimeFormatter = Services.intl.createDateTimeFormat(undefined,
+  let dateAndTimeFormatter = new Services.intl.DateTimeFormat(undefined,
                              { dateStyle: "medium" });
   for (let login of logins) {
     if (login.username && !duplicates.has(login.username)) {
       // If login is not duplicate and we can't find an item for it, fail.
       if (!items.find(item => item.label == login.username)) {
         return false;
       }
       continue;
--- a/toolkit/components/passwordmgr/test/unit/test_user_autocomplete_result.js
+++ b/toolkit/components/passwordmgr/test/unit/test_user_autocomplete_result.js
@@ -19,17 +19,17 @@ matchingLogins.push(new nsLoginInfo("htt
 
 matchingLogins.push(new nsLoginInfo("http://mochi.test:8888", "http://autocomplete:8888", null,
                                     "testuser3", "testpass3", "uname", "pword"));
 
 matchingLogins.push(new nsLoginInfo("http://mochi.test:8888", "http://autocomplete:8888", null,
                                     "zzzuser4", "zzzpass4", "uname", "pword"));
 
 let meta = matchingLogins[0].QueryInterface(Ci.nsILoginMetaInfo);
-let dateAndTimeFormatter = Services.intl.createDateTimeFormat(undefined,
+let dateAndTimeFormatter = new Services.intl.DateTimeFormat(undefined,
                             { dateStyle: "medium" });
 let time = dateAndTimeFormatter.format(new Date(meta.timePasswordChanged));
 const LABEL_NO_USERNAME = "No username (" + time + ")";
 
 let expectedResults = [
   {
     insecureFieldWarningEnabled: true,
     insecureAutoFillFormsEnabled: true,
--- a/toolkit/components/payments/res/mixins/ObservedPropertiesMixin.js
+++ b/toolkit/components/payments/res/mixins/ObservedPropertiesMixin.js
@@ -27,17 +27,17 @@ function ObservedPropertiesMixin(superCl
         }
         // Convert attribute names from kebab-case to camelCase properties
         Object.defineProperty(this, name.replace(/-([a-z])/g, ($0, $1) => $1.toUpperCase()), {
           configurable: true,
           get() {
             return this.getAttribute(name);
           },
           set(value) {
-            if (value === null || value === undefined) {
+            if (value === null || value === undefined || value === false) {
               this.removeAttribute(name);
             } else {
               this.setAttribute(name, value);
             }
           },
         });
       }
     }
--- a/toolkit/components/places/PlacesRemoteTabsAutocompleteProvider.jsm
+++ b/toolkit/components/places/PlacesRemoteTabsAutocompleteProvider.jsm
@@ -111,18 +111,17 @@ Services.obs.addObserver(observe, "weave
 Services.prefs.addObserver(PREF_SHOW_REMOTE_ICONS, observe);
 observe(null, "nsPref:changed", PREF_SHOW_REMOTE_ICONS);
 
 // This public object is a static singleton.
 this.PlacesRemoteTabsAutocompleteProvider = {
   // a promise that resolves with an array of matching remote tabs.
   getMatches(searchString) {
     // If Sync isn't configured we bail early.
-    if (Weave === null ||
-        !Services.prefs.prefHasUserValue("services.sync.username")) {
+    if (!weaveXPCService.ready || !weaveXPCService.enabled) {
       return Promise.resolve([]);
     }
 
     let re = new RegExp(escapeRegExp(searchString), "i");
     let matches = [];
     let { tabs, clients } = ensureItems();
     for (let [url, { clientId, tab }] of tabs) {
       let title = tab.title;
--- a/toolkit/content/aboutTelemetry.js
+++ b/toolkit/content/aboutTelemetry.js
@@ -419,17 +419,17 @@ var PingPicker = {
     const today = new Date();
     today.setHours(0, 0, 0, 0);
     const yesterday = new Date(today);
     yesterday.setDate(today.getDate() - 1);
 
     for (let p of this._archivedPings) {
       pingTypes.add(p.type);
       const pingDate = new Date(p.timestampCreated);
-      const datetimeText = Services.intl.createDateTimeFormat(undefined, {
+      const datetimeText = new Services.intl.DateTimeFormat(undefined, {
           dateStyle: "short",
           timeStyle: "medium"
         }).format(pingDate);
       const pingName = `${datetimeText}, ${p.type}`;
 
       let option = document.createElement("option");
       let content = document.createTextNode(pingName);
       option.appendChild(content);
@@ -1455,32 +1455,39 @@ var Search = {
     adjustHeaderState();
     Array.from(document.querySelectorAll("section")).forEach((section) => {
       section.classList.toggle("active", section.id == "home-section");
     });
   },
 
   homeSearch(text) {
     changeUrlSearch(text);
+    removeSearchSectionTitles();
     if (text === "") {
       this.resetHome();
       return;
     }
     document.getElementById("main").classList.add("search");
     let title = bundle.formatStringFromName("resultsForSearch", [text], 1);
     adjustHeaderState(title);
     let noSearchResults = true;
     Array.from(document.querySelectorAll("section")).forEach((section) => {
       if (section.id == "home-section" || section.id == "raw-payload-section") {
         section.classList.remove("active");
         return;
       }
       section.classList.add("active");
       let sectionHidden = this.search(text, section);
-      if (noSearchResults && !sectionHidden) {
+      if (!sectionHidden) {
+        let sectionTitle = document.querySelector(`.category[value="${section.id}"] .category-name`).textContent;
+        let sectionDataDiv = document.querySelector(`#${section.id}.has-data.active .data`);
+        let titleDiv = document.createElement("h1");
+        titleDiv.classList.add("data", "search-section-title");
+        titleDiv.textContent = sectionTitle;
+        section.insertBefore(titleDiv, sectionDataDiv);
         noSearchResults = false;
       }
     });
     this.updateNoResults(text, noSearchResults);
   }
 };
 
 /*
@@ -1869,32 +1876,40 @@ function displayProcessesSelector(select
     "keyed-histograms-section",
     "events-section"
   ];
   let processes = document.getElementById("processes");
   processes.hidden = !whitelist.includes(selectedSection);
 }
 
 function refreshSearch() {
+  removeSearchSectionTitles();
   let selectedSection = document.querySelector(".category.selected").getAttribute("value");
   let search = document.getElementById("search");
   if (!Search.blacklist.includes(selectedSection)) {
     Search.search(search.value);
   }
 }
 
 function adjustSearchState() {
+  removeSearchSectionTitles();
   let selectedSection = document.querySelector(".category.selected").getAttribute("value");
   let search = document.getElementById("search");
   search.value = "";
   search.hidden = Search.blacklist.includes(selectedSection);
   document.getElementById("no-search-results").classList.add("hidden");
   Search.search(""); // reinitialize search state.
 }
 
+function removeSearchSectionTitles() {
+  for (let sectionTitleDiv of Array.from(document.getElementsByClassName("search-section-title"))) {
+    sectionTitleDiv.remove();
+  }
+}
+
 function adjustSection() {
   let selectedCategory = document.querySelector(".category.selected");
   if (!selectedCategory.classList.contains("has-data")) {
     PingPicker._showStructuredPingData();
   }
 }
 
 function adjustHeaderState(title = null) {
--- a/toolkit/crashreporter/content/crashes.js
+++ b/toolkit/crashreporter/content/crashes.js
@@ -68,18 +68,18 @@ function populateReportList() {
     document.getElementById("reportList").style.display = "none";
     document.getElementById("noReports").style.display = "block";
     return;
   }
 
   var dateFormatter;
   var timeFormatter;
   try {
-    dateFormatter = Services.intl.createDateTimeFormat(undefined, { dateStyle: "short" });
-    timeFormatter = Services.intl.createDateTimeFormat(undefined, { timeStyle: "short" });
+    dateFormatter = new Services.intl.DateTimeFormat(undefined, { dateStyle: "short" });
+    timeFormatter = new Services.intl.DateTimeFormat(undefined, { timeStyle: "short" });
   } catch (e) {
     // XXX Fallback to be removed once bug 1215247 is complete
     // and the Intl API is available on all platforms.
     dateFormatter = {
       format(date) {
         return date.toLocaleDateString();
       }
     };
--- a/toolkit/mozapps/downloads/DownloadUtils.jsm
+++ b/toolkit/mozapps/downloads/DownloadUtils.jsm
@@ -344,17 +344,17 @@ this.DownloadUtils = {
     // Figure out when today begins
     let today = new Date(aNow.getFullYear(), aNow.getMonth(), aNow.getDate());
 
     let dateTimeCompact;
     let dateTimeFull;
 
     // Figure out if the time is from today, yesterday, this week, etc.
     if (aDate >= today) {
-      let dts = Services.intl.createDateTimeFormat(undefined, {
+      let dts = new Services.intl.DateTimeFormat(undefined, {
         timeStyle: "short"
       });
       dateTimeCompact = dts.format(aDate);
     } else if (today - aDate < (MS_PER_DAY)) {
       // After yesterday started, show yesterday
       dateTimeCompact = gBundle.GetStringFromName(gStr.yesterday);
     } else if (today - aDate < (6 * MS_PER_DAY)) {
       // After last week started, show day of week
@@ -364,17 +364,17 @@ this.DownloadUtils = {
       dateTimeCompact = aDate.toLocaleString(undefined, {
                           month: "long",
                           day: "numeric"
       });
     }
 
     const dtOptions = { dateStyle: "long", timeStyle: "short" };
     dateTimeFull =
-      Services.intl.createDateTimeFormat(undefined, dtOptions).format(aDate);
+      new Services.intl.DateTimeFormat(undefined, dtOptions).format(aDate);
 
     return [dateTimeCompact, dateTimeFull];
   },
 
   /**
    * Get the appropriate display host string for a URI string depending on if
    * the URI has an eTLD + 1, is an IP address, a local file, or other protocol
    *
--- a/toolkit/mozapps/downloads/tests/unit/test_DownloadUtils.js
+++ b/toolkit/mozapps/downloads/tests/unit/test_DownloadUtils.js
@@ -73,37 +73,37 @@ function testAllGetReadableDates() {
   const today_11_30     = new Date(2000, 11, 31, 11, 30, 15);
   const today_12_30     = new Date(2000, 11, 31, 12, 30, 15);
   const yesterday_11_30 = new Date(2000, 11, 30, 11, 30, 15);
   const yesterday_12_30 = new Date(2000, 11, 30, 12, 30, 15);
   const twodaysago      = new Date(2000, 11, 29, 11, 30, 15);
   const sixdaysago      = new Date(2000, 11, 25, 11, 30, 15);
   const sevendaysago    = new Date(2000, 11, 24, 11, 30, 15);
 
-  let cDtf = Services.intl.createDateTimeFormat;
+  let cDtf = Services.intl.DateTimeFormat;
 
   testGetReadableDates(today_11_30,
-                       cDtf(undefined, {timeStyle: "short"}).format(today_11_30));
+                       (new cDtf(undefined, {timeStyle: "short"})).format(today_11_30));
   testGetReadableDates(today_12_30,
-                       cDtf(undefined, {timeStyle: "short"}).format(today_12_30));
+                       (new cDtf(undefined, {timeStyle: "short"})).format(today_12_30));
 
   testGetReadableDates(yesterday_11_30, "Yesterday");
   testGetReadableDates(yesterday_12_30, "Yesterday");
   testGetReadableDates(twodaysago,
                        twodaysago.toLocaleDateString(undefined, { weekday: "long" }));
   testGetReadableDates(sixdaysago,
                        sixdaysago.toLocaleDateString(undefined, { weekday: "long" }));
   testGetReadableDates(sevendaysago,
                        sevendaysago.toLocaleDateString(undefined, { month: "long" }) + " " +
                        sevendaysago.getDate().toString().padStart(2, "0"));
 
   let [, dateTimeFull] = DownloadUtils.getReadableDates(today_11_30);
 
   const dtOptions = { dateStyle: "long", timeStyle: "short" };
-  Assert.equal(dateTimeFull, cDtf(undefined, dtOptions).format(today_11_30));
+  Assert.equal(dateTimeFull, (new cDtf(undefined, dtOptions)).format(today_11_30));
 }
 
 function run_test() {
   testConvertByteUnits(-1, "-1", "bytes");
   testConvertByteUnits(1, _("1"), "bytes");
   testConvertByteUnits(42, _("42"), "bytes");
   testConvertByteUnits(123, _("123"), "bytes");
   testConvertByteUnits(1024, _("1.0"), "KB");
--- a/xpcom/threads/nsThreadUtils.cpp
+++ b/xpcom/threads/nsThreadUtils.cpp
@@ -35,19 +35,26 @@ NS_IMPL_ISUPPORTS(IdlePeriod, nsIIdlePer
 
 NS_IMETHODIMP
 IdlePeriod::GetIdlePeriodHint(TimeStamp* aIdleDeadline)
 {
   *aIdleDeadline = TimeStamp();
   return NS_OK;
 }
 
+// NS_IMPL_NAMED_* relies on the mName field, which is not present on
+// release or beta. Instead, fall back to using "Runnable" for all
+// runnables.
+#ifdef RELEASE_OR_BETA
+NS_IMPL_ISUPPORTS(Runnable, nsIRunnable, nsINamed)
+#else
 NS_IMPL_NAMED_ADDREF(Runnable, mName)
 NS_IMPL_NAMED_RELEASE(Runnable, mName)
 NS_IMPL_QUERY_INTERFACE(Runnable, nsIRunnable, nsINamed)
+#endif
 
 NS_IMETHODIMP
 Runnable::Run()
 {
   // Do nothing
   return NS_OK;
 }