Merge inbound to mozilla-central. a=merge
author: Ciure Andrei <aciure@mozilla.com>
Sat, 13 Oct 2018 12:36:04 +0300
changeset 441109 94a62c1aad526dc24dc9186a6ccebb0db276ee87
parent 440981 0dfb3afc73572ea842e1fbf6ee08644e5570d79d (current diff)
parent 441108 2cc5b3f51ec4fa0501af01e4ece67be0eed814c4 (diff)
child 441110 661d9ca2bed9ec587170e2ec697844b444874a02
child 441114 27ce807450490a5a443c71b3431d5d51c3223ece
push id: 34842
push user: aciure@mozilla.com
push date: Sat, 13 Oct 2018 09:36:47 +0000
treeherder: mozilla-central@94a62c1aad52 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: merge
milestone: 64.0a1
first release with
nightly linux32
94a62c1aad52 / 64.0a1 / 20181013100102 / files
nightly linux64
94a62c1aad52 / 64.0a1 / 20181013100102 / files
nightly mac
94a62c1aad52 / 64.0a1 / 20181013100102 / files
nightly win32
94a62c1aad52 / 64.0a1 / 20181013100102 / files
nightly win64
94a62c1aad52 / 64.0a1 / 20181013100102 / files
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
releases
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Merge inbound to mozilla-central. a=merge
Cargo.lock
browser/app/profile/firefox.js
browser/base/content/tabbrowser.js
browser/components/uitour/UITour.jsm
build/moz.configure/toolchain.configure
dom/media/Latency.cpp
dom/media/Latency.h
dom/media/test/graph_latency.py
gfx/webrender/res/cs_clip_line.glsl
layout/reftests/native-theme/403458-winmenu-ltr.xul
layout/reftests/native-theme/403458-winmenu-rtl.xul
moz.configure
testing/web-platform/meta/background-fetch/abort.https.window.js.ini
testing/web-platform/meta/html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-block-margins.html.ini
testing/web-platform/tests/background-fetch/abort.https.window.js
testing/web-platform/tests/conformance-checkers/html/elements/input/pattern-asterisk-novalid.html
testing/web-platform/tests/conformance-checkers/html/elements/input/pattern-paren-novalid.html
testing/web-platform/tests/tools/wptserve/.travis.yml
testing/web-platform/tests/webstorage/event_constructor_eventinit.html
testing/web-platform/tests/webstorage/event_local_storageeventinit.html
testing/web-platform/tests/webstorage/event_session_storageeventinit.html
widget/reftests/507947.html
--- a/browser/components/preferences/in-content/preferences.xul
+++ b/browser/components/preferences/in-content/preferences.xul
@@ -191,25 +191,24 @@
 #include sync.xul
         </vbox>
       </vbox>
     </vbox>
   </hbox>
 
   <stack id="dialogStack" hidden="true"/>
   <vbox id="dialogTemplate" class="dialogOverlay" align="center" pack="center" topmost="true" hidden="true">
-    <groupbox class="dialogBox"
-              orient="vertical"
-              pack="end"
-              role="dialog"
-              aria-labelledby="dialogTitle">
-      <caption flex="1" align="center">
-        <label class="dialogTitle" flex="1"></label>
+    <vbox class="dialogBox"
+          pack="end"
+          role="dialog"
+          aria-labelledby="dialogTitle">
+      <hbox class="dialogTitleBar" align="center">
+        <label class="dialogTitle" flex="1"/>
         <button class="dialogClose close-icon"
                 data-l10n-id="close-button"/>
-      </caption>
+      </hbox>
       <browser class="dialogFrame"
                autoscroll="false"
                disablehistory="true"/>
-    </groupbox>
+    </vbox>
   </vbox>
   </stack>
 </page>
--- a/browser/components/preferences/in-content/subdialogs.js
+++ b/browser/components/preferences/in-content/subdialogs.js
@@ -13,20 +13,21 @@
  * @param {DOMNode} template: The template is copied to create a new dialog.
  * @param {DOMNode} parentElement: New dialog is appended onto parentElement.
  * @param {String}  id: A unique identifier for the dialog.
  */
 function SubDialog({template, parentElement, id}) {
   this._id = id;
 
   this._overlay = template.cloneNode(true);
+  this._box = this._overlay.querySelector(".dialogBox");
+  this._titleBar = this._overlay.querySelector(".dialogTitleBar");
+  this._titleElement = this._overlay.querySelector(".dialogTitle");
+  this._closeButton = this._overlay.querySelector(".dialogClose");
   this._frame = this._overlay.querySelector(".dialogFrame");
-  this._box = this._overlay.querySelector(".dialogBox");
-  this._closeButton = this._overlay.querySelector(".dialogClose");
-  this._titleElement = this._overlay.querySelector(".dialogTitle");
 
   this._overlay.id = `dialogOverlay-${id}`;
   this._frame.setAttribute("name", `dialogFrame-${id}`);
   this._frameCreated = new Promise(resolve => {
     this._frame.addEventListener("load", resolve, {once: true});
   });
 
   parentElement.appendChild(this._overlay);
@@ -276,26 +277,24 @@ SubDialog.prototype = {
     // until the promise is fulfilled.
     if (aEvent.target.contentDocument.mozSubdialogReady) {
       await aEvent.target.contentDocument.mozSubdialogReady;
     }
 
     // Do this on load to wait for the CSS to load and apply before calculating the size.
     let docEl = this._frame.contentDocument.documentElement;
 
-    let groupBoxTitle = document.getAnonymousElementByAttribute(this._box, "class", "groupbox-title");
-    let groupBoxTitleHeight = groupBoxTitle.clientHeight +
-                              parseFloat(getComputedStyle(groupBoxTitle).borderBottomWidth);
+    let titleBarHeight = this._titleBar.clientHeight +
+                         parseFloat(getComputedStyle(this._titleBar).borderBottomWidth);
 
-    let groupBoxBody = document.getAnonymousElementByAttribute(this._box, "class", "groupbox-body");
     // These are deduced from styles which we don't change, so it's safe to get them now:
-    let boxVerticalPadding = 2 * parseFloat(getComputedStyle(groupBoxBody).paddingTop);
-    let boxHorizontalPadding = 2 * parseFloat(getComputedStyle(groupBoxBody).paddingLeft);
     let boxHorizontalBorder = 2 * parseFloat(getComputedStyle(this._box).borderLeftWidth);
     let boxVerticalBorder = 2 * parseFloat(getComputedStyle(this._box).borderTopWidth);
+    let frameHorizontalMargin = 2 * parseFloat(getComputedStyle(this._frame).marginLeft);
+    let frameVerticalMargin = 2 * parseFloat(getComputedStyle(this._frame).marginTop);
 
     // The difference between the frame and box shouldn't change, either:
     let boxRect = this._box.getBoundingClientRect();
     let frameRect = this._frame.getBoundingClientRect();
     let frameSizeDifference = (frameRect.top - boxRect.top) + (boxRect.bottom - frameRect.bottom);
 
     // Then determine and set a bunch of width stuff:
     let frameMinWidth = docEl.style.width;
@@ -307,17 +306,17 @@ SubDialog.prototype = {
         frameMinWidth = docEl.scrollWidth;
       }
       frameMinWidth += "px";
     }
     let frameWidth = docEl.getAttribute("width") ? docEl.getAttribute("width") + "px" :
                      frameMinWidth;
     this._frame.style.width = frameWidth;
     this._box.style.minWidth = "calc(" +
-                               (boxHorizontalBorder + boxHorizontalPadding) +
+                               (boxHorizontalBorder + frameHorizontalMargin) +
                                "px + " + frameMinWidth + ")";
 
     // Now do the same but for the height. We need to do this afterwards because otherwise
     // XUL assumes we'll optimize for height and gives us "wrong" values which then are no
     // longer correct after we set the width:
     let frameMinHeight = docEl.style.height || docEl.scrollHeight + "px";
     let frameHeight = docEl.getAttribute("height") ? docEl.getAttribute("height") + "px" :
                                                      frameMinHeight;
@@ -347,17 +346,17 @@ SubDialog.prototype = {
       let containers = this._frame.contentDocument.querySelectorAll(".largeDialogContainer");
       for (let container of containers) {
         container.classList.add("doScroll");
       }
     }
 
     this._frame.style.height = frameHeight;
     this._box.style.minHeight = "calc(" +
-                                (boxVerticalBorder + groupBoxTitleHeight + boxVerticalPadding) +
+                                (boxVerticalBorder + titleBarHeight + frameVerticalMargin) +
                                 "px + " + frameMinHeight + ")";
 
     this._overlay.dispatchEvent(new CustomEvent("dialogopen", {
       bubbles: true,
       detail: { dialog: this },
     }));
     this._overlay.style.visibility = "visible";
     this._overlay.style.opacity = ""; // XXX: focus hack continued from _onContentLoaded
--- a/browser/components/shell/content/setDesktopBackground.xul
+++ b/browser/components/shell/content/setDesktopBackground.xul
@@ -56,24 +56,22 @@
       </menulist>
       <spacer flex="1"/>
       <label value="&color.label;"/>
       <html:input id="desktopColor"
                   type="color"
                   onchange="gSetBackground.updateColor(this.value);"/> 
     </hbox>
 #endif
-    <groupbox align="center">
-      <caption label="&preview.label;"/>
-      <stack>
-        <!-- if width and height are not present, they default to 300x150 and stretch the stack -->
-        <html:canvas id="screen" width="1" height="1"/>
-        <image id="monitor"/>
-      </stack>
-    </groupbox>
+
+    <stack>
+      <!-- if width and height are not present, they default to 300x150 and stretch the stack -->
+      <html:canvas id="screen" width="1" height="1" role="presentation"/>
+      <image id="monitor"/>
+    </stack>
 
 #ifdef XP_MACOSX
     <separator/>
 
     <hbox align="right">
       <button id="setDesktopBackground"
               label="&setDesktopBackground.title;"
               oncommand="gSetBackground.setDesktopBackground();"/>
--- a/browser/locales/en-US/chrome/browser/setDesktopBackground.dtd
+++ b/browser/locales/en-US/chrome/browser/setDesktopBackground.dtd
@@ -3,13 +3,12 @@
    - file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
 
 <!ENTITY position.label             "Position:">
 <!ENTITY tile.label                 "Tile">
 <!ENTITY center.label               "Center">
 <!ENTITY stretch.label              "Stretch">
 <!ENTITY fill.label                 "Fill">
 <!ENTITY fit.label                  "Fit">
-<!ENTITY preview.label              "Preview">
 <!ENTITY color.label                "Color:">
 <!ENTITY setDesktopBackground.title "Set Desktop Background">
 <!ENTITY openDesktopPrefs.label     "Open Desktop Preferences">
 <!ENTITY closeWindow.key            "w">
--- a/browser/themes/shared/incontentprefs/preferences.inc.css
+++ b/browser/themes/shared/incontentprefs/preferences.inc.css
@@ -502,42 +502,39 @@ button > hbox > label {
 
 .dialogBox[resizable="true"] {
   resize: both;
   overflow: hidden;
   min-height: 20em;
   min-width: 66ch;
 }
 
-.dialogBox > .groupbox-title {
+.dialogTitleBar {
   margin-top: 0;
   padding: 3.5px 0;
   background-color: #F1F1F1;
   border-bottom: 1px solid #C1C1C1;
 }
 
 .dialogTitle {
   font-size: .9em;
+  font-weight: 600;
   text-align: center;
   -moz-user-select: none;
 }
 
 .close-icon {
   border: none;
   background: none !important;
   min-width: 0;
   min-height: auto;
 }
 
-.dialogBox > .groupbox-body {
-  -moz-appearance: none;
-  padding: 20px;
-}
-
 .dialogFrame {
+  margin: 20px;
   -moz-box-flex: 1;
   /* Default dialog dimensions */
   width: 66ch;
 }
 
 .largeDialogContainer.doScroll {
   overflow-y: auto;
   -moz-box-flex: 1;
--- a/dom/base/Location.cpp
+++ b/dom/base/Location.cpp
@@ -427,54 +427,26 @@ Location::GetHref(nsAString& aHref)
   AppendUTF8toUTF16(uriString, aHref);
   return NS_OK;
 }
 
 void
 Location::SetHref(const nsAString& aHref,
                   ErrorResult& aRv)
 {
-  JSContext *cx = nsContentUtils::GetCurrentJSContext();
-  if (cx) {
-    aRv = SetHrefWithContext(cx, aHref, false);
-    return;
-  }
-
-  nsAutoString oldHref;
-  aRv = GetHref(oldHref);
-  if (NS_WARN_IF(aRv.Failed())) {
-    return;
-  }
-
-  nsCOMPtr<nsIURI> oldUri;
-  aRv = NS_NewURI(getter_AddRefs(oldUri), oldHref);
-  if (NS_WARN_IF(aRv.Failed())) {
-    return;
-  }
-
-  aRv = SetHrefWithBase(aHref, oldUri, false);
-  if (NS_WARN_IF(aRv.Failed())) {
-    return;
-  }
+  DoSetHref(aHref, false, aRv);
 }
 
-nsresult
-Location::SetHrefWithContext(JSContext* cx, const nsAString& aHref,
-                             bool aReplace)
+void
+Location::DoSetHref(const nsAString& aHref, bool aReplace, ErrorResult& aRv)
 {
-  nsCOMPtr<nsIURI> base;
-
   // Get the source of the caller
-  nsresult result = GetSourceBaseURL(cx, getter_AddRefs(base));
+  nsCOMPtr<nsIURI> base = GetSourceBaseURL();
 
-  if (NS_FAILED(result)) {
-    return result;
-  }
-
-  return SetHrefWithBase(aHref, base, aReplace);
+  aRv = SetHrefWithBase(aHref, base, aReplace);
 }
 
 nsresult
 Location::SetHrefWithBase(const nsAString& aHref, nsIURI* aBase,
                           bool aReplace)
 {
   nsresult result;
   nsCOMPtr<nsIURI> newUri;
@@ -881,91 +853,52 @@ Location::Reload(bool aForceget)
   return rv;
 }
 
 void
 Location::Replace(const nsAString& aUrl,
                   nsIPrincipal& aSubjectPrincipal,
                   ErrorResult& aRv)
 {
-  if (JSContext *cx = nsContentUtils::GetCurrentJSContext()) {
-    aRv = SetHrefWithContext(cx, aUrl, true);
-    return;
-  }
-
-  nsAutoString oldHref;
-  aRv = GetHref(oldHref);
-  if (NS_WARN_IF(aRv.Failed())) {
-    return;
-  }
-
-  nsCOMPtr<nsIURI> oldUri;
-
-  aRv = NS_NewURI(getter_AddRefs(oldUri), oldHref);
-  if (NS_WARN_IF(aRv.Failed())) {
-    return;
-  }
-
-  aRv = SetHrefWithBase(aUrl, oldUri, true);
+  DoSetHref(aUrl, true, aRv);
 }
 
 void
 Location::Assign(const nsAString& aUrl,
                  nsIPrincipal& aSubjectPrincipal,
                  ErrorResult& aRv)
 {
   if (!CallerSubsumes(&aSubjectPrincipal)) {
     aRv.Throw(NS_ERROR_DOM_SECURITY_ERR);
     return;
   }
 
-  if (JSContext *cx = nsContentUtils::GetCurrentJSContext()) {
-    aRv = SetHrefWithContext(cx, aUrl, false);
-    return;
-  }
-
-  nsAutoString oldHref;
-  aRv = GetHref(oldHref);
-  if (NS_WARN_IF(aRv.Failed())) {
-    return;
-  }
-
-  nsCOMPtr<nsIURI> oldUri;
-  aRv = NS_NewURI(getter_AddRefs(oldUri), oldHref);
-  if (NS_WARN_IF(aRv.Failed())) {
-    return;
-  }
-
-  if (oldUri) {
-    aRv = SetHrefWithBase(aUrl, oldUri, false);
-  }
+  DoSetHref(aUrl, false, aRv);
 }
 
-nsresult
-Location::GetSourceBaseURL(JSContext* cx, nsIURI** sourceURL)
+already_AddRefed<nsIURI>
+Location::GetSourceBaseURL()
 {
-  *sourceURL = nullptr;
   nsIDocument* doc = GetEntryDocument();
   // If there's no entry document, we either have no Script Entry Point or one
   // that isn't a DOM Window.  This doesn't generally happen with the DOM, but
   // can sometimes happen with extension code in certain IPC configurations.  If
   // this happens, try falling back on the current document associated with the
   // docshell. If that fails, just return null and hope that the caller passed
   // an absolute URI.
   nsCOMPtr<nsIDocShell> docShell(do_QueryReferent(mDocShell));
   if (!doc && docShell) {
     nsCOMPtr<nsPIDOMWindowOuter> docShellWin =
       do_QueryInterface(docShell->GetScriptGlobalObject());
     if (docShellWin) {
       doc = docShellWin->GetDoc();
     }
   }
-  NS_ENSURE_TRUE(doc, NS_OK);
-  *sourceURL = doc->GetBaseURI().take();
-  return NS_OK;
+  NS_ENSURE_TRUE(doc, nullptr);
+  return doc->GetBaseURI();
 }
 
 bool
 Location::CallerSubsumes(nsIPrincipal* aSubjectPrincipal)
 {
   MOZ_ASSERT(aSubjectPrincipal);
 
   // Get the principal associated with the location object.  Note that this is
--- a/dom/base/Location.h
+++ b/dom/base/Location.h
@@ -164,20 +164,23 @@ protected:
   // fetched from as the URI instead of the jar: uri itself.  Pass in
   // true for aGetInnermostURI when that's the case.
   // Note, this method can return NS_OK with a null value for aURL. This happens
   // if the docShell is null.
   nsresult GetURI(nsIURI** aURL, bool aGetInnermostURI = false);
   nsresult SetURI(nsIURI* aURL, bool aReplace = false);
   nsresult SetHrefWithBase(const nsAString& aHref, nsIURI* aBase,
                            bool aReplace);
-  nsresult SetHrefWithContext(JSContext* cx, const nsAString& aHref,
-                              bool aReplace);
+
+  // Helper for Assign/SetHref/Replace
+  void DoSetHref(const nsAString& aHref, bool aReplace, ErrorResult& aRv);
 
-  nsresult GetSourceBaseURL(JSContext* cx, nsIURI** sourceURL);
+  // Get the base URL we should be using for our relative URL
+  // resolution for SetHref/Assign/Replace.
+  already_AddRefed<nsIURI> GetSourceBaseURL();
   nsresult CheckURL(nsIURI *url, nsDocShellLoadInfo** aLoadInfo);
   bool CallerSubsumes(nsIPrincipal* aSubjectPrincipal);
 
   nsString mCachedHash;
   nsCOMPtr<nsPIDOMWindowInner> mInnerWindow;
   nsWeakPtr mDocShell;
 };
 
--- a/dom/media/AudioSegment.cpp
+++ b/dom/media/AudioSegment.cpp
@@ -2,17 +2,16 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "AudioSegment.h"
 
 #include "AudioMixer.h"
 #include "AudioChannelFormat.h"
-#include "Latency.h"
 #include <speex/speex_resampler.h>
 
 namespace mozilla {
 
 const uint8_t SilentChannel::gZeroChannel[MAX_AUDIO_SAMPLE_SIZE*SilentChannel::AUDIO_PROCESSING_FRAMES] = {0};
 
 template<>
 const float* SilentChannel::ZeroChannel<float>()
@@ -159,17 +158,17 @@ AudioSegment::Mix(AudioMixer& aMixer, ui
   if (offsetSamples) {
     MOZ_ASSERT(offsetSamples == outBufferLength / aOutputChannels,
                "We forgot to write some samples?");
     aMixer.Mix(buf.Elements(), aOutputChannels, offsetSamples, aSampleRate);
   }
 }
 
 void
-AudioSegment::WriteTo(uint64_t aID, AudioMixer& aMixer, uint32_t aOutputChannels, uint32_t aSampleRate)
+AudioSegment::WriteTo(AudioMixer& aMixer, uint32_t aOutputChannels, uint32_t aSampleRate)
 {
   AutoTArray<AudioDataValue,SilentChannel::AUDIO_PROCESSING_FRAMES*GUESS_AUDIO_CHANNELS> buf;
   // Offset in the buffer that will be written to the mixer, in samples.
   uint32_t offset = 0;
 
   if (GetDuration() <= 0) {
     MOZ_ASSERT(GetDuration() == 0);
     return;
@@ -193,23 +192,16 @@ AudioSegment::WriteTo(uint64_t aID, Audi
         // The mixer is expecting interleaved data, so this is ok.
         PodZero(buf.Elements() + offset, c.mDuration * aOutputChannels);
         break;
       default:
         MOZ_ASSERT(false, "Not handled");
     }
 
     offset += c.mDuration * aOutputChannels;
-
-    if (!c.mTimeStamp.IsNull()) {
-      TimeStamp now = TimeStamp::Now();
-      // would be more efficient to c.mTimeStamp to ms on create time then pass here
-      LogTime(AsyncLatencyLogger::AudioMediaStreamTrack, aID,
-              (now - c.mTimeStamp).ToMilliseconds(), c.mTimeStamp);
-    }
   }
 
   if (offset) {
     aMixer.Mix(buf.Elements(), aOutputChannels, offset / aOutputChannels, aSampleRate);
   }
 }
 
 } // namespace mozilla
--- a/dom/media/AudioSegment.h
+++ b/dom/media/AudioSegment.h
@@ -285,19 +285,16 @@ struct AudioChunk {
 
   StreamTime mDuration = 0; // in frames within the buffer
   RefPtr<ThreadSharedObject> mBuffer; // the buffer object whose lifetime is managed; null means data is all zeroes
   // one pointer per channel; empty if and only if mBuffer is null
   AutoTArray<const void*,GUESS_AUDIO_CHANNELS> mChannelData;
   float mVolume = 1.0f; // volume multiplier to apply
   // format of frames in mBuffer (or silence if mBuffer is null)
   SampleFormat mBufferFormat = AUDIO_FORMAT_SILENCE;
-#ifdef MOZILLA_INTERNAL_API
-  mozilla::TimeStamp mTimeStamp;           // time at which this has been fetched from the MediaEngine
-#endif
   // principalHandle for the data in this chunk.
   // This can be compared to an nsIPrincipal* when back on main thread.
   PrincipalHandle mPrincipalHandle = PRINCIPAL_HANDLE_NONE;
 };
 
 /**
  * A list of audio samples consisting of a sequence of slices of SharedBuffers.
  * The audio rate is determined by the track, not stored in this class.
@@ -379,63 +376,54 @@ public:
     chunk->mBuffer = aBuffer;
 
     MOZ_ASSERT(chunk->mBuffer || aChannelData.IsEmpty(), "Appending invalid data ?");
 
     for (uint32_t channel = 0; channel < aChannelData.Length(); ++channel) {
       chunk->mChannelData.AppendElement(aChannelData[channel]);
     }
     chunk->mBufferFormat = AUDIO_FORMAT_FLOAT32;
-#ifdef MOZILLA_INTERNAL_API
-    chunk->mTimeStamp = TimeStamp::Now();
-#endif
     chunk->mPrincipalHandle = aPrincipalHandle;
   }
   void AppendFrames(already_AddRefed<ThreadSharedObject> aBuffer,
                     const nsTArray<const int16_t*>& aChannelData,
                     int32_t aDuration, const PrincipalHandle& aPrincipalHandle)
   {
     AudioChunk* chunk = AppendChunk(aDuration);
     chunk->mBuffer = aBuffer;
 
     MOZ_ASSERT(chunk->mBuffer || aChannelData.IsEmpty(), "Appending invalid data ?");
 
     for (uint32_t channel = 0; channel < aChannelData.Length(); ++channel) {
       chunk->mChannelData.AppendElement(aChannelData[channel]);
     }
     chunk->mBufferFormat = AUDIO_FORMAT_S16;
-#ifdef MOZILLA_INTERNAL_API
-    chunk->mTimeStamp = TimeStamp::Now();
-#endif
     chunk->mPrincipalHandle = aPrincipalHandle;
 
   }
   // Consumes aChunk, and returns a pointer to the persistent copy of aChunk
   // in the segment.
   AudioChunk* AppendAndConsumeChunk(AudioChunk* aChunk)
   {
     AudioChunk* chunk = AppendChunk(aChunk->mDuration);
     chunk->mBuffer = aChunk->mBuffer.forget();
     chunk->mChannelData.SwapElements(aChunk->mChannelData);
 
     MOZ_ASSERT(chunk->mBuffer || aChunk->mChannelData.IsEmpty(), "Appending invalid data ?");
 
     chunk->mVolume = aChunk->mVolume;
     chunk->mBufferFormat = aChunk->mBufferFormat;
-#ifdef MOZILLA_INTERNAL_API
-    chunk->mTimeStamp = TimeStamp::Now();
-#endif
     chunk->mPrincipalHandle = aChunk->mPrincipalHandle;
     return chunk;
   }
   void ApplyVolume(float aVolume);
   // Mix the segment into a mixer, interleaved. This is useful to output a
   // segment to a system audio callback. It up or down mixes to aChannelCount
   // channels.
-  void WriteTo(uint64_t aID, AudioMixer& aMixer, uint32_t aChannelCount,
+  void WriteTo(AudioMixer& aMixer, uint32_t aChannelCount,
                uint32_t aSampleRate);
   // Mix the segment into a mixer, keeping it planar, up or down mixing to
   // aChannelCount channels.
   void Mix(AudioMixer& aMixer, uint32_t aChannelCount, uint32_t aSampleRate);
 
   int ChannelCount() {
     NS_WARNING_ASSERTION(
       !mChunks.IsEmpty(),
--- a/dom/media/GraphDriver.cpp
+++ b/dom/media/GraphDriver.cpp
@@ -1032,22 +1032,22 @@ AudioCallbackDriver::DataCallback(const 
   GraphImpl()->NotifyOutputData(aOutputBuffer, static_cast<size_t>(aFrames),
                                 mSampleRate, mOutputChannels);
 
   if (!stillProcessing) {
     // About to hand over control of the graph.  Do not start a new driver if
     // StateCallback() receives an error for this stream while the main thread
     // or another driver has control of the graph.
     mShouldFallbackIfError = false;
+    RemoveMixerCallback();
+    // Update the flag before handing over the graph and going to drain.
+    mAudioThreadRunning = false;
     // Enter shutdown mode. The stable-state handler will detect this
     // and complete shutdown if the graph does not get restarted.
     mGraphImpl->SignalMainThreadCleanup();
-    RemoveMixerCallback();
-    // Update the flag before go to drain
-    mAudioThreadRunning = false;
     return aFrames - 1;
   }
 
   bool switching = false;
   {
     MonitorAutoLock mon(GraphImpl()->GetMonitor());
     switching = !!NextDriver();
   }
@@ -1057,18 +1057,18 @@ AudioCallbackDriver::DataCallback(const 
     // If the audio stream has not been started by the previous driver or
     // the graph itself, keep it alive.
     MonitorAutoLock mon(GraphImpl()->GetMonitor());
     if (!IsStarted()) {
       return aFrames;
     }
     LOG(LogLevel::Debug, ("%p: Switching to system driver.", GraphImpl()));
     RemoveMixerCallback();
+    mAudioThreadRunning = false;
     SwitchToNextDriver();
-    mAudioThreadRunning = false;
     // Returning less than aFrames starts the draining and eventually stops the
     // audio thread. This function will never get called again.
     return aFrames - 1;
   }
 
   return aFrames;
 }
 
deleted file mode 100644
--- a/dom/media/Latency.cpp
+++ /dev/null
@@ -1,229 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "Latency.h"
-#include "nsThreadUtils.h"
-#include "mozilla/Logging.h"
-#include <cmath>
-#include <algorithm>
-
-#include <mozilla/Services.h>
-#include <mozilla/StaticPtr.h>
-#include "nsContentUtils.h"
-
-using namespace mozilla;
-
-const char* LatencyLogIndex2Strings[] = {
-  "Audio MediaStreamTrack",
-  "Video MediaStreamTrack",
-  "Cubeb",
-  "AudioStream",
-  "NetEQ",
-  "AudioCapture Base",
-  "AudioCapture Samples",
-  "AudioTrackInsertion",
-  "MediaPipeline Audio Insertion",
-  "AudioTransmit",
-  "AudioReceive",
-  "MediaPipelineAudioPlayout",
-  "MediaStream Create",
-  "AudioStream Create",
-  "AudioSendRTP",
-  "AudioRecvRTP"
-};
-
-static StaticRefPtr<AsyncLatencyLogger> gAsyncLogger;
-
-LogModule*
-GetLatencyLog()
-{
-  static LazyLogModule sLog("MediaLatency");
-  return sLog;
-}
-
-class LogEvent : public Runnable
-{
-public:
-  LogEvent(AsyncLatencyLogger::LatencyLogIndex aIndex,
-           uint64_t aID,
-           int64_t aValue,
-           TimeStamp aTimeStamp)
-    : mozilla::Runnable("LogEvent")
-    , mIndex(aIndex)
-    , mID(aID)
-    , mValue(aValue)
-    , mTimeStamp(aTimeStamp)
-  {}
-  LogEvent(AsyncLatencyLogger::LatencyLogIndex aIndex,
-           uint64_t aID,
-           int64_t aValue)
-    : mozilla::Runnable("LogEvent")
-    , mIndex(aIndex)
-    , mID(aID)
-    , mValue(aValue)
-    , mTimeStamp(TimeStamp())
-  {}
-  ~LogEvent() {}
-
-  NS_IMETHOD Run() override {
-    AsyncLatencyLogger::Get(true)->WriteLog(mIndex, mID, mValue, mTimeStamp);
-    return NS_OK;
-  }
-
-protected:
-  AsyncLatencyLogger::LatencyLogIndex mIndex;
-  uint64_t mID;
-  int64_t mValue;
-  TimeStamp mTimeStamp;
-};
-
-void LogLatency(AsyncLatencyLogger::LatencyLogIndex aIndex, uint64_t aID, int64_t aValue)
-{
-  AsyncLatencyLogger::Get()->Log(aIndex, aID, aValue);
-}
-
-void LogTime(AsyncLatencyLogger::LatencyLogIndex aIndex, uint64_t aID, int64_t aValue)
-{
-  TimeStamp now = TimeStamp::Now();
-  AsyncLatencyLogger::Get()->Log(aIndex, aID, aValue, now);
-}
-
-void LogTime(AsyncLatencyLogger::LatencyLogIndex aIndex, uint64_t aID, int64_t aValue, TimeStamp &aTime)
-{
-  AsyncLatencyLogger::Get()->Log(aIndex, aID, aValue, aTime);
-}
-
-void LogTime(uint32_t aIndex, uint64_t aID, int64_t aValue)
-{
-  LogTime(static_cast<AsyncLatencyLogger::LatencyLogIndex>(aIndex), aID, aValue);
-}
-void LogTime(uint32_t aIndex, uint64_t aID, int64_t aValue, TimeStamp &aTime)
-{
-  LogTime(static_cast<AsyncLatencyLogger::LatencyLogIndex>(aIndex), aID, aValue, aTime);
-}
-void LogLatency(uint32_t aIndex, uint64_t aID, int64_t aValue)
-{
-  LogLatency(static_cast<AsyncLatencyLogger::LatencyLogIndex>(aIndex), aID, aValue);
-}
-
-/* static */
-void AsyncLatencyLogger::InitializeStatics()
-{
-  NS_ASSERTION(NS_IsMainThread(), "Main thread only");
-
-  //Make sure that the underlying logger is allocated.
-  GetLatencyLog();
-  gAsyncLogger = new AsyncLatencyLogger();
-}
-
-/* static */
-void AsyncLatencyLogger::ShutdownLogger()
-{
-  gAsyncLogger = nullptr;
-}
-
-/* static */
-AsyncLatencyLogger* AsyncLatencyLogger::Get(bool aStartTimer)
-{
-  // Users don't generally null-check the result since we should live longer than they
-  MOZ_ASSERT(gAsyncLogger);
-
-  if (aStartTimer) {
-    gAsyncLogger->Init();
-  }
-  return gAsyncLogger;
-}
-
-NS_IMPL_ISUPPORTS(AsyncLatencyLogger, nsIObserver)
-
-AsyncLatencyLogger::AsyncLatencyLogger()
-  : mThread(nullptr),
-    mMutex("AsyncLatencyLogger")
-{
-  NS_ASSERTION(NS_IsMainThread(), "Main thread only");
-  nsContentUtils::RegisterShutdownObserver(this);
-}
-
-AsyncLatencyLogger::~AsyncLatencyLogger()
-{
-  AsyncLatencyLogger::Shutdown();
-}
-
-void AsyncLatencyLogger::Shutdown()
-{
-  nsContentUtils::UnregisterShutdownObserver(this);
-
-  MutexAutoLock lock(mMutex);
-  if (mThread) {
-    mThread->Shutdown();
-  }
-  mStart = TimeStamp(); // make sure we don't try to restart it for any reason
-}
-
-void AsyncLatencyLogger::Init()
-{
-  MutexAutoLock lock(mMutex);
-  if (mStart.IsNull()) {
-    nsresult rv = NS_NewNamedThread("Latency Logger", getter_AddRefs(mThread));
-    NS_ENSURE_SUCCESS_VOID(rv);
-    mStart = TimeStamp::Now();
-  }
-}
-
-void AsyncLatencyLogger::GetStartTime(TimeStamp &aStart)
-{
-  MutexAutoLock lock(mMutex);
-  aStart = mStart;
-}
-
-nsresult
-AsyncLatencyLogger::Observe(nsISupports* aSubject, const char* aTopic,
-                            const char16_t* aData)
-{
-  MOZ_ASSERT(NS_IsMainThread());
-  if (strcmp(aTopic, NS_XPCOM_SHUTDOWN_OBSERVER_ID) == 0) {
-    Shutdown();
-  }
-  return NS_OK;
-}
-
-// aID is a sub-identifier (in particular a specific MediaStramTrack)
-void AsyncLatencyLogger::WriteLog(LatencyLogIndex aIndex, uint64_t aID, int64_t aValue,
-                                  TimeStamp aTimeStamp)
-{
-  if (aTimeStamp.IsNull()) {
-    MOZ_LOG(GetLatencyLog(), LogLevel::Debug,
-      ("Latency: %s,%" PRIu64 ",%" PRId64 ",%" PRId64,
-       LatencyLogIndex2Strings[aIndex], aID, GetTimeStamp(), aValue));
-  } else {
-    MOZ_LOG(GetLatencyLog(), LogLevel::Debug,
-      ("Latency: %s,%" PRIu64 ",%" PRId64 ",%" PRId64 ",%" PRId64,
-       LatencyLogIndex2Strings[aIndex], aID, GetTimeStamp(), aValue,
-       static_cast<int64_t>((aTimeStamp - gAsyncLogger->mStart).ToMilliseconds())));
-  }
-}
-
-int64_t AsyncLatencyLogger::GetTimeStamp()
-{
-  TimeDuration t = TimeStamp::Now() - mStart;
-  return t.ToMilliseconds();
-}
-
-void AsyncLatencyLogger::Log(LatencyLogIndex aIndex, uint64_t aID, int64_t aValue)
-{
-  TimeStamp null;
-  Log(aIndex, aID, aValue, null);
-}
-
-void AsyncLatencyLogger::Log(LatencyLogIndex aIndex, uint64_t aID, int64_t aValue, TimeStamp &aTime)
-{
-  if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
-    nsCOMPtr<nsIRunnable> event = new LogEvent(aIndex, aID, aValue, aTime);
-    if (mThread) {
-      mThread->Dispatch(event, NS_DISPATCH_NORMAL);
-    }
-  }
-}
deleted file mode 100644
--- a/dom/media/Latency.h
+++ /dev/null
@@ -1,99 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#ifndef MOZILLA_LATENCY_H
-#define MOZILLA_LATENCY_H
-
-#include "mozilla/TimeStamp.h"
-#include "mozilla/Logging.h"
-#include "nsCOMPtr.h"
-#include "nsIThread.h"
-#include "mozilla/Monitor.h"
-#include "nsISupportsImpl.h"
-#include "nsIObserver.h"
-
-class AsyncLatencyLogger;
-
-mozilla::LogModule* GetLatencyLog();
-
-// This class is a singleton. It is refcounted.
-class AsyncLatencyLogger : public nsIObserver
-{
-  NS_DECL_THREADSAFE_ISUPPORTS
-  NS_DECL_NSIOBSERVER
-
-public:
-
-  enum LatencyLogIndex {
-    AudioMediaStreamTrack = 0,
-    VideoMediaStreamTrack,
-    Cubeb,
-    AudioStream,
-    NetEQ,
-    AudioCaptureBase, // base time for capturing an audio stream
-    AudioCapture, // records number of samples captured and the time
-    AudioTrackInsertion, // # of samples inserted into a mediastreamtrack and the time
-    MediaPipelineAudioInsertion, // Timestamp and time of timestamp
-    AudioTransmit, // Timestamp and socket send time
-    AudioReceive, // Timestamp and receive time
-    MediaPipelineAudioPlayout, // Timestamp and playout into MST time
-    MediaStreamCreate, // Source and TrackUnion streams
-    AudioStreamCreate, // TrackUnion stream and AudioStream
-    AudioSendRTP,
-    AudioRecvRTP,
-    _MAX_INDEX
-  };
-  // Log with a null timestamp
-  void Log(LatencyLogIndex index, uint64_t aID, int64_t aValue);
-  // Log with a timestamp
-  void Log(LatencyLogIndex index, uint64_t aID, int64_t aValue,
-           mozilla::TimeStamp &aTime);
-  // Write a log message to NSPR
-  void WriteLog(LatencyLogIndex index, uint64_t aID, int64_t aValue,
-                mozilla::TimeStamp timestamp);
-  // Get the base time used by the logger for delta calculations
-  void GetStartTime(mozilla::TimeStamp &aStart);
-
-  static AsyncLatencyLogger* Get(bool aStartTimer = false);
-  static void InitializeStatics();
-  // After this is called, the global log object may go away
-  static void ShutdownLogger();
-private:
-  AsyncLatencyLogger();
-  virtual ~AsyncLatencyLogger();
-  int64_t GetTimeStamp();
-  void Init();
-  // Shut down the thread associated with this, and make sure it doesn't
-  // start up again.
-  void Shutdown();
-  // The thread on which the IO happens
-  nsCOMPtr<nsIThread> mThread;
-  // This can be initialized on multiple threads, but is protected by a
-  // monitor. After the initialization phase, it is accessed on the log
-  // thread only.
-  mozilla::TimeStamp mStart;
-  // This monitor protects mStart and mMediaLatencyLog for the
-  // initialization sequence. It is initialized at layout startup, and
-  // destroyed at layout shutdown.
-  mozilla::Mutex mMutex;
-};
-
-// need uint32_t versions for access from webrtc/trunk code
-// Log without a time delta
-void LogLatency(AsyncLatencyLogger::LatencyLogIndex index, uint64_t aID, int64_t aValue);
-void LogLatency(uint32_t index, uint64_t aID, int64_t aValue);
-// Log TimeStamp::Now() (as delta)
-void LogTime(AsyncLatencyLogger::LatencyLogIndex index, uint64_t aID, int64_t aValue);
-void LogTime(uint32_t index, uint64_t aID, int64_t aValue);
-// Log the specified time (as delta)
-void LogTime(AsyncLatencyLogger::LatencyLogIndex index, uint64_t aID, int64_t aValue,
-             mozilla::TimeStamp &aTime);
-
-// For generating unique-ish ids for logged sources
-#define LATENCY_STREAM_ID(source, trackID) \
-  ((((uint64_t) (source)) & ~0x0F) | (trackID))
-
-#endif
--- a/dom/media/MediaManager.cpp
+++ b/dom/media/MediaManager.cpp
@@ -54,17 +54,16 @@
 #include "mozilla/dom/MediaDevices.h"
 #include "mozilla/Base64.h"
 #include "mozilla/ipc/BackgroundChild.h"
 #include "mozilla/media/MediaChild.h"
 #include "mozilla/media/MediaTaskUtils.h"
 #include "MediaTrackConstraints.h"
 #include "VideoUtils.h"
 #include "ThreadSafeRefcountingWithMainThreadDestruction.h"
-#include "Latency.h"
 #include "nsProxyRelease.h"
 #include "nsVariant.h"
 
 // For snprintf
 #include "mozilla/Sprintf.h"
 
 #include "nsJSUtils.h"
 #include "nsGlobalWindow.h"
--- a/dom/media/MediaSegment.h
+++ b/dom/media/MediaSegment.h
@@ -8,17 +8,16 @@
 
 #include "nsTArray.h"
 #include "nsIPrincipal.h"
 #include "nsProxyRelease.h"
 #ifdef MOZILLA_INTERNAL_API
 #include "mozilla/TimeStamp.h"
 #endif
 #include <algorithm>
-#include "Latency.h"
 
 namespace mozilla {
 
 /**
  * Track or graph rate in Hz. Maximum 1 << TRACK_RATE_MAX_BITS Hz. This
  * maximum avoids overflow in conversions between track rates and conversions
  * from seconds.
  */
@@ -326,19 +325,16 @@ public:
     if (aDuration <= 0) {
       return;
     }
     if (!mChunks.IsEmpty() && mChunks[0].IsNull()) {
       mChunks[0].mDuration += aDuration;
     } else {
       mChunks.InsertElementAt(0)->SetNull(aDuration);
     }
-#ifdef MOZILLA_INTERNAL_API
-    mChunks[0].mTimeStamp = mozilla::TimeStamp::Now();
-#endif
     mDuration += aDuration;
   }
   void AppendNullData(StreamTime aDuration) override
   {
     if (aDuration <= 0) {
       return;
     }
     if (!mChunks.IsEmpty() && mChunks[mChunks.Length() - 1].IsNull()) {
@@ -413,22 +409,16 @@ public:
     return nullptr;
   }
 
   void RemoveLeading(StreamTime aDuration)
   {
     RemoveLeading(aDuration, 0);
   }
 
-#ifdef MOZILLA_INTERNAL_API
-  void GetStartTime(TimeStamp &aTime) {
-    aTime = mChunks[0].mTimeStamp;
-  }
-#endif
-
   size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const override
   {
     size_t amount = mChunks.ShallowSizeOfExcludingThis(aMallocSizeOf);
     for (size_t i = 0; i < mChunks.Length(); i++) {
       amount += mChunks[i].SizeOfExcludingThisIfUnshared(aMallocSizeOf);
     }
     return amount;
   }
@@ -450,19 +440,16 @@ protected:
   explicit MediaSegmentBase(Type aType)
     : MediaSegment(aType)
     , mChunks()
   {}
 
   MediaSegmentBase(MediaSegmentBase&& aSegment)
     : MediaSegment(std::move(aSegment))
     , mChunks()
-#ifdef MOZILLA_INTERNAL_API
-    , mTimeStamp(std::move(aSegment.mTimeStamp))
-#endif
   {
     mChunks.SwapElements(aSegment.mChunks);
     MOZ_ASSERT(mChunks.Capacity() >= DEFAULT_SEGMENT_CAPACITY,
                "Capacity must be retained in self after swap");
     MOZ_ASSERT(aSegment.mChunks.Capacity() >= DEFAULT_SEGMENT_CAPACITY,
                "Capacity must be retained in other after swap");
   }
 
@@ -571,16 +558,13 @@ protected:
       mChunks.RemoveElementsAt(i+1, mChunks.Length() - (i+1));
     }
     MOZ_ASSERT(mChunks.Capacity() >= DEFAULT_SEGMENT_CAPACITY,
                "Capacity must be retained after removing chunks");
     // Caller must adjust mDuration
   }
 
   AutoTArray<Chunk, DEFAULT_SEGMENT_CAPACITY> mChunks;
-#ifdef MOZILLA_INTERNAL_API
-  mozilla::TimeStamp mTimeStamp;
-#endif
 };
 
 } // namespace mozilla
 
 #endif /* MOZILLA_MEDIASEGMENT_H_ */
--- a/dom/media/MediaStreamGraph.cpp
+++ b/dom/media/MediaStreamGraph.cpp
@@ -778,21 +778,17 @@ MediaStreamGraphImpl::PlayAudio(MediaStr
           ticksWritten += toWrite;
         }
         output.ApplyVolume(volume);
       }
       t = end;
     }
     audioOutput.mLastTickWritten = offset;
 
-    // Need unique id for stream & track - and we want it to match the inserter
-    output.WriteTo(LATENCY_STREAM_ID(aStream, track->GetID()),
-                                     mMixer,
-                                     AudioOutputChannelCount(),
-                                     mSampleRate);
+    output.WriteTo(mMixer, AudioOutputChannelCount(), mSampleRate);
   }
   return ticksWritten;
 }
 
 void
 MediaStreamGraphImpl::OpenAudioInputImpl(CubebUtils::AudioDeviceID aID,
                                          AudioDataListener* aListener)
 {
@@ -2829,17 +2825,16 @@ MediaStream::AddMainThreadListener(MainT
 }
 
 SourceMediaStream::SourceMediaStream()
   : MediaStream()
   , mMutex("mozilla::media::SourceMediaStream")
   , mUpdateKnownTracksTime(0)
   , mPullEnabled(false)
   , mFinishPending(false)
-  , mNeedsMixing(false)
 {
 }
 
 nsresult
 SourceMediaStream::OpenAudioInput(CubebUtils::AudioDeviceID aID,
                                   AudioDataListener *aListener)
 {
   MOZ_ASSERT(GraphImpl());
@@ -3323,17 +3318,17 @@ SourceMediaStream::RemoveDirectTrackList
 StreamTime
 SourceMediaStream::GetEndOfAppendedData(TrackID aID)
 {
   MutexAutoLock lock(mMutex);
   TrackData *track = FindDataForTrack(aID);
   if (track) {
     return track->mEndOfFlushedData + track->mData->GetDuration();
   }
-  NS_ERROR("Track not found");
+  MOZ_CRASH("Track not found");
   return 0;
 }
 
 void
 SourceMediaStream::EndTrack(TrackID aID)
 {
   MutexAutoLock lock(mMutex);
   TrackData *track = FindDataForTrack(aID);
@@ -3423,30 +3418,16 @@ SourceMediaStream::RemoveAllDirectListen
   }
   mDirectTrackListeners.Clear();
 }
 
 SourceMediaStream::~SourceMediaStream()
 {
 }
 
-void
-SourceMediaStream::RegisterForAudioMixing()
-{
-  MutexAutoLock lock(mMutex);
-  mNeedsMixing = true;
-}
-
-bool
-SourceMediaStream::NeedsMixing()
-{
-  MutexAutoLock lock(mMutex);
-  return mNeedsMixing;
-}
-
 bool
 SourceMediaStream::HasPendingAudioTrack()
 {
   MutexAutoLock lock(mMutex);
   bool audioTrackPresent = false;
 
   for (auto& data : mPendingTracks) {
     if (data.mData->GetType() == MediaSegment::AUDIO) {
@@ -3724,17 +3705,16 @@ MediaStreamGraphImpl::MediaStreamGraphIm
   , mEndTime(GRAPH_TIME_MAX)
   , mForceShutDown(false)
   , mPostedRunInStableStateEvent(false)
   , mDetectedNotRunning(false)
   , mPostedRunInStableState(false)
   , mRealtime(aDriverRequested != OFFLINE_THREAD_DRIVER)
   , mNonRealtimeProcessing(false)
   , mStreamOrderDirty(false)
-  , mLatencyLog(AsyncLatencyLogger::Get())
   , mAbstractMainThread(aMainThread)
   , mSelfRef(this)
   , mOutputChannels(std::min<uint32_t>(8, CubebUtils::MaxNumberOfChannels()))
   , mGlobalVolume(CubebUtils::GetVolumeScale())
 #ifdef DEBUG
   , mCanRunMessagesSynchronously(false)
 #endif
 {
--- a/dom/media/MediaStreamGraph.h
+++ b/dom/media/MediaStreamGraph.h
@@ -825,18 +825,16 @@ public:
   void RemoveAllDirectListenersImpl() override;
 
   /**
    * End all tracks and Finish() this stream.  Used to voluntarily revoke access
    * to a LocalMediaStream.
    */
   void EndAllTrackAndFinish();
 
-  void RegisterForAudioMixing();
-
   /**
    * Returns true if this SourceMediaStream contains at least one audio track
    * that is in pending state.
    * This is thread safe, and takes the SourceMediaStream mutex.
    */
   bool HasPendingAudioTrack();
 
   TimeStamp GetStreamTracksStrartTimeStamp()
@@ -933,17 +931,16 @@ protected:
   // |AddStreamGraphThread| and |AdvanceTimeVaryingValuesToCurrentTime| in
   // particularly.
   TimeStamp mStreamTracksStartTimeStamp;
   nsTArray<TrackData> mUpdateTracks;
   nsTArray<TrackData> mPendingTracks;
   nsTArray<TrackBound<DirectMediaStreamTrackListener>> mDirectTrackListeners;
   bool mPullEnabled;
   bool mFinishPending;
-  bool mNeedsMixing;
 };
 
 /**
  * The blocking mode decides how a track should be blocked in a MediaInputPort.
  */
 enum class BlockingMode
 {
   /**
--- a/dom/media/MediaStreamGraphImpl.h
+++ b/dom/media/MediaStreamGraphImpl.h
@@ -5,17 +5,16 @@
 
 #ifndef MOZILLA_MEDIASTREAMGRAPHIMPL_H_
 #define MOZILLA_MEDIASTREAMGRAPHIMPL_H_
 
 #include "MediaStreamGraph.h"
 
 #include "AudioMixer.h"
 #include "GraphDriver.h"
-#include "Latency.h"
 #include "mozilla/Atomics.h"
 #include "mozilla/Monitor.h"
 #include "mozilla/Services.h"
 #include "mozilla/TimeStamp.h"
 #include "mozilla/UniquePtr.h"
 #include "mozilla/WeakPtr.h"
 #include "nsClassHashtable.h"
 #include "nsIMemoryReporter.h"
@@ -881,20 +880,16 @@ public:
    * value is only accessed on the main thread.
    */
   bool mNonRealtimeProcessing;
   /**
    * True when a change has happened which requires us to recompute the stream
    * blocking order.
    */
   bool mStreamOrderDirty;
-  /**
-   * Hold a ref to the Latency logger
-   */
-  RefPtr<AsyncLatencyLogger> mLatencyLog;
   AudioMixer mMixer;
   const RefPtr<AbstractThread> mAbstractMainThread;
 
   // used to limit graph shutdown time
   // Only accessed on the main thread.
   nsCOMPtr<nsITimer> mShutdownTimer;
 
 private:
--- a/dom/media/moz.build
+++ b/dom/media/moz.build
@@ -114,17 +114,16 @@ EXPORTS += [
     'ChannelMediaDecoder.h',
     'CubebUtils.h',
     'DecoderTraits.h',
     'DOMMediaStream.h',
     'FileBlockCache.h',
     'FrameStatistics.h',
     'ImageToI420.h',
     'Intervals.h',
-    'Latency.h',
     'MediaCache.h',
     'MediaContainerType.h',
     'MediaData.h',
     'MediaDataDemuxer.h',
     'MediaDecoder.h',
     'MediaDecoderOwner.h',
     'MediaDecoderStateMachine.h',
     'MediaEventSource.h',
@@ -234,17 +233,16 @@ UNIFIED_SOURCES += [
     'ChannelMediaResource.cpp',
     'CloneableWithRangeMediaResource.cpp',
     'DOMMediaStream.cpp',
     'FileBlockCache.cpp',
     'FileMediaResource.cpp',
     'GetUserMediaRequest.cpp',
     'GraphDriver.cpp',
     'ImageToI420.cpp',
-    'Latency.cpp',
     'MediaCache.cpp',
     'MediaContainerType.cpp',
     'MediaData.cpp',
     'MediaDecoder.cpp',
     'MediaDecoderStateMachine.cpp',
     'MediaDeviceInfo.cpp',
     'MediaDevices.cpp',
     'MediaFormatReader.cpp',
deleted file mode 100644
--- a/dom/media/test/graph_latency.py
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/env python
-# graph_latency.py - graph media latency
-#
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-# needs matplotlib (sudo aptitude install python-matplotlib)
-
-import matplotlib.pyplot as plt
-from matplotlib import rc
-import sys
-from pprint import pprint
-import re
-
-
-# FIX!  needs to be sum of a single mediastreamtrack and any output overhead for it
-# So there is one sum per MST
-def compute_sum(data):
-    'Compute the sum for each timestamp. This expects the output of parse_data.'
-    last_values = {}
-    out = ([],[])
-
-    for i in data:
-        if i[0] not in last_values.keys():
-          last_values[i[0]] = 0
-        last_values[i[0]] = float(i[3])
-        print last_values
-        out[0].append(i[2])
-        out[1].append(sum(last_values.values()))
-    return out
-
-
-def clean_data(raw_data):
-    '''
-    Remove the PR_LOG cruft at the beginning of each line and returns a list of
-    tuple.
-    '''
-    out = []
-    for line in raw_data:
-        match = re.match(r'(.*)#(.*)', line)
-        if match:
-	    continue
-	else:
-            out.append(line.split(": ")[1])
-    return out
-
-# returns a list of tuples
-def parse_data(raw_lines):
-    '''
-    Split each line by , and put every bit in a tuple.
-    '''
-    out = []
-    for line in raw_lines:
-        out.append(line.split(','))
-    return out
-
-if len(sys.argv) == 3:
-    name = sys.argv[1]
-    channels = int(sys.argv[2])
-else:
-    print sys.argv[0] + "latency_log"
-
-try:
-    f = open(sys.argv[1])
-except:
-    print "cannot open " + name
-
-raw_lines = f.readlines()
-lines = clean_data(raw_lines)
-data = parse_data(lines)
-
-final_data = {}
-
-for tupl in data:
-    name = tupl[0]
-    if tupl[1] != 0:
-        name = name+tupl[1]
-    if name not in final_data.keys():
-        final_data[name] = ([], [])
-# sanity-check values
-    if float(tupl[3]) < 10*1000:
-        final_data[name][0].append(float(tupl[2]))
-        final_data[name][1].append(float(tupl[3]))
-
-#overall = compute_sum(data)
-#final_data["overall"] = overall
-
-pprint(final_data)
-
-fig = plt.figure()
-for i in final_data.keys():
-    plt.plot(final_data[i][0], final_data[i][1], label=i)
-
-plt.legend()
-plt.suptitle("Latency in ms (y-axis) against time in ms (x-axis).")
-
-size = fig.get_size_inches()
-# make it gigantic so we can see things. sometimes, if the graph is too big,
-# this errors. reduce the factor so it stays under 2**15.
-fig.set_size_inches((size[0]*10, size[1]*2))
-name = sys.argv[1][:-4] + ".pdf"
-fig.savefig(name)
-
--- a/dom/media/webrtc/MediaEngineWebRTC.cpp
+++ b/dom/media/webrtc/MediaEngineWebRTC.cpp
@@ -6,16 +6,17 @@
 
 #include "MediaEngineWebRTC.h"
 
 #include "AllocationHandle.h"
 #include "CamerasChild.h"
 #include "CSFLog.h"
 #include "MediaEngineTabVideoSource.h"
 #include "MediaEngineRemoteVideoSource.h"
+#include "MediaEngineWebRTCAudio.h"
 #include "MediaTrackConstraints.h"
 #include "mozilla/dom/MediaDeviceInfo.h"
 #include "mozilla/Logging.h"
 #include "nsIComponentRegistrar.h"
 #include "nsIPrefService.h"
 #include "nsIPrefBranch.h"
 #include "nsITabSource.h"
 #include "prenv.h"
--- a/dom/media/webrtc/MediaEngineWebRTC.h
+++ b/dom/media/webrtc/MediaEngineWebRTC.h
@@ -1,132 +1,57 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef MEDIAENGINEWEBRTC_H_
 #define MEDIAENGINEWEBRTC_H_
 
-#include "AudioPacketizer.h"
-#include "AudioSegment.h"
 #include "AudioDeviceInfo.h"
 #include "CamerasChild.h"
-#include "cubeb/cubeb.h"
 #include "CubebUtils.h"
 #include "DOMMediaStream.h"
-#include "ipc/IPCMessageUtils.h"
 #include "MediaEngine.h"
 #include "MediaEnginePrefs.h"
 #include "MediaEngineSource.h"
 #include "MediaEngineWrapper.h"
 #include "MediaStreamGraph.h"
-#include "mozilla/dom/File.h"
-#include "mozilla/dom/MediaStreamTrackBinding.h"
+#include "NullTransport.h"
+#include "StreamTracks.h"
+#include "VideoSegment.h"
+#include "VideoUtils.h"
+#include "cubeb/cubeb.h"
+#include "ipc/IPCMessageUtils.h"
 #include "mozilla/Mutex.h"
 #include "mozilla/Mutex.h"
 #include "mozilla/Sprintf.h"
 #include "mozilla/StaticMutex.h"
 #include "mozilla/UniquePtr.h"
+#include "mozilla/dom/File.h"
+#include "mozilla/dom/MediaStreamTrackBinding.h"
 #include "nsAutoPtr.h"
-#include "nsComponentManagerUtils.h"
 #include "nsCOMPtr.h"
+#include "nsComponentManagerUtils.h"
 #include "nsDirectoryServiceDefs.h"
+#include "nsIRunnable.h"
 #include "nsIThread.h"
-#include "nsIRunnable.h"
 #include "nsRefPtrHashtable.h"
 #include "nsThreadUtils.h"
-#include "NullTransport.h"
 #include "prcvar.h"
 #include "prthread.h"
-#include "StreamTracks.h"
-#include "VideoSegment.h"
-#include "VideoUtils.h"
 
 // WebRTC library includes follow
-// Audio Engine
-#include "webrtc/voice_engine/include/voe_base.h"
-#include "webrtc/voice_engine/include/voe_codec.h"
-#include "webrtc/voice_engine/include/voe_network.h"
-#include "webrtc/voice_engine/include/voe_audio_processing.h"
-#include "webrtc/voice_engine/include/voe_volume_control.h"
-#include "webrtc/voice_engine/include/voe_external_media.h"
-#include "webrtc/voice_engine/include/voe_audio_processing.h"
-#include "webrtc/modules/audio_device/include/audio_device.h"
-#include "webrtc/modules/audio_processing/include/audio_processing.h"
 // Video Engine
 // conflicts with #include of scoped_ptr.h
 #undef FF
 #include "webrtc/modules/video_capture/video_capture_defines.h"
 
 namespace mozilla {
 
-class MediaEngineWebRTCMicrophoneSource;
-
-class MediaEngineWebRTCAudioCaptureSource : public MediaEngineSource
-{
-public:
-  explicit MediaEngineWebRTCAudioCaptureSource(const char* aUuid)
-  {
-  }
-  nsString GetName() const override;
-  nsCString GetUUID() const override;
-  nsresult Allocate(const dom::MediaTrackConstraints &aConstraints,
-                    const MediaEnginePrefs &aPrefs,
-                    const nsString& aDeviceId,
-                    const ipc::PrincipalInfo& aPrincipalInfo,
-                    AllocationHandle** aOutHandle,
-                    const char** aOutBadConstraint) override
-  {
-    // Nothing to do here, everything is managed in MediaManager.cpp
-    *aOutHandle = nullptr;
-    return NS_OK;
-  }
-  nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override
-  {
-    // Nothing to do here, everything is managed in MediaManager.cpp
-    MOZ_ASSERT(!aHandle);
-    return NS_OK;
-  }
-  nsresult SetTrack(const RefPtr<const AllocationHandle>& aHandle,
-                    const RefPtr<SourceMediaStream>& aStream,
-                    TrackID aTrackID,
-                    const PrincipalHandle& aPrincipal) override;
-  nsresult Start(const RefPtr<const AllocationHandle>& aHandle) override;
-  nsresult Stop(const RefPtr<const AllocationHandle>& aHandle) override;
-  nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
-                       const dom::MediaTrackConstraints& aConstraints,
-                       const MediaEnginePrefs& aPrefs,
-                       const nsString& aDeviceId,
-                       const char** aOutBadConstraint) override;
-
-  void Pull(const RefPtr<const AllocationHandle>& aHandle,
-            const RefPtr<SourceMediaStream>& aStream,
-            TrackID aTrackID,
-            StreamTime aDesiredTime,
-            const PrincipalHandle& aPrincipalHandle) override
-  {}
-
-  dom::MediaSourceEnum GetMediaSource() const override
-  {
-    return dom::MediaSourceEnum::AudioCapture;
-  }
-
-  nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) override
-  {
-    return NS_ERROR_NOT_IMPLEMENTED;
-  }
-
-  uint32_t GetBestFitnessDistance(
-    const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
-    const nsString& aDeviceId) const override;
-
-protected:
-  virtual ~MediaEngineWebRTCAudioCaptureSource() = default;
-};
-
 // This class implements a cache for accessing the audio device list. It can be
 // accessed on any thread.
 class CubebDeviceEnumerator final
 {
 public:
   CubebDeviceEnumerator();
   ~CubebDeviceEnumerator();
   // This method returns a list of all the input and output audio devices
@@ -155,303 +80,16 @@ private:
   Mutex mMutex;
   nsTArray<RefPtr<AudioDeviceInfo>> mDevices;
   // If mManualInvalidation is true, then it is necessary to query the device
   // list each time instead of relying on automatic invalidation of the cache by
   // cubeb itself. Set in the constructor and then can be access on any thread.
   bool mManualInvalidation;
 };
 
-// This class is instantiated on the MediaManager thread, and is then sent and
-// only ever access again on the MediaStreamGraph.
-class WebRTCAudioDataListener : public AudioDataListener
-{
-protected:
-  // Protected destructor, to discourage deletion outside of Release():
-  virtual ~WebRTCAudioDataListener() {}
-
-public:
-  explicit WebRTCAudioDataListener(MediaEngineWebRTCMicrophoneSource* aAudioSource)
-    : mAudioSource(aAudioSource)
-  {}
-
-  // AudioDataListenerInterface methods
-  void NotifyOutputData(MediaStreamGraphImpl* aGraph,
-                        AudioDataValue* aBuffer,
-                        size_t aFrames,
-                        TrackRate aRate,
-                        uint32_t aChannels) override;
-
-  void NotifyInputData(MediaStreamGraphImpl* aGraph,
-                       const AudioDataValue* aBuffer,
-                       size_t aFrames,
-                       TrackRate aRate,
-                       uint32_t aChannels) override;
-
-  uint32_t RequestedInputChannelCount(MediaStreamGraphImpl* aGraph) override;
-
-  void DeviceChanged(MediaStreamGraphImpl* aGraph) override;
-
-  void Disconnect(MediaStreamGraphImpl* aGraph) override;
-
-private:
-  RefPtr<MediaEngineWebRTCMicrophoneSource> mAudioSource;
-};
-
-class MediaEngineWebRTCMicrophoneSource : public MediaEngineSource,
-                                          public AudioDataListenerInterface
-{
-public:
-  MediaEngineWebRTCMicrophoneSource(RefPtr<AudioDeviceInfo> aInfo,
-                                    const nsString& name,
-                                    const nsCString& uuid,
-                                    uint32_t maxChannelCount,
-                                    bool aDelayAgnostic,
-                                    bool aExtendedFilter);
-
-  bool RequiresSharing() const override
-  {
-    return false;
-  }
-
-  nsString GetName() const override;
-  nsCString GetUUID() const override;
-
-  nsresult Allocate(const dom::MediaTrackConstraints &aConstraints,
-                    const MediaEnginePrefs& aPrefs,
-                    const nsString& aDeviceId,
-                    const ipc::PrincipalInfo& aPrincipalInfo,
-                    AllocationHandle** aOutHandle,
-                    const char** aOutBadConstraint) override;
-  nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override;
-  nsresult SetTrack(const RefPtr<const AllocationHandle>& aHandle,
-                    const RefPtr<SourceMediaStream>& aStream,
-                    TrackID aTrackID,
-                    const PrincipalHandle& aPrincipal) override;
-  nsresult Start(const RefPtr<const AllocationHandle>& aHandle) override;
-  nsresult Stop(const RefPtr<const AllocationHandle>& aHandle) override;
-  nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
-                       const dom::MediaTrackConstraints& aConstraints,
-                       const MediaEnginePrefs& aPrefs,
-                       const nsString& aDeviceId,
-                       const char** aOutBadConstraint) override;
-
-  /**
-   * Assigns the current settings of the capture to aOutSettings.
-   * Main thread only.
-   */
-  void GetSettings(dom::MediaTrackSettings& aOutSettings) const override;
-
-  void Pull(const RefPtr<const AllocationHandle>& aHandle,
-            const RefPtr<SourceMediaStream>& aStream,
-            TrackID aTrackID,
-            StreamTime aDesiredTime,
-            const PrincipalHandle& aPrincipalHandle) override;
-
-  // AudioDataListenerInterface methods
-  void NotifyOutputData(MediaStreamGraphImpl* aGraph,
-                        AudioDataValue* aBuffer, size_t aFrames,
-                        TrackRate aRate, uint32_t aChannels) override;
-  void NotifyInputData(MediaStreamGraphImpl* aGraph,
-                       const AudioDataValue* aBuffer, size_t aFrames,
-                       TrackRate aRate, uint32_t aChannels) override;
-
-  void DeviceChanged(MediaStreamGraphImpl* aGraph) override;
-
-  uint32_t RequestedInputChannelCount(MediaStreamGraphImpl* aGraph) override
-  {
-    return GetRequestedInputChannelCount(aGraph);
-  }
-
-  void Disconnect(MediaStreamGraphImpl* aGraph) override;
-
-  dom::MediaSourceEnum GetMediaSource() const override
-  {
-    return dom::MediaSourceEnum::Microphone;
-  }
-
-  nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) override
-  {
-    return NS_ERROR_NOT_IMPLEMENTED;
-  }
-
-  uint32_t GetBestFitnessDistance(
-    const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
-    const nsString& aDeviceId) const override;
-
-  void Shutdown() override;
-
-protected:
-  ~MediaEngineWebRTCMicrophoneSource() {}
-
-private:
-  /**
-   * Representation of data tied to an AllocationHandle rather than to the source.
-   */
-  struct Allocation {
-    Allocation() = delete;
-    explicit Allocation(const RefPtr<AllocationHandle>& aHandle);
-    ~Allocation();
-
-#ifdef DEBUG
-    // The MSGImpl::IterationEnd() of the last time we appended data from an
-    // audio callback.
-    // Guarded by MediaEngineWebRTCMicrophoneSource::mMutex.
-    GraphTime mLastCallbackAppendTime = 0;
-#endif
-    // Set to false by Start(). Becomes true after the first time we append real
-    // audio frames from the audio callback.
-    // Guarded by MediaEngineWebRTCMicrophoneSource::mMutex.
-    bool mLiveFramesAppended = false;
-
-    // Set to false by Start(). Becomes true after the first time we append
-    // silence *after* the first audio callback has appended real frames.
-    // Guarded by MediaEngineWebRTCMicrophoneSource::mMutex.
-    bool mLiveSilenceAppended = false;
-
-    const RefPtr<AllocationHandle> mHandle;
-    RefPtr<SourceMediaStream> mStream;
-    TrackID mTrackID = TRACK_NONE;
-    PrincipalHandle mPrincipal = PRINCIPAL_HANDLE_NONE;
-    bool mEnabled = false;
-  };
-
-  /**
-   * Used with nsTArray<Allocation>::IndexOf to locate an Allocation by a handle.
-   */
-  class AllocationHandleComparator {
-  public:
-    bool Equals(const Allocation& aAllocation,
-                const RefPtr<const AllocationHandle>& aHandle) const
-    {
-      return aHandle == aAllocation.mHandle;
-    }
-  };
-
-  /**
-   * Reevaluates the aggregated constraints of all allocations and restarts the
-   * underlying device if necessary.
-   *
-   * If the given AllocationHandle was already registered, its constraints will
-   * be updated before reevaluation. If not, they will be added before
-   * reevaluation.
-   */
-  nsresult ReevaluateAllocation(const RefPtr<AllocationHandle>& aHandle,
-                                const NormalizedConstraints* aConstraintsUpdate,
-                                const MediaEnginePrefs& aPrefs,
-                                const nsString& aDeviceId,
-                                const char** aOutBadConstraint);
-
-  /**
-   * Updates the underlying (single) device with the aggregated constraints
-   * aNetConstraints. If the chosen settings for the device changes based on
-   * these new constraints, and capture is active, the device will be restarted.
-   */
-  nsresult UpdateSingleSource(const RefPtr<const AllocationHandle>& aHandle,
-                              const NormalizedConstraints& aNetConstraints,
-                              const MediaEnginePrefs& aPrefs,
-                              const nsString& aDeviceId,
-                              const char** aOutBadConstraint);
-
-
-  void UpdateAECSettingsIfNeeded(bool aEnable, webrtc::EcModes aMode);
-  void UpdateAGCSettingsIfNeeded(bool aEnable, webrtc::AgcModes aMode);
-  void UpdateNSSettingsIfNeeded(bool aEnable, webrtc::NsModes aMode);
-
-  void ApplySettings(const MediaEnginePrefs& aPrefs,
-                     RefPtr<MediaStreamGraphImpl> aGraph);
-
-  bool HasEnabledTrack() const;
-
-  template<typename T>
-  void InsertInGraph(const T* aBuffer,
-                     size_t aFrames,
-                     uint32_t aChannels);
-
-  void PacketizeAndProcess(MediaStreamGraphImpl* aGraph,
-                           const AudioDataValue* aBuffer,
-                           size_t aFrames,
-                           TrackRate aRate,
-                           uint32_t aChannels);
-
-
-  // This is true when all processing is disabled, we can skip
-  // packetization, resampling and other processing passes.
-  // Graph thread only.
-  bool PassThrough(MediaStreamGraphImpl* aGraphImpl) const;
-
-  // Graph thread only.
-  void SetPassThrough(bool aPassThrough);
-  uint32_t GetRequestedInputChannelCount(MediaStreamGraphImpl* aGraphImpl);
-  void SetRequestedInputChannelCount(uint32_t aRequestedInputChannelCount);
-
-  // mListener is created on the MediaManager thread, and then sent to the MSG
-  // thread. On shutdown, we send this pointer to the MSG thread again, telling
-  // it to clean up.
-  RefPtr<WebRTCAudioDataListener> mListener;
-
-  // Can be shared on any thread.
-  const RefPtr<AudioDeviceInfo> mDeviceInfo;
-
-  const UniquePtr<webrtc::AudioProcessing> mAudioProcessing;
-
-  // accessed from the GraphDriver thread except for deletion.
-  nsAutoPtr<AudioPacketizer<AudioDataValue, float>> mPacketizerInput;
-  nsAutoPtr<AudioPacketizer<AudioDataValue, float>> mPacketizerOutput;
-
-  // mMutex protects some of our members off the owning thread.
-  Mutex mMutex;
-
-  // We append an allocation in Allocate() and remove it in Deallocate().
-  // Both the array and the Allocation members are modified under mMutex on
-  // the owning thread. Accessed under one of the two.
-  nsTArray<Allocation> mAllocations;
-
-  // Current state of the shared resource for this source. Written on the
-  // owning thread, read on either the owning thread or the MSG thread.
-  Atomic<MediaEngineSourceState> mState;
-
-  bool mDelayAgnostic;
-  bool mExtendedFilter;
-  bool mStarted;
-
-  const nsString mDeviceName;
-  const nsCString mDeviceUUID;
-
-  // The current settings for the underlying device.
-  // Member access is main thread only after construction.
-  const nsMainThreadPtrHandle<media::Refcountable<dom::MediaTrackSettings>> mSettings;
-
-  // The number of channels asked for by content, after clamping to the range of
-  // legal channel count for this particular device. This is the number of
-  // channels of the input buffer passed as parameter in NotifyInputData.
-  uint32_t mRequestedInputChannelCount;
-  uint64_t mTotalFrames;
-  uint64_t mLastLogFrames;
-
-  // mSkipProcessing is true if none of the processing passes are enabled,
-  // because of prefs or constraints. This allows simply copying the audio into
-  // the MSG, skipping resampling and the whole webrtc.org code.
-  // This is read and written to only on the MSG thread.
-  bool mSkipProcessing;
-
-  // To only update microphone when needed, we keep track of the prefs
-  // representing the currently applied settings for this source. This is the
-  // net result of the prefs across all allocations.
-  // Owning thread only.
-  MediaEnginePrefs mNetPrefs;
-
-  // Stores the mixed audio output for the reverse-stream of the AEC.
-  AlignedFloatBuffer mOutputBuffer;
-
-  AlignedFloatBuffer mInputBuffer;
-  AlignedFloatBuffer mDeinterleavedBuffer;
-  AlignedFloatBuffer mInputDownmixBuffer;
-};
-
 class MediaEngineWebRTC : public MediaEngine
 {
   typedef MediaEngine Super;
 public:
   explicit MediaEngineWebRTC(MediaEnginePrefs& aPrefs);
 
   virtual void SetFakeDeviceChangeEvents() override;
 
--- a/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
+++ b/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
@@ -1,14 +1,14 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-#include "MediaEngineWebRTC.h"
+#include "MediaEngineWebRTCAudio.h"
 
 #include <stdio.h>
 #include <algorithm>
 
 #include "AllocationHandle.h"
 #include "AudioConverter.h"
 #include "MediaManager.h"
 #include "MediaStreamGraphImpl.h"
@@ -45,114 +45,42 @@ namespace mozilla {
 #ifdef LOG
 #undef LOG
 #endif
 
 LogModule* GetMediaManagerLog();
 #define LOG(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Debug, msg)
 #define LOG_FRAMES(msg) MOZ_LOG(GetMediaManagerLog(), mozilla::LogLevel::Verbose, msg)
 
-LogModule* AudioLogModule() {
-  static mozilla::LazyLogModule log("AudioLatency");
-  return static_cast<LogModule*>(log);
-}
-
-void
-WebRTCAudioDataListener::NotifyOutputData(MediaStreamGraphImpl* aGraph,
-                                          AudioDataValue* aBuffer,
-                                          size_t aFrames,
-                                          TrackRate aRate,
-                                          uint32_t aChannels)
-{
-  MOZ_ASSERT(aGraph->CurrentDriver()->OnThread());
-  if (mAudioSource) {
-    mAudioSource->NotifyOutputData(aGraph, aBuffer, aFrames, aRate, aChannels);
-  }
-}
-
-void
-WebRTCAudioDataListener::NotifyInputData(MediaStreamGraphImpl* aGraph,
-                                         const AudioDataValue* aBuffer,
-                                         size_t aFrames,
-                                         TrackRate aRate,
-                                         uint32_t aChannels)
-{
-  MOZ_ASSERT(aGraph->CurrentDriver()->OnThread());
-  if (mAudioSource) {
-    mAudioSource->NotifyInputData(aGraph, aBuffer, aFrames, aRate, aChannels);
-  }
-}
-
-void
-WebRTCAudioDataListener::DeviceChanged(MediaStreamGraphImpl* aGraph)
-{
-  MOZ_ASSERT(aGraph->CurrentDriver()->OnThread());
-  if (mAudioSource) {
-    mAudioSource->DeviceChanged(aGraph);
-  }
-}
-
-uint32_t
-WebRTCAudioDataListener::RequestedInputChannelCount(MediaStreamGraphImpl* aGraph)
-{
-  MOZ_ASSERT(aGraph->CurrentDriver()->OnThread());
-  if (mAudioSource) {
-    return mAudioSource->RequestedInputChannelCount(aGraph);
-  }
-  return 0;
-}
-
-void
-WebRTCAudioDataListener::Disconnect(MediaStreamGraphImpl* aGraph)
-{
-  MOZ_ASSERT(aGraph->CurrentDriver()->OnThread());
-  if (mAudioSource) {
-    mAudioSource->Disconnect(aGraph);
-    mAudioSource = nullptr;
-  }
-}
-
 /**
  * WebRTC Microphone MediaEngineSource.
  */
 
-MediaEngineWebRTCMicrophoneSource::Allocation::Allocation(
-    const RefPtr<AllocationHandle>& aHandle)
-  : mHandle(aHandle)
-{}
-
-MediaEngineWebRTCMicrophoneSource::Allocation::~Allocation() = default;
-
 MediaEngineWebRTCMicrophoneSource::MediaEngineWebRTCMicrophoneSource(
-    RefPtr<AudioDeviceInfo> aInfo,
-    const nsString& aDeviceName,
-    const nsCString& aDeviceUUID,
-    uint32_t aMaxChannelCount,
-    bool aDelayAgnostic,
-    bool aExtendedFilter)
-  : mDeviceInfo(std::move(aInfo))
-  , mAudioProcessing(AudioProcessing::Create())
-  , mMutex("WebRTCMic::Mutex")
+  RefPtr<AudioDeviceInfo> aInfo,
+  const nsString& aDeviceName,
+  const nsCString& aDeviceUUID,
+  uint32_t aMaxChannelCount,
+  bool aDelayAgnostic,
+  bool aExtendedFilter)
+  : mTrackID(TRACK_NONE)
+  , mPrincipal(PRINCIPAL_HANDLE_NONE)
+  , mDeviceInfo(std::move(aInfo))
   , mDelayAgnostic(aDelayAgnostic)
   , mExtendedFilter(aExtendedFilter)
-  , mStarted(false)
   , mDeviceName(aDeviceName)
   , mDeviceUUID(aDeviceUUID)
+  , mDeviceMaxChannelCount(aMaxChannelCount)
   , mSettings(
       new nsMainThreadPtrHolder<media::Refcountable<dom::MediaTrackSettings>>(
         "MediaEngineWebRTCMicrophoneSource::mSettings",
         new media::Refcountable<dom::MediaTrackSettings>(),
         // Non-strict means it won't assert main thread for us.
         // It would be great if it did but we're already on the media thread.
         /* aStrict = */ false))
-  , mRequestedInputChannelCount(aMaxChannelCount)
-  , mTotalFrames(0)
-  , mLastLogFrames(0)
-  , mSkipProcessing(false)
-  , mInputDownmixBuffer(MAX_SAMPLING_FREQ * MAX_CHANNELS / 100)
 {
 #ifndef ANDROID
   MOZ_ASSERT(mDeviceInfo->DeviceID());
 #endif
 
   // We'll init lazily as needed
   mSettings->mEchoCancellation.Construct(0);
   mSettings->mAutoGainControl.Construct(0);
@@ -201,24 +129,22 @@ MediaEngineWebRTCMicrophoneSource::Reeva
     const NormalizedConstraints* aConstraintsUpdate,
     const MediaEnginePrefs& aPrefs,
     const nsString& aDeviceId,
     const char** aOutBadConstraint)
 {
   AssertIsOnOwningThread();
 
   // aHandle and/or aConstraintsUpdate may be nullptr (see below)
+  AutoTArray<const NormalizedConstraints*, 10> allConstraints;
 
-  AutoTArray<const NormalizedConstraints*, 10> allConstraints;
-  for (const Allocation& registered : mAllocations) {
-    if (aConstraintsUpdate && registered.mHandle == aHandle) {
-      continue; // Don't count old constraints
-    }
-    allConstraints.AppendElement(&registered.mHandle->mConstraints);
+  if (mHandle && !(aConstraintsUpdate && mHandle == aHandle)) {
+    allConstraints.AppendElement(&mHandle->mConstraints);
   }
+
   if (aConstraintsUpdate) {
     allConstraints.AppendElement(aConstraintsUpdate);
   } else if (aHandle) {
     // In the case of AddShareOfSingleSource, the handle isn't registered yet.
     allConstraints.AppendElement(&aHandle->mConstraints);
   }
 
   NormalizedConstraints netConstraints(allConstraints);
@@ -245,16 +171,17 @@ nsresult
 MediaEngineWebRTCMicrophoneSource::Reconfigure(const RefPtr<AllocationHandle>& aHandle,
                                                const dom::MediaTrackConstraints& aConstraints,
                                                const MediaEnginePrefs& aPrefs,
                                                const nsString& aDeviceId,
                                                const char** aOutBadConstraint)
 {
   AssertIsOnOwningThread();
   MOZ_ASSERT(aHandle);
+  MOZ_ASSERT(mStream);
 
   LOG(("Mic source %p allocation %p Reconfigure()", this, aHandle.get()));
 
   NormalizedConstraints constraints(aConstraints);
   nsresult rv = ReevaluateAllocation(aHandle, &constraints, aPrefs, aDeviceId,
                                      aOutBadConstraint);
   if (NS_FAILED(rv)) {
     if (aOutBadConstraint) {
@@ -264,168 +191,38 @@ MediaEngineWebRTCMicrophoneSource::Recon
     nsAutoCString name;
     GetErrorName(rv, name);
     LOG(("Mic source %p Reconfigure() failed unexpectedly. rv=%s",
          this, name.Data()));
     Stop(aHandle);
     return NS_ERROR_UNEXPECTED;
   }
 
-  size_t i = mAllocations.IndexOf(aHandle, 0, AllocationHandleComparator());
-  MOZ_DIAGNOSTIC_ASSERT(i != mAllocations.NoIndex);
-  ApplySettings(mNetPrefs, mAllocations[i].mStream->GraphImpl());
+  ApplySettings(mNetPrefs, mStream->GraphImpl());
 
   return NS_OK;
 }
 
+void MediaEngineWebRTCMicrophoneSource::Pull(const RefPtr<const AllocationHandle>& aHandle,
+                                             const RefPtr<SourceMediaStream>& aStream,
+                                             TrackID aTrackID,
+                                             StreamTime aDesiredTime,
+                                             const PrincipalHandle& aPrincipalHandle)
+{
+  // If pull is enabled, it means that the audio input is not open, and we
+  // should fill it out with silence. This is the only method called on the
+  // MSG thread.
+  mInputProcessing->Pull(aHandle, aStream, aTrackID, aDesiredTime, aPrincipalHandle);
+}
+
 bool operator == (const MediaEnginePrefs& a, const MediaEnginePrefs& b)
 {
   return !memcmp(&a, &b, sizeof(MediaEnginePrefs));
 };
 
-// This does an early return in case of error.
-#define HANDLE_APM_ERROR(fn)                                \
-do {                                                        \
-  int rv = fn;                                              \
-  if (rv != AudioProcessing::kNoError) {                    \
-    MOZ_ASSERT_UNREACHABLE("APM error in " #fn);            \
-    return;                                                 \
-  }                                                         \
-} while(0);
-
-void MediaEngineWebRTCMicrophoneSource::UpdateAECSettingsIfNeeded(bool aEnable, EcModes aMode)
-{
-  AssertIsOnOwningThread();
-
-  using webrtc::EcModes;
-
-  EchoCancellation::SuppressionLevel level;
-
-  switch(aMode) {
-    case EcModes::kEcUnchanged:
-      level = mAudioProcessing->echo_cancellation()->suppression_level();
-      break;
-    case EcModes::kEcConference:
-      level = EchoCancellation::kHighSuppression;
-      break;
-    case EcModes::kEcDefault:
-      level = EchoCancellation::kModerateSuppression;
-      break;
-    case EcModes::kEcAec:
-      level = EchoCancellation::kModerateSuppression;
-      break;
-    case EcModes::kEcAecm:
-      // No suppression level to set for the mobile echo canceller
-      break;
-    default:
-      MOZ_LOG(GetMediaManagerLog(), LogLevel::Error, ("Bad EcMode value"));
-      MOZ_ASSERT_UNREACHABLE("Bad pref set in all.js or in about:config"
-                             " for the echo cancelation mode.");
-      // fall back to something sensible in release
-      level = EchoCancellation::kModerateSuppression;
-      break;
-  }
-
-  // AECm and AEC are mutually exclusive.
-  if (aMode == EcModes::kEcAecm) {
-    HANDLE_APM_ERROR(mAudioProcessing->echo_cancellation()->Enable(false));
-    HANDLE_APM_ERROR(mAudioProcessing->echo_control_mobile()->Enable(aEnable));
-  } else {
-    HANDLE_APM_ERROR(mAudioProcessing->echo_control_mobile()->Enable(false));
-    HANDLE_APM_ERROR(mAudioProcessing->echo_cancellation()->Enable(aEnable));
-    HANDLE_APM_ERROR(mAudioProcessing->echo_cancellation()->set_suppression_level(level));
-  }
-}
-
-void
-MediaEngineWebRTCMicrophoneSource::UpdateAGCSettingsIfNeeded(bool aEnable, AgcModes aMode)
-{
-  AssertIsOnOwningThread();
-
-#if defined(WEBRTC_IOS) || defined(ATA) || defined(WEBRTC_ANDROID)
-  if (aMode == kAgcAdaptiveAnalog) {
-    MOZ_LOG(GetMediaManagerLog(),
-            LogLevel::Error,
-            ("Invalid AGC mode kAgcAdaptiveAnalog on mobile"));
-    MOZ_ASSERT_UNREACHABLE("Bad pref set in all.js or in about:config"
-                           " for the auto gain, on mobile.");
-    aMode = kAgcDefault;
-  }
-#endif
-  GainControl::Mode mode = kDefaultAgcMode;
-
-  switch (aMode) {
-    case AgcModes::kAgcDefault:
-      mode = kDefaultAgcMode;
-      break;
-    case AgcModes::kAgcUnchanged:
-      mode = mAudioProcessing->gain_control()->mode();
-      break;
-    case AgcModes::kAgcFixedDigital:
-      mode = GainControl::Mode::kFixedDigital;
-      break;
-    case AgcModes::kAgcAdaptiveAnalog:
-      mode = GainControl::Mode::kAdaptiveAnalog;
-      break;
-    case AgcModes::kAgcAdaptiveDigital:
-      mode = GainControl::Mode::kAdaptiveDigital;
-      break;
-    default:
-      MOZ_ASSERT_UNREACHABLE("Bad pref set in all.js or in about:config"
-                             " for the auto gain.");
-      // This is a good fallback, it works regardless of the platform.
-      mode = GainControl::Mode::kAdaptiveDigital;
-      break;
-  }
-
-  HANDLE_APM_ERROR(mAudioProcessing->gain_control()->set_mode(mode));
-  HANDLE_APM_ERROR(mAudioProcessing->gain_control()->Enable(aEnable));
-}
-
-void
-MediaEngineWebRTCMicrophoneSource::UpdateNSSettingsIfNeeded(bool aEnable, NsModes aMode)
-{
-  AssertIsOnOwningThread();
-
-  NoiseSuppression::Level nsLevel;
-
-  switch (aMode) {
-    case NsModes::kNsDefault:
-      nsLevel = kDefaultNsMode;
-      break;
-    case NsModes::kNsUnchanged:
-      nsLevel = mAudioProcessing->noise_suppression()->level();
-      break;
-    case NsModes::kNsConference:
-      nsLevel = NoiseSuppression::kHigh;
-      break;
-    case NsModes::kNsLowSuppression:
-      nsLevel = NoiseSuppression::kLow;
-      break;
-    case NsModes::kNsModerateSuppression:
-      nsLevel = NoiseSuppression::kModerate;
-      break;
-    case NsModes::kNsHighSuppression:
-      nsLevel = NoiseSuppression::kHigh;
-      break;
-    case NsModes::kNsVeryHighSuppression:
-      nsLevel = NoiseSuppression::kVeryHigh;
-      break;
-    default:
-      MOZ_ASSERT_UNREACHABLE("Bad pref set in all.js or in about:config"
-                             " for the noise suppression.");
-      // Pick something sensible as a faillback in release.
-      nsLevel = NoiseSuppression::kModerate;
-  }
-  HANDLE_APM_ERROR(mAudioProcessing->noise_suppression()->set_level(nsLevel));
-  HANDLE_APM_ERROR(mAudioProcessing->noise_suppression()->Enable(aEnable));
-}
-
-#undef HANDLE_APM_ERROR
-
 nsresult
 MediaEngineWebRTCMicrophoneSource::UpdateSingleSource(
     const RefPtr<const AllocationHandle>& aHandle,
     const NormalizedConstraints& aNetConstraints,
     const MediaEnginePrefs& aPrefs,
     const nsString& aDeviceId,
     const char** aOutBadConstraint)
 {
@@ -465,20 +262,17 @@ MediaEngineWebRTCMicrophoneSource::Updat
       prefs.mAecOn ? prefs.mAec : -1,
       prefs.mAgcOn ? prefs.mAgc : -1,
       prefs.mNoiseOn ? prefs.mNoise : -1,
       prefs.mChannels));
 
   switch (mState) {
     case kReleased:
       MOZ_ASSERT(aHandle);
-      {
-        MutexAutoLock lock(mMutex);
-        mState = kAllocated;
-      }
+      mState = kAllocated;
       LOG(("Audio device %s allocated", NS_ConvertUTF16toUTF8(mDeviceInfo->Name()).get()));
       break;
 
     case kStarted:
     case kStopped:
       if (prefs == mNetPrefs) {
         LOG(("UpdateSingleSource: new prefs for %s are the same as the current prefs, returning.",
              NS_ConvertUTF16toUTF8(mDeviceName).get()));
@@ -486,285 +280,430 @@ MediaEngineWebRTCMicrophoneSource::Updat
       }
       break;
 
     default:
       LOG(("Audio device %s in ignored state %d", NS_ConvertUTF16toUTF8(mDeviceInfo->Name()).get(), MediaEngineSourceState(mState)));
       break;
   }
 
-  if (mState != kReleased) {
+  if (mStream) {
     UpdateAGCSettingsIfNeeded(prefs.mAgcOn, static_cast<AgcModes>(prefs.mAgc));
     UpdateNSSettingsIfNeeded(prefs.mNoiseOn, static_cast<NsModes>(prefs.mNoise));
     UpdateAECSettingsIfNeeded(prefs.mAecOn, static_cast<EcModes>(prefs.mAec));
 
-    webrtc::Config config;
-    config.Set<webrtc::ExtendedFilter>(new webrtc::ExtendedFilter(mExtendedFilter));
-    config.Set<webrtc::DelayAgnostic>(new webrtc::DelayAgnostic(mDelayAgnostic));
-    mAudioProcessing->SetExtraOptions(config);
+    UpdateAPMExtraOptions(mExtendedFilter, mDelayAgnostic);
   }
   mNetPrefs = prefs;
   return NS_OK;
 }
 
-#undef HANDLE_APM_ERROR
-
-bool
-MediaEngineWebRTCMicrophoneSource::PassThrough(MediaStreamGraphImpl* aGraph) const
-{
-  MOZ_ASSERT(aGraph->CurrentDriver()->OnThread());
-  return mSkipProcessing;
-}
 void
-MediaEngineWebRTCMicrophoneSource::SetPassThrough(bool aPassThrough)
+MediaEngineWebRTCMicrophoneSource::UpdateAECSettingsIfNeeded(
+  bool aEnable,
+  webrtc::EcModes aMode)
 {
-  {
-    MutexAutoLock lock(mMutex);
-    if (mAllocations.IsEmpty()) {
-      // This can be the case, for now, because we're mixing mutable shared state
-      // and linearization via message queue. This is temporary.
-      return;
-    }
+  AssertIsOnOwningThread();
+
+  RefPtr<MediaEngineWebRTCMicrophoneSource> that = this;
+  RefPtr<MediaStreamGraphImpl> gripGraph = mStream->GraphImpl();
+  NS_DispatchToMainThread(media::NewRunnableFrom(
+    [ that, graph = std::move(gripGraph), aEnable, aMode ]() mutable {
+      class Message : public ControlMessage
+      {
+      public:
+        Message(AudioInputProcessing* aInputProcessing,
+                bool aEnable,
+                webrtc::EcModes aMode)
+          : ControlMessage(nullptr)
+          , mInputProcessing(aInputProcessing)
+          , mEnable(aEnable)
+          , mMode(aMode)
+        {
+        }
 
-    // mStream is always valid because it's set right before ::Start is called.
-    // SetPassThrough cannot be called before that, because it's running on the
-    // graph thread, and this cannot happen before the source has been started.
-    MOZ_ASSERT(mAllocations.Length() == 1 &&
-               mAllocations[0].mStream &&
-               mAllocations[0].mStream->GraphImpl()->CurrentDriver()->OnThread(),
-               "Wrong calling pattern, don't call this before ::SetTrack.");
-  }
-  mSkipProcessing = aPassThrough;
+        void Run() override
+        {
+          mInputProcessing->UpdateAECSettingsIfNeeded(mEnable, mMode);
+        }
+
+      protected:
+        RefPtr<AudioInputProcessing> mInputProcessing;
+        bool mEnable;
+        webrtc::EcModes mMode;
+      };
+
+      if (graph) {
+        graph->AppendMessage(
+          MakeUnique<Message>(that->mInputProcessing, aEnable, aMode));
+      }
+
+      return NS_OK;
+    }));
 }
 
-uint32_t
-MediaEngineWebRTCMicrophoneSource::GetRequestedInputChannelCount(MediaStreamGraphImpl* aGraphImpl)
+void
+MediaEngineWebRTCMicrophoneSource::UpdateAGCSettingsIfNeeded(
+  bool aEnable,
+  webrtc::AgcModes aMode)
 {
-  MOZ_ASSERT(aGraphImpl->CurrentDriver()->OnThread(),
-             "Wrong calling pattern, don't call this before ::SetTrack.");
+  AssertIsOnOwningThread();
 
-  if (mState == kReleased) {
-    // This source has been released, and is waiting for collection. Simply
-    // return 0, this source won't contribute to the channel count decision.
-    // Again, this is temporary.
-    return 0;
-  }
+  RefPtr<MediaEngineWebRTCMicrophoneSource> that = this;
+  RefPtr<MediaStreamGraphImpl> gripGraph = mStream->GraphImpl();
+  NS_DispatchToMainThread(media::NewRunnableFrom(
+    [ that, graph = std::move(gripGraph), aEnable, aMode ]() mutable {
+      class Message : public ControlMessage
+      {
+      public:
+        Message(AudioInputProcessing* aInputProcessing,
+                bool aEnable,
+                webrtc::AgcModes aMode)
+          : ControlMessage(nullptr)
+          , mInputProcessing(aInputProcessing)
+          , mEnable(aEnable)
+          , mMode(aMode)
+        {
+        }
 
-  return mRequestedInputChannelCount;
+        void Run() override
+        {
+          mInputProcessing->UpdateAGCSettingsIfNeeded(mEnable, mMode);
+        }
+
+      protected:
+        RefPtr<AudioInputProcessing> mInputProcessing;
+        bool mEnable;
+        webrtc::AgcModes mMode;
+      };
+
+      if (graph) {
+        graph->AppendMessage(
+          MakeUnique<Message>(that->mInputProcessing, aEnable, aMode));
+      }
+
+      return NS_OK;
+    }));
 }
 
 void
-MediaEngineWebRTCMicrophoneSource::SetRequestedInputChannelCount(
-  uint32_t aRequestedInputChannelCount)
+MediaEngineWebRTCMicrophoneSource::UpdateNSSettingsIfNeeded(
+  bool aEnable,
+  webrtc::NsModes aMode)
 {
-  MutexAutoLock lock(mMutex);
+  AssertIsOnOwningThread();
+
+  RefPtr<MediaEngineWebRTCMicrophoneSource> that = this;
+  RefPtr<MediaStreamGraphImpl> gripGraph = mStream->GraphImpl();
+  NS_DispatchToMainThread(media::NewRunnableFrom(
+    [ that, graph = std::move(gripGraph), aEnable, aMode ]() mutable {
+      class Message : public ControlMessage
+      {
+      public:
+        Message(AudioInputProcessing* aInputProcessing,
+                bool aEnable,
+                webrtc::NsModes aMode)
+          : ControlMessage(nullptr)
+          , mInputProcessing(aInputProcessing)
+          , mEnable(aEnable)
+          , mMode(aMode)
+        {
+        }
 
-  MOZ_ASSERT(mAllocations.Length() <= 1);
+        void Run() override
+        {
+          mInputProcessing->UpdateNSSettingsIfNeeded(mEnable, mMode);
+        }
+
+      protected:
+        RefPtr<AudioInputProcessing> mInputProcessing;
+        bool mEnable;
+        webrtc::NsModes mMode;
+      };
+
+      if (graph) {
+        graph->AppendMessage(
+          MakeUnique<Message>(that->mInputProcessing, aEnable, aMode));
+      }
+
+      return NS_OK;
+    }));
+}
 
-  if (mAllocations.IsEmpty()) {
-      return;
-  }
-  MOZ_ASSERT(mAllocations.Length() == 1 &&
-             mAllocations[0].mStream &&
-             mAllocations[0].mStream->GraphImpl()->CurrentDriver()->OnThread(),
-             "Wrong calling pattern, don't call this before ::SetTrack.");
-  mRequestedInputChannelCount = aRequestedInputChannelCount;
-  mAllocations[0].mStream->GraphImpl()->ReevaluateInputDevice();
+void
+MediaEngineWebRTCMicrophoneSource::UpdateAPMExtraOptions(bool aExtendedFilter,
+                                                         bool aDelayAgnostic)
+{
+  AssertIsOnOwningThread();
+
+  RefPtr<MediaEngineWebRTCMicrophoneSource> that = this;
+  RefPtr<MediaStreamGraphImpl> gripGraph = mStream->GraphImpl();
+  NS_DispatchToMainThread(media::NewRunnableFrom([
+    that,
+    graph = std::move(gripGraph),
+    aExtendedFilter,
+    aDelayAgnostic
+  ]() mutable {
+    class Message : public ControlMessage
+    {
+    public:
+      Message(AudioInputProcessing* aInputProcessing,
+              bool aExtendedFilter,
+              bool aDelayAgnostic)
+        : ControlMessage(nullptr)
+        , mInputProcessing(aInputProcessing)
+        , mExtendedFilter(aExtendedFilter)
+        , mDelayAgnostic(aDelayAgnostic)
+      {
+      }
+
+      void Run() override
+      {
+        mInputProcessing->UpdateAPMExtraOptions(mExtendedFilter,
+                                                mDelayAgnostic);
+      }
+
+    protected:
+      RefPtr<AudioInputProcessing> mInputProcessing;
+      bool mExtendedFilter;
+      bool mDelayAgnostic;
+    };
+
+    if (graph) {
+      graph->AppendMessage(MakeUnique<Message>(
+        that->mInputProcessing, aExtendedFilter, aDelayAgnostic));
+    }
+
+    return NS_OK;
+  }));
 }
 
 void
 MediaEngineWebRTCMicrophoneSource::ApplySettings(const MediaEnginePrefs& aPrefs,
                                                  RefPtr<MediaStreamGraphImpl> aGraph)
 {
   AssertIsOnOwningThread();
   MOZ_DIAGNOSTIC_ASSERT(aGraph);
-#ifdef DEBUG
-  {
-    MutexAutoLock lock(mMutex);
-    MOZ_ASSERT(mAllocations.Length() <= 1);
-  }
-#endif
 
   RefPtr<MediaEngineWebRTCMicrophoneSource> that = this;
   NS_DispatchToMainThread(media::NewRunnableFrom([that, graph = std::move(aGraph), aPrefs]() mutable {
     that->mSettings->mEchoCancellation.Value() = aPrefs.mAecOn;
     that->mSettings->mAutoGainControl.Value() = aPrefs.mAgcOn;
     that->mSettings->mNoiseSuppression.Value() = aPrefs.mNoiseOn;
     that->mSettings->mChannelCount.Value() = aPrefs.mChannels;
 
     class Message : public ControlMessage {
     public:
-      Message(MediaEngineWebRTCMicrophoneSource* aSource,
+      Message(AudioInputProcessing* aInputProcessing,
               bool aPassThrough,
               uint32_t aRequestedInputChannelCount)
         : ControlMessage(nullptr)
-        , mMicrophoneSource(aSource)
+        , mInputProcessing(aInputProcessing)
         , mPassThrough(aPassThrough)
         , mRequestedInputChannelCount(aRequestedInputChannelCount)
-        {}
+      {}
 
       void Run() override
       {
-        mMicrophoneSource->SetPassThrough(mPassThrough);
-        mMicrophoneSource->SetRequestedInputChannelCount(mRequestedInputChannelCount);
+        mInputProcessing->SetPassThrough(mPassThrough);
+        mInputProcessing->SetRequestedInputChannelCount(
+          mRequestedInputChannelCount);
       }
 
     protected:
-      RefPtr<MediaEngineWebRTCMicrophoneSource> mMicrophoneSource;
+      RefPtr<AudioInputProcessing> mInputProcessing;
       bool mPassThrough;
       uint32_t mRequestedInputChannelCount;
     };
 
     bool passThrough = !(aPrefs.mAecOn || aPrefs.mAgcOn || aPrefs.mNoiseOn);
     if (graph) {
-      graph->AppendMessage(MakeUnique<Message>(that,
-                                               passThrough,
-                                               aPrefs.mChannels));
+      graph->AppendMessage(MakeUnique<Message>(
+        that->mInputProcessing, passThrough, aPrefs.mChannels));
     }
 
     return NS_OK;
   }));
 }
 
 nsresult
 MediaEngineWebRTCMicrophoneSource::Allocate(const dom::MediaTrackConstraints &aConstraints,
                                             const MediaEnginePrefs& aPrefs,
                                             const nsString& aDeviceId,
                                             const ipc::PrincipalInfo& aPrincipalInfo,
                                             AllocationHandle** aOutHandle,
                                             const char** aOutBadConstraint)
 {
   AssertIsOnOwningThread();
   MOZ_ASSERT(aOutHandle);
+  // This is going away in bug 1497254
   auto handle = MakeRefPtr<AllocationHandle>(aConstraints, aPrincipalInfo,
                                              aDeviceId);
-
-#ifdef DEBUG
-  {
-    MutexAutoLock lock(mMutex);
-    MOZ_ASSERT(mAllocations.Length() <= 1);
-  }
-#endif
-  LOG(("Mic source %p allocation %p Allocate()", this, handle.get()));
-
   nsresult rv = ReevaluateAllocation(handle, nullptr, aPrefs, aDeviceId,
                                      aOutBadConstraint);
   if (NS_FAILED(rv)) {
     return rv;
   }
 
-  {
-    MutexAutoLock lock(mMutex);
-    MOZ_ASSERT(mAllocations.IsEmpty(), "Only allocate once.");
-    mAllocations.AppendElement(Allocation(handle));
-  }
+  MOZ_ASSERT(!mHandle, "Only allocate once.");
+  mHandle = handle;
 
   handle.forget(aOutHandle);
   return NS_OK;
 }
 
+
 nsresult
 MediaEngineWebRTCMicrophoneSource::Deallocate(const RefPtr<const AllocationHandle>& aHandle)
 {
   AssertIsOnOwningThread();
 
   MOZ_ASSERT(mState == kStopped);
 
-  size_t i = mAllocations.IndexOf(aHandle, 0, AllocationHandleComparator());
-  MOZ_DIAGNOSTIC_ASSERT(i != mAllocations.NoIndex);
-  MOZ_DIAGNOSTIC_ASSERT(!mAllocations[i].mEnabled,
-                        "Source should be stopped for the track before removing");
+  class EndTrackMessage : public ControlMessage
+  {
+    public:
+      EndTrackMessage(MediaStream* aStream,
+                      AudioInputProcessing* aAudioInputProcessing,
+                      TrackID aTrackID)
+      : ControlMessage(aStream)
+      , mInputProcessing(aAudioInputProcessing)
+      , mTrackID(aTrackID)
+    {
+    }
+
+    void Run() override
+    {
+      mInputProcessing->End();
+      mStream->AsSourceStream()->EndTrack(mTrackID);
+    }
 
-  if (mAllocations[i].mStream && IsTrackIDExplicit(mAllocations[i].mTrackID)) {
-    mAllocations[i].mStream->EndTrack(mAllocations[i].mTrackID);
+  protected:
+    RefPtr<AudioInputProcessing> mInputProcessing;
+    TrackID mTrackID;
+  };
+
+  if (mStream && IsTrackIDExplicit(mTrackID)) {
+    RefPtr<MediaStream> sourceStream = mStream;
+    RefPtr<MediaStreamGraphImpl> graphImpl = mStream->GraphImpl();
+    RefPtr<AudioInputProcessing> inputProcessing = mInputProcessing;
+    NS_DispatchToMainThread(media::NewRunnableFrom(
+      [ graph = std::move(graphImpl),
+        stream = std::move(sourceStream),
+        audioInputProcessing = std::move(inputProcessing),
+        trackID = mTrackID]() mutable {
+        if (graph) {
+          graph->AppendMessage(
+              MakeUnique<EndTrackMessage>(stream, audioInputProcessing, trackID));
+        }
+        return NS_OK;
+      }
+    ));
   }
 
-  {
-    MutexAutoLock lock(mMutex);
-    MOZ_ASSERT(mAllocations.Length() == 1, "Only allocate once.");
-    mAllocations.RemoveElementAt(i);
-  }
+  MOZ_ASSERT(mHandle, "Only deallocate once");
 
-  if (mAllocations.IsEmpty()) {
-    // If empty, no callbacks to deliver data should be occuring
-    MOZ_ASSERT(mState != kReleased, "Source not allocated");
-    MOZ_ASSERT(mState != kStarted, "Source not stopped");
+  // Reset all state. This is not strictly necessary, this instance will get
+  // destroyed soon.
+  mHandle = nullptr;
+  mStream = nullptr;
+  mTrackID = TRACK_NONE;
+  mPrincipal = PRINCIPAL_HANDLE_NONE;
 
-    MutexAutoLock lock(mMutex);
-    mState = kReleased;
-    LOG(("Audio device %s deallocated", NS_ConvertUTF16toUTF8(mDeviceName).get()));
-  } else {
-    LOG(("Audio device %s deallocated but still in use", NS_ConvertUTF16toUTF8(mDeviceName).get()));
-  }
+  // If empty, no callbacks to deliver data should be occuring
+  MOZ_ASSERT(mState != kReleased, "Source not allocated");
+  MOZ_ASSERT(mState != kStarted, "Source not stopped");
+
+  mState = kReleased;
+  LOG(("Audio device %s deallocated", NS_ConvertUTF16toUTF8(mDeviceName).get()));
+
   return NS_OK;
 }
 
 nsresult
 MediaEngineWebRTCMicrophoneSource::SetTrack(const RefPtr<const AllocationHandle>& aHandle,
                                             const RefPtr<SourceMediaStream>& aStream,
                                             TrackID aTrackID,
                                             const PrincipalHandle& aPrincipal)
 {
   AssertIsOnOwningThread();
   MOZ_ASSERT(aStream);
   MOZ_ASSERT(IsTrackIDExplicit(aTrackID));
 
-  if (mAllocations.Length() == 1 &&
-      mAllocations[0].mStream &&
-      mAllocations[0].mStream->Graph() != aStream->Graph()) {
+  if (mStream &&
+      mStream->Graph() != aStream->Graph()) {
     return NS_ERROR_NOT_AVAILABLE;
   }
 
-  MOZ_ASSERT(mAllocations.Length() == 1, "Only allocate once.");
-
-  size_t i = mAllocations.IndexOf(aHandle, 0, AllocationHandleComparator());
-  MOZ_DIAGNOSTIC_ASSERT(i != mAllocations.NoIndex);
-  MOZ_ASSERT(!mAllocations[i].mStream);
-  MOZ_ASSERT(mAllocations[i].mTrackID == TRACK_NONE);
-  MOZ_ASSERT(mAllocations[i].mPrincipal == PRINCIPAL_HANDLE_NONE);
-  {
-    MutexAutoLock lock(mMutex);
-    mAllocations[i].mStream = aStream;
-    mAllocations[i].mTrackID = aTrackID;
-    mAllocations[i].mPrincipal = aPrincipal;
-  }
+  MOZ_ASSERT(!mStream);
+  MOZ_ASSERT(mTrackID == TRACK_NONE);
+  MOZ_ASSERT(mPrincipal == PRINCIPAL_HANDLE_NONE);
+  mStream = aStream;
+  mTrackID = aTrackID;
+  mPrincipal = aPrincipal;
 
   AudioSegment* segment = new AudioSegment();
 
   aStream->AddAudioTrack(aTrackID,
                          aStream->GraphRate(),
                          0,
                          segment,
                          SourceMediaStream::ADDTRACK_QUEUED);
 
-  // XXX Make this based on the pref.
-  aStream->RegisterForAudioMixing();
-
   LOG(("Stream %p registered for microphone capture", aStream.get()));
   return NS_OK;
 }
 
+class StartStopMessage : public ControlMessage
+{
+  public:
+    enum StartStop
+    {
+      Start,
+      Stop
+    };
+
+    StartStopMessage(AudioInputProcessing* aInputProcessing, StartStop aAction)
+      : ControlMessage(nullptr)
+      , mInputProcessing(aInputProcessing)
+      , mAction(aAction)
+    {
+  }
+
+  void Run() override
+  {
+    if (mAction == StartStopMessage::Start) {
+      mInputProcessing->Start();
+    } else if (mAction == StartStopMessage::Stop){
+      mInputProcessing->Stop();
+    } else {
+      MOZ_CRASH("Invalid enum value");
+    }
+  }
+
+protected:
+  RefPtr<AudioInputProcessing> mInputProcessing;
+  StartStop mAction;
+};
+
 nsresult
 MediaEngineWebRTCMicrophoneSource::Start(const RefPtr<const AllocationHandle>& aHandle)
 {
   AssertIsOnOwningThread();
+
+  // This spans setting both the enabled state and mState.
   if (mState == kStarted) {
     return NS_OK;
   }
 
   MOZ_ASSERT(mState == kAllocated || mState == kStopped);
 
-  size_t i = mAllocations.IndexOf(aHandle, 0, AllocationHandleComparator());
-  MOZ_DIAGNOSTIC_ASSERT(i != mAllocations.NoIndex,
-                        "Can't start track that hasn't been added");
-  Allocation& allocation = mAllocations[i];
-
   CubebUtils::AudioDeviceID deviceID = mDeviceInfo->DeviceID();
-  if (allocation.mStream->GraphImpl()->InputDeviceID() &&
-      allocation.mStream->GraphImpl()->InputDeviceID() != deviceID) {
+  if (mStream->GraphImpl()->InputDeviceID() &&
+      mStream->GraphImpl()->InputDeviceID() != deviceID) {
     // For now, we only allow opening a single audio input device per document,
     // because we can only have one MSG per document.
     return NS_ERROR_FAILURE;
   }
 
   // On Linux with PulseAudio, we still only allow a certain number of audio
   // input stream in each content process, because of issues related to audio
   // remoting and PulseAudio.
@@ -776,191 +715,410 @@ MediaEngineWebRTCMicrophoneSource::Start
       sInputStreamsOpen == CubebUtils::GetMaxInputStreams()) {
     LOG(("%p Already capturing audio in this process, aborting", this));
     return NS_ERROR_FAILURE;
   }
 
   sInputStreamsOpen++;
 #endif
 
-  MOZ_ASSERT(!allocation.mEnabled, "Source already started");
-  {
-    // This spans setting both the enabled state and mState.
-    MutexAutoLock lock(mMutex);
-    allocation.mEnabled = true;
+  AssertIsOnOwningThread();
+
+  mInputProcessing = new AudioInputProcessing(
+    mDeviceMaxChannelCount, mStream, mTrackID, mPrincipal);
 
-#ifdef DEBUG
-    // Ensure that callback-tracking state is reset when callbacks start coming.
-    allocation.mLastCallbackAppendTime = 0;
-#endif
-    allocation.mLiveFramesAppended = false;
-    allocation.mLiveSilenceAppended = false;
+  RefPtr<MediaEngineWebRTCMicrophoneSource> that = this;
+  RefPtr<MediaStreamGraphImpl> gripGraph = mStream->GraphImpl();
+  NS_DispatchToMainThread(media::NewRunnableFrom(
+    [ that, graph = std::move(gripGraph), deviceID ]() mutable {
 
-    if (!mListener) {
-      mListener = new WebRTCAudioDataListener(this);
-    }
+      if (graph) {
+        graph->AppendMessage(MakeUnique<StartStopMessage>(
+          that->mInputProcessing, StartStopMessage::Start));
+      }
 
-    // Make sure logger starts before capture
-    AsyncLatencyLogger::Get(true);
+      that->mStream->OpenAudioInput(deviceID, that->mInputProcessing);
 
-    allocation.mStream->OpenAudioInput(deviceID, mListener);
+      return NS_OK;
+    }));
 
-    MOZ_ASSERT(mState != kReleased);
-    mState = kStarted;
-  }
+  MOZ_ASSERT(mState != kReleased);
+  mState = kStarted;
 
-  ApplySettings(mNetPrefs, allocation.mStream->GraphImpl());
+  ApplySettings(mNetPrefs, mStream->GraphImpl());
 
   return NS_OK;
 }
 
 nsresult
 MediaEngineWebRTCMicrophoneSource::Stop(const RefPtr<const AllocationHandle>& aHandle)
 {
-  MOZ_ASSERT(mAllocations.Length() <= 1);
   AssertIsOnOwningThread();
 
   LOG(("Mic source %p allocation %p Stop()", this, aHandle.get()));
 
-  size_t i = mAllocations.IndexOf(aHandle, 0, AllocationHandleComparator());
-  MOZ_DIAGNOSTIC_ASSERT(i != mAllocations.NoIndex,
-                        "Cannot stop track that we don't know about");
-  Allocation& allocation = mAllocations[i];
-  MOZ_ASSERT(allocation.mStream, "SetTrack must have been called before ::Stop");
+  MOZ_ASSERT(mStream, "SetTrack must have been called before ::Stop");
 
-  if (!allocation.mEnabled) {
+  if (mState == kStopped) {
     // Already stopped - this is allowed
     return NS_OK;
   }
 
-  {
-    // This spans setting both the enabled state and mState.
-    MutexAutoLock lock(mMutex);
-    allocation.mEnabled = false;
-
-    CubebUtils::AudioDeviceID deviceID = mDeviceInfo->DeviceID();
-    Maybe<CubebUtils::AudioDeviceID> id = Some(deviceID);
-    allocation.mStream->CloseAudioInput(id, mListener);
-    mListener = nullptr;
 #ifdef MOZ_PULSEAUDIO
     MOZ_ASSERT(sInputStreamsOpen > 0);
     sInputStreamsOpen--;
 #endif
+  RefPtr<MediaEngineWebRTCMicrophoneSource> that = this;
+  RefPtr<MediaStreamGraphImpl> gripGraph = mStream->GraphImpl();
+  NS_DispatchToMainThread(media::NewRunnableFrom(
+    [ that, graph = std::move(gripGraph), stream = mStream ]() mutable {
 
-    if (HasEnabledTrack()) {
-      // Another track is keeping us from stopping
+      if (graph) {
+        graph->AppendMessage(MakeUnique<StartStopMessage>(
+          that->mInputProcessing, StartStopMessage::Stop));
+      }
+
+      CubebUtils::AudioDeviceID deviceID = that->mDeviceInfo->DeviceID();
+      Maybe<CubebUtils::AudioDeviceID> id = Some(deviceID);
+      stream->CloseAudioInput(id, that->mInputProcessing);
+
       return NS_OK;
-    }
+    }));
 
-    MOZ_ASSERT(mState == kStarted, "Should be started when stopping");
-    mState = kStopped;
-  }
+  MOZ_ASSERT(mState == kStarted, "Should be started when stopping");
+  mState = kStopped;
 
   return NS_OK;
 }
 
 void
 MediaEngineWebRTCMicrophoneSource::GetSettings(dom::MediaTrackSettings& aOutSettings) const
 {
   MOZ_ASSERT(NS_IsMainThread());
   aOutSettings = *mSettings;
 }
 
+AudioInputProcessing::AudioInputProcessing(uint32_t aMaxChannelCount,
+                                           RefPtr<SourceMediaStream> aStream,
+                                           TrackID aTrackID,
+                                           const PrincipalHandle& aPrincipalHandle)
+  : mStream(std::move(aStream))
+  , mAudioProcessing(AudioProcessing::Create())
+  , mRequestedInputChannelCount(aMaxChannelCount)
+  , mSkipProcessing(false)
+  , mInputDownmixBuffer(MAX_SAMPLING_FREQ * MAX_CHANNELS / 100)
+#ifdef DEBUG
+  , mLastCallbackAppendTime(0)
+#endif
+  , mLiveFramesAppended(false)
+  , mLiveSilenceAppended(false)
+  , mTrackID(aTrackID)
+  , mPrincipal(aPrincipalHandle)
+  , mEnabled(false)
+  , mEnded(false)
+{
+}
+
 void
-MediaEngineWebRTCMicrophoneSource::Pull(const RefPtr<const AllocationHandle>& aHandle,
-                                        const RefPtr<SourceMediaStream>& aStream,
-                                        TrackID aTrackID,
-                                        StreamTime aDesiredTime,
-                                        const PrincipalHandle& aPrincipalHandle)
+AudioInputProcessing::Disconnect(MediaStreamGraphImpl* aGraph)
+{
+  // This method is just for asserts.
+  MOZ_ASSERT(aGraph->CurrentDriver()->OnThread());
+}
+
+void
+MediaEngineWebRTCMicrophoneSource::Shutdown()
+{
+  AssertIsOnOwningThread();
+
+  if (mState == kStarted) {
+    Stop(mHandle);
+    MOZ_ASSERT(mState == kStopped);
+  }
+
+  MOZ_ASSERT(mState == kAllocated || mState == kStopped);
+  Deallocate(mHandle);
+  MOZ_ASSERT(mState == kReleased);
+}
+
+bool
+AudioInputProcessing::PassThrough(MediaStreamGraphImpl* aGraph) const
+{
+  MOZ_ASSERT(aGraph->CurrentDriver()->OnThread());
+  return mSkipProcessing;
+}
+
+void
+AudioInputProcessing::SetPassThrough(bool aPassThrough)
+{
+  mSkipProcessing = aPassThrough;
+}
+
+uint32_t
+AudioInputProcessing::GetRequestedInputChannelCount(
+  MediaStreamGraphImpl* aGraphImpl)
+{
+  return mRequestedInputChannelCount;
+}
+
+void
+AudioInputProcessing::SetRequestedInputChannelCount(
+  uint32_t aRequestedInputChannelCount)
+{
+  mRequestedInputChannelCount = aRequestedInputChannelCount;
+
+  mStream->GraphImpl()->ReevaluateInputDevice();
+}
+
// This does an early return in case of error.
// NOTE: no trailing semicolon after `while (0)` — the caller supplies it.
// This keeps the macro a single statement, so it remains safe inside an
// unbraced if/else (a trailing `;` in the macro plus the caller's `;` would
// produce an empty statement and break `if (...) HANDLE_APM_ERROR(x); else`).
#define HANDLE_APM_ERROR(fn)                                                   \
  do {                                                                         \
    int rv = fn;                                                               \
    if (rv != AudioProcessing::kNoError) {                                     \
      MOZ_ASSERT_UNREACHABLE("APM error in " #fn);                             \
      return;                                                                  \
    }                                                                          \
  } while (0)
+
+void
+AudioInputProcessing::UpdateAECSettingsIfNeeded(bool aEnable, EcModes aMode)
+{
+  using webrtc::EcModes;
+
+  EchoCancellation::SuppressionLevel level;
+
+  switch (aMode) {
+    case EcModes::kEcUnchanged:
+      level = mAudioProcessing->echo_cancellation()->suppression_level();
+      break;
+    case EcModes::kEcConference:
+      level = EchoCancellation::kHighSuppression;
+      break;
+    case EcModes::kEcDefault:
+      level = EchoCancellation::kModerateSuppression;
+      break;
+    case EcModes::kEcAec:
+      level = EchoCancellation::kModerateSuppression;
+      break;
+    case EcModes::kEcAecm:
+      // No suppression level to set for the mobile echo canceller
+      break;
+    default:
+      MOZ_LOG(GetMediaManagerLog(), LogLevel::Error, ("Bad EcMode value"));
+      MOZ_ASSERT_UNREACHABLE("Bad pref set in all.js or in about:config"
+                             " for the echo cancelation mode.");
+      // fall back to something sensible in release
+      level = EchoCancellation::kModerateSuppression;
+      break;
+  }
+
+  // AECm and AEC are mutually exclusive.
+  if (aMode == EcModes::kEcAecm) {
+    HANDLE_APM_ERROR(mAudioProcessing->echo_cancellation()->Enable(false));
+    HANDLE_APM_ERROR(mAudioProcessing->echo_control_mobile()->Enable(aEnable));
+  } else {
+    HANDLE_APM_ERROR(mAudioProcessing->echo_control_mobile()->Enable(false));
+    HANDLE_APM_ERROR(mAudioProcessing->echo_cancellation()->Enable(aEnable));
+    HANDLE_APM_ERROR(
+      mAudioProcessing->echo_cancellation()->set_suppression_level(level));
+  }
+}
+
+void
+AudioInputProcessing::UpdateAGCSettingsIfNeeded(bool aEnable, AgcModes aMode)
+{
+#if defined(WEBRTC_IOS) || defined(ATA) || defined(WEBRTC_ANDROID)
+  if (aMode == kAgcAdaptiveAnalog) {
+    MOZ_LOG(GetMediaManagerLog(),
+            LogLevel::Error,
+            ("Invalid AGC mode kAgcAdaptiveAnalog on mobile"));
+    MOZ_ASSERT_UNREACHABLE("Bad pref set in all.js or in about:config"
+                           " for the auto gain, on mobile.");
+    aMode = kAgcDefault;
+  }
+#endif
+  GainControl::Mode mode = kDefaultAgcMode;
+
+  switch (aMode) {
+    case AgcModes::kAgcDefault:
+      mode = kDefaultAgcMode;
+      break;
+    case AgcModes::kAgcUnchanged:
+      mode = mAudioProcessing->gain_control()->mode();
+      break;
+    case AgcModes::kAgcFixedDigital:
+      mode = GainControl::Mode::kFixedDigital;
+      break;
+    case AgcModes::kAgcAdaptiveAnalog:
+      mode = GainControl::Mode::kAdaptiveAnalog;
+      break;
+    case AgcModes::kAgcAdaptiveDigital:
+      mode = GainControl::Mode::kAdaptiveDigital;
+      break;
+    default:
+      MOZ_ASSERT_UNREACHABLE("Bad pref set in all.js or in about:config"
+                             " for the auto gain.");
+      // This is a good fallback, it works regardless of the platform.
+      mode = GainControl::Mode::kAdaptiveDigital;
+      break;
+  }
+
+  HANDLE_APM_ERROR(mAudioProcessing->gain_control()->set_mode(mode));
+  HANDLE_APM_ERROR(mAudioProcessing->gain_control()->Enable(aEnable));
+}
+
+void
+AudioInputProcessing::UpdateNSSettingsIfNeeded(bool aEnable, NsModes aMode)
+{
+  NoiseSuppression::Level nsLevel;
+
+  switch (aMode) {
+    case NsModes::kNsDefault:
+      nsLevel = kDefaultNsMode;
+      break;
+    case NsModes::kNsUnchanged:
+      nsLevel = mAudioProcessing->noise_suppression()->level();
+      break;
+    case NsModes::kNsConference:
+      nsLevel = NoiseSuppression::kHigh;
+      break;
+    case NsModes::kNsLowSuppression:
+      nsLevel = NoiseSuppression::kLow;
+      break;
+    case NsModes::kNsModerateSuppression:
+      nsLevel = NoiseSuppression::kModerate;
+      break;
+    case NsModes::kNsHighSuppression:
+      nsLevel = NoiseSuppression::kHigh;
+      break;
+    case NsModes::kNsVeryHighSuppression:
+      nsLevel = NoiseSuppression::kVeryHigh;
+      break;
+    default:
+      MOZ_ASSERT_UNREACHABLE("Bad pref set in all.js or in about:config"
+                             " for the noise suppression.");
+      // Pick something sensible as a faillback in release.
+      nsLevel = NoiseSuppression::kModerate;
+  }
+  HANDLE_APM_ERROR(mAudioProcessing->noise_suppression()->set_level(nsLevel));
+  HANDLE_APM_ERROR(mAudioProcessing->noise_suppression()->Enable(aEnable));
+}
+
+#undef HANDLE_APM_ERROR
+
+void
+AudioInputProcessing::UpdateAPMExtraOptions(bool aExtendedFilter,
+                                            bool aDelayAgnostic)
+{
+  webrtc::Config config;
+  config.Set<webrtc::ExtendedFilter>(
+    new webrtc::ExtendedFilter(aExtendedFilter));
+  config.Set<webrtc::DelayAgnostic>(new webrtc::DelayAgnostic(aDelayAgnostic));
+
+  mAudioProcessing->SetExtraOptions(config);
+}
+
+void
+AudioInputProcessing::Start()
+{
+  mEnabled = true;
+}
+
+void
+AudioInputProcessing::Stop()
+{
+  mEnabled = false;
+}
+
+void
+AudioInputProcessing::Pull(const RefPtr<const AllocationHandle>& aHandle,
+                           const RefPtr<SourceMediaStream>& aStream,
+                           TrackID aTrackID,
+                           StreamTime aDesiredTime,
+                           const PrincipalHandle& aPrincipalHandle)
 {
   TRACE_AUDIO_CALLBACK_COMMENT("SourceMediaStream %p track %i",
                                aStream.get(), aTrackID);
   StreamTime delta;
 
-  {
-    MutexAutoLock lock(mMutex);
-    size_t i = mAllocations.IndexOf(aHandle, 0, AllocationHandleComparator());
-    if (i == mAllocations.NoIndex) {
-      // This handle must have been deallocated. That's fine, and its track
-      // will already be ended. No need to do anything.
-      return;
-    }
+  if (mEnded) {
+    return;
+  }
+
+  delta = aDesiredTime - aStream->GetEndOfAppendedData(aTrackID);
 
-    // We don't want to GetEndOfAppendedData() above at the declaration if the
-    // allocation was removed and the track non-existant. An assert will fail.
-    delta = aDesiredTime - aStream->GetEndOfAppendedData(aTrackID);
+  if (delta < 0) {
+    LOG_FRAMES(
+      ("Not appending silence; %" PRId64 " frames already buffered", -delta));
+    return;
+  }
 
-    if (delta < 0) {
-      LOG_FRAMES(("Not appending silence for allocation %p; %" PRId64 " frames already buffered",
-                  mAllocations[i].mHandle.get(), -delta));
-      return;
-    }
+  if (!mLiveFramesAppended ||
+      !mLiveSilenceAppended) {
+    // These are the iterations after starting or resuming audio capture.
+    // Make sure there's at least one extra block buffered until audio
+    // callbacks come in. We also allow appending silence one time after
+    // audio callbacks have started, to cover the case where audio callbacks
+    // start appending data immediately and there is no extra data buffered.
+    delta += WEBAUDIO_BLOCK_SIZE;
 
-    if (!mAllocations[i].mLiveFramesAppended ||
-        !mAllocations[i].mLiveSilenceAppended) {
-      // These are the iterations after starting or resuming audio capture.
-      // Make sure there's at least one extra block buffered until audio
-      // callbacks come in. We also allow appending silence one time after
-      // audio callbacks have started, to cover the case where audio callbacks
-      // start appending data immediately and there is no extra data buffered.
-      delta += WEBAUDIO_BLOCK_SIZE;
+    // If we're supposed to be packetizing but there's no packetizer yet,
+    // there must not have been any live frames appended yet.
+    // If there were live frames appended and we haven't appended the
+    // right amount of silence, we'll have to append silence once more,
+    // failing the other assert below.
+    MOZ_ASSERT_IF(!PassThrough(aStream->GraphImpl()) && !mPacketizerInput,
+                  !mLiveFramesAppended);
 
-      // If we're supposed to be packetizing but there's no packetizer yet,
-      // there must not have been any live frames appended yet.
-      // If there were live frames appended and we haven't appended the
-      // right amount of silence, we'll have to append silence once more,
-      // failing the other assert below.
-      MOZ_ASSERT_IF(!PassThrough(aStream->GraphImpl()) && !mPacketizerInput,
-                    !mAllocations[i].mLiveFramesAppended);
+    if (!PassThrough(aStream->GraphImpl()) && mPacketizerInput) {
+      // Processing is active and is processed in chunks of 10ms through the
+      // input packetizer. We allow for 10ms of silence on the track to
+      // accomodate the buffering worst-case.
+      delta += mPacketizerInput->PacketSize();
+    }
+  }
 
-      if (!PassThrough(aStream->GraphImpl()) && mPacketizerInput) {
-        // Processing is active and is processed in chunks of 10ms through the
-        // input packetizer. We allow for 10ms of silence on the track to
-        // accomodate the buffering worst-case.
-        delta += mPacketizerInput->PacketSize();
-      }
-    }
-
-    LOG_FRAMES(("Pulling %" PRId64 " frames of silence for allocation %p",
-                delta, mAllocations[i].mHandle.get()));
+  LOG_FRAMES(("Pulling %" PRId64 " frames of silence for allocation %p",
+              delta,
+              aHandle.get()));
 
-    // This assertion fails when we append silence here in the same iteration
-    // as there were real audio samples already appended by the audio callback.
-    // Note that this is exempted until live samples and a subsequent chunk of
-    // silence have been appended to the track. This will cover cases like:
-    // - After Start(), there is silence (maybe multiple times) appended before
-    //   the first audio callback.
-    // - After Start(), there is real data (maybe multiple times) appended
-    //   before the first graph iteration.
-    // And other combinations of order of audio sample sources.
-    MOZ_ASSERT_IF(
-      mAllocations[i].mEnabled &&
-      mAllocations[i].mLiveFramesAppended &&
-      mAllocations[i].mLiveSilenceAppended,
-      aStream->GraphImpl()->IterationEnd() >
-      mAllocations[i].mLastCallbackAppendTime);
+  // This assertion fails when we append silence here in the same iteration
+  // as there were real audio samples already appended by the audio callback.
+  // Note that this is exempted until live samples and a subsequent chunk of
+  // silence have been appended to the track. This will cover cases like:
+  // - After Start(), there is silence (maybe multiple times) appended before
+  //   the first audio callback.
+  // - After Start(), there is real data (maybe multiple times) appended
+  //   before the first graph iteration.
+  // And other combinations of order of audio sample sources.
+  MOZ_ASSERT_IF(
+    mEnabled &&
+    mLiveFramesAppended &&
+    mLiveSilenceAppended,
+    aStream->GraphImpl()->IterationEnd() >
+    mLastCallbackAppendTime);
 
-    if (mAllocations[i].mLiveFramesAppended) {
-      mAllocations[i].mLiveSilenceAppended = true;
-    }
+  if (mLiveFramesAppended) {
+    mLiveSilenceAppended = true;
   }
 
   AudioSegment audio;
   audio.AppendNullData(delta);
   aStream->AppendToTrack(aTrackID, &audio);
 }
 
 void
-MediaEngineWebRTCMicrophoneSource::NotifyOutputData(MediaStreamGraphImpl* aGraph,
-                                                    AudioDataValue* aBuffer,
-                                                    size_t aFrames,
-                                                    TrackRate aRate,
-                                                    uint32_t aChannels)
+AudioInputProcessing::NotifyOutputData(MediaStreamGraphImpl* aGraph,
+                                       AudioDataValue* aBuffer,
+                                       size_t aFrames,
+                                       TrackRate aRate,
+                                       uint32_t aChannels)
 {
   MOZ_ASSERT(aGraph->CurrentDriver()->OnThread());
+  MOZ_ASSERT(mEnabled);
 
   if (!mPacketizerOutput ||
       mPacketizerOutput->PacketSize() != aRate/100u ||
       mPacketizerOutput->Channels() != aChannels) {
     // It's ok to drop the audio still in the packetizer here: if this changes,
     // we changed devices or something.
     mPacketizerOutput =
       new AudioPacketizer<AudioDataValue, float>(aRate/100, aChannels);
@@ -1039,40 +1197,34 @@ MediaEngineWebRTCMicrophoneSource::Notif
                                              deinterleavedPacketDataChannelPointers.Elements());
 
     MOZ_ASSERT(!err, "Could not process the reverse stream.");
   }
 }
 
 // Only called if we're not in passthrough mode
 void
-MediaEngineWebRTCMicrophoneSource::PacketizeAndProcess(MediaStreamGraphImpl* aGraph,
-                                                       const AudioDataValue* aBuffer,
-                                                       size_t aFrames,
-                                                       TrackRate aRate,
-                                                       uint32_t aChannels)
+AudioInputProcessing::PacketizeAndProcess(MediaStreamGraphImpl* aGraph,
+                                          const AudioDataValue* aBuffer,
+                                          size_t aFrames,
+                                          TrackRate aRate,
+                                          uint32_t aChannels)
 {
   MOZ_ASSERT(!PassThrough(aGraph), "This should be bypassed when in PassThrough mode.");
+  MOZ_ASSERT(mEnabled);
   size_t offset = 0;
 
   if (!mPacketizerInput ||
       mPacketizerInput->PacketSize() != aRate/100u ||
       mPacketizerInput->Channels() != aChannels) {
     // It's ok to drop the audio still in the packetizer here.
     mPacketizerInput =
       new AudioPacketizer<AudioDataValue, float>(aRate/100, aChannels);
   }
 
-  // On initial capture, throw away all far-end data except the most recent
-  // sample since it's already irrelevant and we want to avoid confusing the AEC
-  // far-end input code with "old" audio.
-  if (!mStarted) {
-    mStarted  = true;
-  }
-
   // Packetize our input data into 10ms chunks, deinterleave into planar channel
   // buffers, process, and append to the right MediaStreamTrack.
   mPacketizerInput->Input(aBuffer, static_cast<uint32_t>(aFrames));
 
   while (mPacketizerInput->PacketsAvailable()) {
     uint32_t samplesPerPacket = mPacketizerInput->PacketSize() *
       mPacketizerInput->Channels();
     if (mInputBuffer.Length() < samplesPerPacket) {
@@ -1122,181 +1274,119 @@ MediaEngineWebRTCMicrophoneSource::Packe
       processedOutputChannelPointersConst[i] = static_cast<float*>(buffer->Data()) + offset;
       offset += mPacketizerInput->PacketSize();
     }
 
     mAudioProcessing->ProcessStream(deinterleavedPacketizedInputDataChannelPointers.Elements(),
                                     inputConfig,
                                     outputConfig,
                                     processedOutputChannelPointers.Elements());
-    MutexAutoLock lock(mMutex);
-    if (mState != kStarted) {
-      return;
-    }
+
 
     AudioSegment segment;
-    for (Allocation& allocation : mAllocations) {
-      if (!allocation.mStream) {
-        continue;
-      }
+    if (!mStream->GraphImpl()) {
+      // The DOMMediaStream that owns mStream has been cleaned up
+      // and MediaStream::DestroyImpl() has run in the MSG. This is fine and
+      // can happen before the MediaManager thread gets to stop capture for
+      // this MediaStream.
+      continue;
+    }
 
-      if (!allocation.mStream->GraphImpl()) {
-        // The DOMMediaStream that owns allocation.mStream has been cleaned up
-        // and MediaStream::DestroyImpl() has run in the MSG. This is fine and
-        // can happen before the MediaManager thread gets to stop capture for
-        // this allocation.
-        continue;
-      }
-
-      if (!allocation.mEnabled) {
-        continue;
-      }
-
-      LOG_FRAMES(("Appending %" PRIu32 " frames of packetized audio for allocation %p",
-                  mPacketizerInput->PacketSize(), allocation.mHandle.get()));
+    LOG_FRAMES(("Appending %" PRIu32 " frames of packetized audio",
+                mPacketizerInput->PacketSize()));
 
 #ifdef DEBUG
-      allocation.mLastCallbackAppendTime =
-        allocation.mStream->GraphImpl()->IterationEnd();
+    mLastCallbackAppendTime = mStream->GraphImpl()->IterationEnd();
 #endif
-      allocation.mLiveFramesAppended = true;
+    mLiveFramesAppended = true;
 
-      // We already have planar audio data of the right format. Insert into the
-      // MSG.
-      MOZ_ASSERT(processedOutputChannelPointers.Length() == aChannels);
-      RefPtr<SharedBuffer> other = buffer;
-      segment.AppendFrames(other.forget(),
-                           processedOutputChannelPointersConst,
-                           mPacketizerInput->PacketSize(),
-                           allocation.mPrincipal);
-      allocation.mStream->AppendToTrack(allocation.mTrackID, &segment);
-    }
+    // We already have planar audio data of the right format. Insert into the
+    // MSG.
+    MOZ_ASSERT(processedOutputChannelPointers.Length() == aChannels);
+    RefPtr<SharedBuffer> other = buffer;
+    segment.AppendFrames(other.forget(),
+                         processedOutputChannelPointersConst,
+                         mPacketizerInput->PacketSize(),
+                         mPrincipal);
+    mStream->AppendToTrack(mTrackID, &segment);
   }
 }
 
 template<typename T>
 void
-MediaEngineWebRTCMicrophoneSource::InsertInGraph(const T* aBuffer,
-                                                 size_t aFrames,
-                                                 uint32_t aChannels)
+AudioInputProcessing::InsertInGraph(const T* aBuffer,
+                                    size_t aFrames,
+                                    uint32_t aChannels)
 {
-  MutexAutoLock lock(mMutex);
-
-  if (mState != kStarted) {
+  if (!mStream->GraphImpl()) {
+    // The DOMMediaStream that owns mStream has been cleaned up
+    // and MediaStream::DestroyImpl() has run in the MSG. This is fine and
+    // can happen before the MediaManager thread gets to stop capture for
+    // this MediaStream.
     return;
   }
 
-  if (MOZ_LOG_TEST(AudioLogModule(), LogLevel::Debug)) {
-    mTotalFrames += aFrames;
-    if (mAllocations[0].mStream &&
-        mTotalFrames > mLastLogFrames +
-                       mAllocations[0].mStream->GraphRate()) { // ~ 1 second
-      MOZ_LOG(AudioLogModule(), LogLevel::Debug,
-              ("%p: Inserting %zu samples into graph, total frames = %" PRIu64,
-               (void*)this, aFrames, mTotalFrames));
-      mLastLogFrames = mTotalFrames;
-    }
-  }
+#ifdef DEBUG
+  mLastCallbackAppendTime = mStream->GraphImpl()->IterationEnd();
+#endif
+  mLiveFramesAppended = true;
+
+  MOZ_ASSERT(aChannels >= 1 && aChannels <= 8, "Support up to 8 channels");
 
-  for (Allocation& allocation : mAllocations) {
-    if (!allocation.mStream) {
-      continue;
-    }
+  AudioSegment segment;
+  RefPtr<SharedBuffer> buffer =
+    SharedBuffer::Create(aFrames * aChannels * sizeof(T));
+  AutoTArray<const T*, 8> channels;
+  if (aChannels == 1) {
+    PodCopy(static_cast<T*>(buffer->Data()), aBuffer, aFrames);
+    channels.AppendElement(static_cast<T*>(buffer->Data()));
+  } else {
+    channels.SetLength(aChannels);
+    AutoTArray<T*, 8> write_channels;
+    write_channels.SetLength(aChannels);
+    T * samples = static_cast<T*>(buffer->Data());
 
-    if (!allocation.mStream->GraphImpl()) {
-      // The DOMMediaStream that owns allocation.mStream has been cleaned up
-      // and MediaStream::DestroyImpl() has run in the MSG. This is fine and
-      // can happen before the MediaManager thread gets to stop capture for
-      // this allocation.
-      continue;
-    }
-
-    if (!allocation.mEnabled) {
-      continue;
+    size_t offset = 0;
+    for(uint32_t i = 0; i < aChannels; ++i) {
+      channels[i] = write_channels[i] = samples + offset;
+      offset += aFrames;
     }
 
-#ifdef DEBUG
-    allocation.mLastCallbackAppendTime =
-      allocation.mStream->GraphImpl()->IterationEnd();
-#endif
-    allocation.mLiveFramesAppended = true;
-
-    TimeStamp insertTime;
-    // Make sure we include the stream and the track.
-    // The 0:1 is a flag to note when we've done the final insert for a given input block.
-    LogTime(AsyncLatencyLogger::AudioTrackInsertion,
-            LATENCY_STREAM_ID(allocation.mStream.get(), allocation.mTrackID),
-            (&allocation != &mAllocations.LastElement()) ? 0 : 1, insertTime);
-
-    // Bug 971528 - Support stereo capture in gUM
-    MOZ_ASSERT(aChannels >= 1 && aChannels <= 8, "Support up to 8 channels");
+    DeinterleaveAndConvertBuffer(aBuffer,
+        aFrames,
+        aChannels,
+        write_channels.Elements());
+  }
 
-    AudioSegment segment;
-    RefPtr<SharedBuffer> buffer =
-      SharedBuffer::Create(aFrames * aChannels * sizeof(T));
-    AutoTArray<const T*, 8> channels;
-    if (aChannels == 1) {
-      PodCopy(static_cast<T*>(buffer->Data()), aBuffer, aFrames);
-      channels.AppendElement(static_cast<T*>(buffer->Data()));
-    } else {
-      channels.SetLength(aChannels);
-      AutoTArray<T*, 8> write_channels;
-      write_channels.SetLength(aChannels);
-      T * samples = static_cast<T*>(buffer->Data());
+  LOG_FRAMES(("Appending %zu frames of raw audio", aFrames));
 
-      size_t offset = 0;
-      for(uint32_t i = 0; i < aChannels; ++i) {
-        channels[i] = write_channels[i] = samples + offset;
-        offset += aFrames;
-      }
+  MOZ_ASSERT(aChannels == channels.Length());
+  segment.AppendFrames(buffer.forget(), channels, aFrames,
+      mPrincipal);
 
-      DeinterleaveAndConvertBuffer(aBuffer,
-                                   aFrames,
-                                   aChannels,
-                                   write_channels.Elements());
-    }
-
-    LOG_FRAMES(("Appending %zu frames of raw audio for allocation %p",
-                aFrames, allocation.mHandle.get()));
-
-    MOZ_ASSERT(aChannels == channels.Length());
-    segment.AppendFrames(buffer.forget(), channels, aFrames,
-                          allocation.mPrincipal);
-    segment.GetStartTime(insertTime);
-
-    allocation.mStream->AppendToTrack(allocation.mTrackID, &segment);
-  }
+  mStream->AppendToTrack(mTrackID, &segment);
 }
 
 // Called back on GraphDriver thread!
 // Note this can be called back after ::Shutdown()
 void
-MediaEngineWebRTCMicrophoneSource::NotifyInputData(MediaStreamGraphImpl* aGraph,
-                                                   const AudioDataValue* aBuffer,
-                                                   size_t aFrames,
-                                                   TrackRate aRate,
-                                                   uint32_t aChannels)
+AudioInputProcessing::NotifyInputData(MediaStreamGraphImpl* aGraph,
+                                      const AudioDataValue* aBuffer,
+                                      size_t aFrames,
+                                      TrackRate aRate,
+                                      uint32_t aChannels)
 {
   MOZ_ASSERT(aGraph->CurrentDriver()->OnThread());
   TRACE_AUDIO_CALLBACK();
 
-  {
-    MutexAutoLock lock(mMutex);
-    if (mAllocations.IsEmpty()) {
-      // This can happen because mAllocations is not yet using message passing, and
-      // is access both on the media manager thread and the MSG thread. This is to
-      // be fixed soon.
-      // When deallocating, the listener is removed via message passing, while the
-      // allocation is removed immediately, so there can be a few iterations where
-      // we need to return early here.
-      return;
-    }
-  }
+  MOZ_ASSERT(mEnabled);
+
   // If some processing is necessary, packetize and insert in the WebRTC.org
-  // code. Otherwise, directly insert the mic data in the MSG, bypassing all processing.
+  // code. Otherwise, directly insert the mic data in the MSG, bypassing all
+  // processing.
   if (PassThrough(aGraph)) {
     InsertInGraph<AudioDataValue>(aBuffer, aFrames, aChannels);
   } else {
     PacketizeAndProcess(aGraph, aBuffer, aFrames, aRate, aChannels);
   }
 }
 
 #define ResetProcessingIfNeeded(_processing)                        \
@@ -1316,52 +1406,29 @@ do {                                    
       #_processing " on device change.");                           \
       return;                                                       \
     }                                                               \
                                                                     \
   }                                                                 \
 }  while(0)
 
 void
-MediaEngineWebRTCMicrophoneSource::DeviceChanged(MediaStreamGraphImpl* aGraph)
+AudioInputProcessing::DeviceChanged(MediaStreamGraphImpl* aGraph)
 {
   MOZ_ASSERT(aGraph->CurrentDriver()->OnThread());
   // Reset some processing
   ResetProcessingIfNeeded(gain_control);
   ResetProcessingIfNeeded(echo_cancellation);
   ResetProcessingIfNeeded(noise_suppression);
 }
 
 void
-MediaEngineWebRTCMicrophoneSource::Disconnect(MediaStreamGraphImpl* aGraph)
-{
-  // This method is just for asserts.
-  MOZ_ASSERT(aGraph->CurrentDriver()->OnThread());
-  MOZ_ASSERT(!mListener);
-}
-
-void
-MediaEngineWebRTCMicrophoneSource::Shutdown()
+AudioInputProcessing::End()
 {
-  AssertIsOnOwningThread();
-
-  if (mState == kStarted) {
-    for (const Allocation& allocation : mAllocations) {
-      if (allocation.mEnabled) {
-        Stop(allocation.mHandle);
-      }
-    }
-    MOZ_ASSERT(mState == kStopped);
-  }
-
-  while (!mAllocations.IsEmpty()) {
-    MOZ_ASSERT(mState == kAllocated || mState == kStopped);
-    Deallocate(mAllocations[0].mHandle);
-  }
-  MOZ_ASSERT(mState == kReleased);
+  mEnded = true;
 }
 
 nsString
 MediaEngineWebRTCAudioCaptureSource::GetName() const
 {
   return NS_LITERAL_STRING(u"AudioCapture");
 }
 
@@ -1380,28 +1447,16 @@ MediaEngineWebRTCAudioCaptureSource::Get
 
   uuid.ToProvidedString(uuidBuffer);
   asciiString.AssignASCII(uuidBuffer);
 
   // Remove {} and the null terminator
   return nsCString(Substring(asciiString, 1, NSID_LENGTH - 3));
 }
 
-bool
-MediaEngineWebRTCMicrophoneSource::HasEnabledTrack() const
-{
-  AssertIsOnOwningThread();
-  for (const Allocation& allocation : mAllocations) {
-    if (allocation.mEnabled) {
-      return true;
-    }
-  }
-  return false;
-}
-
 nsresult
 MediaEngineWebRTCAudioCaptureSource::SetTrack(const RefPtr<const AllocationHandle>& aHandle,
                                               const RefPtr<SourceMediaStream>& aStream,
                                               TrackID aTrackID,
                                               const PrincipalHandle& aPrincipalHandle)
 {
   AssertIsOnOwningThread();
   // Nothing to do here. aStream is a placeholder dummy and not exposed.
new file mode 100644
--- /dev/null
+++ b/dom/media/webrtc/MediaEngineWebRTCAudio.h
@@ -0,0 +1,352 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MediaEngineWebRTCAudio_h
+#define MediaEngineWebRTCAudio_h
+
+#include "MediaEngineWebRTC.h"
+#include "AudioPacketizer.h"
+#include "AudioSegment.h"
+#include "AudioDeviceInfo.h"
+#include "webrtc/modules/audio_processing/include/audio_processing.h"
+
+namespace mozilla {
+
+class AudioInputProcessing;
+
+// This class is created and used exclusively on the Media Manager thread, with
+// exactly two exceptions:
+// - Pull is always called on the MSG thread. It only ever uses
+//   mInputProcessing. mInputProcessing is set, then a message is sent first to
+//   the main thread and then the MSG thread so that it can be used as part of
+//   the graph processing. On destruction, similarly, a message is sent to the
+//   graph so that it stops using it, and then it is deleted.
+// - mSettings is created on the MediaManager thread and is only ever accessed
+//   on the Main Thread. It is const.
+class MediaEngineWebRTCMicrophoneSource : public MediaEngineSource
+{
+public:
+  MediaEngineWebRTCMicrophoneSource(RefPtr<AudioDeviceInfo> aInfo,
+                                    const nsString& name,
+                                    const nsCString& uuid,
+                                    uint32_t maxChannelCount,
+                                    bool aDelayAgnostic,
+                                    bool aExtendedFilter);
+
+  bool RequiresSharing() const override
+  {
+    return false;
+  }
+
+  nsString GetName() const override;
+  nsCString GetUUID() const override;
+
+  nsresult Allocate(const dom::MediaTrackConstraints &aConstraints,
+                    const MediaEnginePrefs& aPrefs,
+                    const nsString& aDeviceId,
+                    const ipc::PrincipalInfo& aPrincipalInfo,
+                    AllocationHandle** aOutHandle,
+                    const char** aOutBadConstraint) override;
+  nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override;
+  nsresult SetTrack(const RefPtr<const AllocationHandle>& aHandle,
+                    const RefPtr<SourceMediaStream>& aStream,
+                    TrackID aTrackID,
+                    const PrincipalHandle& aPrincipal) override;
+  nsresult Start(const RefPtr<const AllocationHandle>& aHandle) override;
+  nsresult Stop(const RefPtr<const AllocationHandle>& aHandle) override;
+  nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
+                       const dom::MediaTrackConstraints& aConstraints,
+                       const MediaEnginePrefs& aPrefs,
+                       const nsString& aDeviceId,
+                       const char** aOutBadConstraint) override;
+
+  void Pull(const RefPtr<const AllocationHandle>& aHandle,
+            const RefPtr<SourceMediaStream>& aStream,
+            TrackID aTrackID,
+            StreamTime aDesiredTime,
+            const PrincipalHandle& aPrincipalHandle) override;
+
+  /**
+   * Assigns the current settings of the capture to aOutSettings.
+   * Main thread only.
+   */
+  void GetSettings(dom::MediaTrackSettings& aOutSettings) const override;
+
+  dom::MediaSourceEnum GetMediaSource() const override
+  {
+    return dom::MediaSourceEnum::Microphone;
+  }
+
+  nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) override
+  {
+    return NS_ERROR_NOT_IMPLEMENTED;
+  }
+
+  uint32_t GetBestFitnessDistance(
+    const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
+    const nsString& aDeviceId) const override;
+
+  void Shutdown() override;
+
+protected:
+  ~MediaEngineWebRTCMicrophoneSource() = default;
+
+private:
+  /**
+   * Reevaluates the aggregated constraints of all allocations and restarts the
+   * underlying device if necessary.
+   *
+   * If the given AllocationHandle was already registered, its constraints will
+   * be updated before reevaluation. If not, they will be added before
+   * reevaluation.
+   */
+  nsresult ReevaluateAllocation(const RefPtr<AllocationHandle>& aHandle,
+                                const NormalizedConstraints* aConstraintsUpdate,
+                                const MediaEnginePrefs& aPrefs,
+                                const nsString& aDeviceId,
+                                const char** aOutBadConstraint);
+
+  /**
+   * Updates the underlying (single) device with the aggregated constraints
+   * aNetConstraints. If the chosen settings for the device changes based on
+   * these new constraints, and capture is active, the device will be restarted.
+   */
+  nsresult UpdateSingleSource(const RefPtr<const AllocationHandle>& aHandle,
+                              const NormalizedConstraints& aNetConstraints,
+                              const MediaEnginePrefs& aPrefs,
+                              const nsString& aDeviceId,
+                              const char** aOutBadConstraint);
+
+  // These methods send a message to the AudioInputProcessing instance.
+  void UpdateAECSettingsIfNeeded(bool aEnable, webrtc::EcModes aMode);
+  void UpdateAGCSettingsIfNeeded(bool aEnable, webrtc::AgcModes aMode);
+  void UpdateNSSettingsIfNeeded(bool aEnable, webrtc::NsModes aMode);
+  void UpdateAPMExtraOptions(bool aExtendedFilter, bool aDelayAgnostic);
+  void ApplySettings(const MediaEnginePrefs& aPrefs,
+                     RefPtr<MediaStreamGraphImpl> aGraph);
+
+  bool HasEnabledTrack() const;
+
+  RefPtr<AllocationHandle> mHandle;
+
+  TrackID mTrackID = TRACK_NONE;
+  PrincipalHandle mPrincipal = PRINCIPAL_HANDLE_NONE;
+  bool mEnabled = false;
+
+  const RefPtr<AudioDeviceInfo> mDeviceInfo;
+  const bool mDelayAgnostic;
+  const bool mExtendedFilter;
+  const nsString mDeviceName;
+  const nsCString mDeviceUUID;
+
+  // The maximum number of channels that this device supports.
+  const uint32_t mDeviceMaxChannelCount;
+  // The current settings for the underlying device.
+  // Constructed on the MediaManager thread, and then only ever accessed on the
+  // main thread.
+  const nsMainThreadPtrHandle<media::Refcountable<dom::MediaTrackSettings>> mSettings;
+  // To only update microphone when needed, we keep track of the prefs
+  // representing the currently applied settings for this source. This is the
+  // net result of the prefs across all allocations.
+  MediaEnginePrefs mNetPrefs;
+
+  // Current state of the resource for this source.
+  MediaEngineSourceState mState;
+
+  // The SourceMediaStream on which to append data for this microphone. Set in
+  // SetTrack as part of the initialization, and nulled in ::Deallocate.
+  RefPtr<SourceMediaStream> mStream;
+
+  // See note at the top of this class.
+  RefPtr<AudioInputProcessing> mInputProcessing;
+};
+
+// This class is created on the MediaManager thread, and then exclusively used
+// on the MSG thread.
+// All communication is done via message passing using MSG ControlMessages
+class AudioInputProcessing : public AudioDataListener
+{
+public:
+  AudioInputProcessing(uint32_t aMaxChannelCount,
+                       RefPtr<SourceMediaStream> aStream,
+                       TrackID aTrackID,
+                       const PrincipalHandle& aPrincipalHandle);
+
+  void Pull(const RefPtr<const AllocationHandle>& aHandle,
+            const RefPtr<SourceMediaStream>& aStream,
+            TrackID aTrackID,
+            StreamTime aDesiredTime,
+            const PrincipalHandle& aPrincipalHandle);
+
+  void NotifyOutputData(MediaStreamGraphImpl* aGraph,
+                        AudioDataValue* aBuffer,
+                        size_t aFrames,
+                        TrackRate aRate,
+                        uint32_t aChannels) override;
+  void NotifyInputData(MediaStreamGraphImpl* aGraph,
+                       const AudioDataValue* aBuffer,
+                       size_t aFrames,
+                       TrackRate aRate,
+                       uint32_t aChannels) override;
+
+  void Start();
+  void Stop();
+
+  void DeviceChanged(MediaStreamGraphImpl* aGraph) override;
+
+  uint32_t RequestedInputChannelCount(MediaStreamGraphImpl* aGraph) override
+  {
+    return GetRequestedInputChannelCount(aGraph);
+  }
+
+  void Disconnect(MediaStreamGraphImpl* aGraph) override;
+
+  template<typename T>
+  void InsertInGraph(const T* aBuffer, size_t aFrames, uint32_t aChannels);
+
+  void PacketizeAndProcess(MediaStreamGraphImpl* aGraph,
+                           const AudioDataValue* aBuffer,
+                           size_t aFrames,
+                           TrackRate aRate,
+                           uint32_t aChannels);
+
+  void SetPassThrough(bool aPassThrough);
+  uint32_t GetRequestedInputChannelCount(MediaStreamGraphImpl* aGraphImpl);
+  void SetRequestedInputChannelCount(uint32_t aRequestedInputChannelCount);
+  // This is true when all processing is disabled, so we can skip
+  // packetization, resampling and other processing passes.
+  bool PassThrough(MediaStreamGraphImpl* aGraphImpl) const;
+
+  // These allow changing the APM options, enabling or disabling processing
+  // steps.
+  void UpdateAECSettingsIfNeeded(bool aEnable, webrtc::EcModes aMode);
+  void UpdateAGCSettingsIfNeeded(bool aEnable, webrtc::AgcModes aMode);
+  void UpdateNSSettingsIfNeeded(bool aEnable, webrtc::NsModes aMode);
+  void UpdateAPMExtraOptions(bool aExtendedFilter, bool aDelayAgnostic);
+
+  void End();
+
+private:
+  ~AudioInputProcessing() = default;
+  RefPtr<SourceMediaStream> mStream;
+  // This implements the processing algorithm to apply to the input (e.g. a
+  // microphone). If all algorithms are disabled, this class is not used. This
+  // class only accepts audio chunks of 10ms. It has two inputs and one output:
+  // it is fed the speaker data and the microphone data. It outputs processed
+  // input data.
+  const UniquePtr<webrtc::AudioProcessing> mAudioProcessing;
+  // Packetizer to be able to feed 10ms packets to the input side of
+  // mAudioProcessing. Not used if the processing is bypassed.
+  nsAutoPtr<AudioPacketizer<AudioDataValue, float>> mPacketizerInput;
+  // Packetizer to be able to feed 10ms packets to the output side of
+  // mAudioProcessing. Not used if the processing is bypassed.
+  nsAutoPtr<AudioPacketizer<AudioDataValue, float>> mPacketizerOutput;
+  // The number of channels asked for by content, after clamping to the range of
+  // legal channel count for this particular device. This is the number of
+  // channels of the input buffer passed as parameter in NotifyInputData.
+  uint32_t mRequestedInputChannelCount;
+  // mSkipProcessing is true if none of the processing passes are enabled,
+  // because of prefs or constraints. This allows simply copying the audio into
+  // the MSG, skipping resampling and the whole webrtc.org code.
+  bool mSkipProcessing;
+  // Stores the mixed audio output for the reverse-stream of the AEC (the
+  // speaker data).
+  AlignedFloatBuffer mOutputBuffer;
+  // Stores the input audio, to be processed by the APM.
+  AlignedFloatBuffer mInputBuffer;
+  // Stores the deinterleaved microphone audio
+  AlignedFloatBuffer mDeinterleavedBuffer;
+  // Stores the mixed down input audio
+  AlignedFloatBuffer mInputDownmixBuffer;
+#ifdef DEBUG
+  // The MSGImpl::IterationEnd() of the last time we appended data from an
+  // audio callback.
+  GraphTime mLastCallbackAppendTime;
+#endif
+  // Set to false by Start(). Becomes true after the first time we append real
+  // audio frames from the audio callback.
+  bool mLiveFramesAppended;
+  // Set to false by Start(). Becomes true after the first time we append
+  // silence *after* the first audio callback has appended real frames.
+  bool mLiveSilenceAppended;
+  // Track ID on which the data is to be appended after processing
+  TrackID mTrackID;
+  // Principal for the data that flows through this class.
+  PrincipalHandle mPrincipal;
+  // Whether or not this MediaEngine is enabled. If it's not enabled, it
+  // operates in "pull" mode, and we append silence only, releasing the audio
+  // input stream.
+  bool mEnabled;
+  // Whether or not we've ended and removed the track in the SourceMediaStream
+  bool mEnded;
+};
+
+
+class MediaEngineWebRTCAudioCaptureSource : public MediaEngineSource
+{
+public:
+  explicit MediaEngineWebRTCAudioCaptureSource(const char* aUuid)
+  {
+  }
+  nsString GetName() const override;
+  nsCString GetUUID() const override;
+  nsresult Allocate(const dom::MediaTrackConstraints &aConstraints,
+                    const MediaEnginePrefs &aPrefs,
+                    const nsString& aDeviceId,
+                    const ipc::PrincipalInfo& aPrincipalInfo,
+                    AllocationHandle** aOutHandle,
+                    const char** aOutBadConstraint) override
+  {
+    // Nothing to do here, everything is managed in MediaManager.cpp
+    *aOutHandle = nullptr;
+    return NS_OK;
+  }
+  nsresult Deallocate(const RefPtr<const AllocationHandle>& aHandle) override
+  {
+    // Nothing to do here, everything is managed in MediaManager.cpp
+    MOZ_ASSERT(!aHandle);
+    return NS_OK;
+  }
+  nsresult SetTrack(const RefPtr<const AllocationHandle>& aHandle,
+                    const RefPtr<SourceMediaStream>& aStream,
+                    TrackID aTrackID,
+                    const PrincipalHandle& aPrincipal) override;
+  nsresult Start(const RefPtr<const AllocationHandle>& aHandle) override;
+  nsresult Stop(const RefPtr<const AllocationHandle>& aHandle) override;
+  nsresult Reconfigure(const RefPtr<AllocationHandle>& aHandle,
+                       const dom::MediaTrackConstraints& aConstraints,
+                       const MediaEnginePrefs& aPrefs,
+                       const nsString& aDeviceId,
+                       const char** aOutBadConstraint) override;
+
+  void Pull(const RefPtr<const AllocationHandle>& aHandle,
+            const RefPtr<SourceMediaStream>& aStream,
+            TrackID aTrackID,
+            StreamTime aDesiredTime,
+            const PrincipalHandle& aPrincipalHandle) override
+  {
+  }
+
+  dom::MediaSourceEnum GetMediaSource() const override
+  {
+    return dom::MediaSourceEnum::AudioCapture;
+  }
+
+  nsresult TakePhoto(MediaEnginePhotoCallback* aCallback) override
+  {
+    return NS_ERROR_NOT_IMPLEMENTED;
+  }
+
+  uint32_t GetBestFitnessDistance(
+    const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
+    const nsString& aDeviceId) const override;
+
+protected:
+  virtual ~MediaEngineWebRTCAudioCaptureSource() = default;
+};
+
+} // end namespace mozilla
+
+#endif // MediaEngineWebRTCAudio_h
--- a/dom/payments/PaymentRequest.cpp
+++ b/dom/payments/PaymentRequest.cpp
@@ -55,21 +55,24 @@ NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(
 NS_INTERFACE_MAP_END_INHERITING(DOMEventTargetHelper)
 
 NS_IMPL_ADDREF_INHERITED(PaymentRequest, DOMEventTargetHelper)
 NS_IMPL_RELEASE_INHERITED(PaymentRequest, DOMEventTargetHelper)
 
 bool
 PaymentRequest::PrefEnabled(JSContext* aCx, JSObject* aObj)
 {
+#ifdef NIGHTLY_BUILD
   if (!XRE_IsContentProcess()) {
     return false;
   }
-
   return StaticPrefs::dom_payments_request_enabled();
+#else
+  return false;
+#endif
 }
 
 nsresult
 PaymentRequest::IsValidStandardizedPMI(const nsAString& aIdentifier,
                                        nsAString& aErrorMsg)
 {
   /*
    *   The syntax of a standardized payment method identifier is given by the
--- a/dom/plugins/test/testplugin/nptest_windows.cpp
+++ b/dom/plugins/test/testplugin/nptest_windows.cpp
@@ -66,18 +66,16 @@ bool
 pluginSupportsWindowlessMode()
 {
   return true;
 }
 
 NPError
 pluginInstanceInit(InstanceData* instanceData)
 {
-  NPP npp = instanceData->npp;
-
   instanceData->platformData = static_cast<PlatformData*>
     (NPN_MemAlloc(sizeof(PlatformData)));
   if (!instanceData->platformData)
     return NPERR_OUT_OF_MEMORY_ERROR;
 
   instanceData->platformData->childWindow = nullptr;
   instanceData->platformData->device = nullptr;
   instanceData->platformData->frontBuffer = nullptr;
@@ -393,63 +391,63 @@ drawToDC(InstanceData* instanceData, HDC
       ::SetBkMode(dc, oldBkMode);
     }
     break;
 
     case DM_SOLID_COLOR:
     {
       HDC offscreenDC = ::CreateCompatibleDC(dc);
       if (!offscreenDC)
-	return;
+        return;
 
       const BITMAPV4HEADER bitmapheader = {
-	sizeof(BITMAPV4HEADER),
-	width,
-	height,
-	1, // planes
-	32, // bits
-	BI_BITFIELDS,
-	0, // unused size
-	0, 0, // unused metrics
-	0, 0, // unused colors used/important
-	0x00FF0000, 0x0000FF00, 0x000000FF, 0xFF000000, // ARGB masks
+        sizeof(BITMAPV4HEADER),
+        width,
+        height,
+        1, // planes
+        32, // bits
+        BI_BITFIELDS,
+        0, // unused size
+        0, 0, // unused metrics
+        0, 0, // unused colors used/important
+        0x00FF0000, 0x0000FF00, 0x000000FF, 0xFF000000, // ARGB masks
       };
       uint32_t *pixelData;
       HBITMAP offscreenBitmap =
-	::CreateDIBSection(dc, reinterpret_cast<const BITMAPINFO*>(&bitmapheader),
-			   0, reinterpret_cast<void**>(&pixelData), 0, 0);
+        ::CreateDIBSection(dc, reinterpret_cast<const BITMAPINFO*>(&bitmapheader),
+                           0, reinterpret_cast<void**>(&pixelData), 0, 0);
       if (!offscreenBitmap)
-	return;
+        return;
 
       uint32_t rgba = instanceData->scriptableObject->drawColor;
       unsigned int alpha = ((rgba & 0xFF000000) >> 24);
       BYTE r = ((rgba & 0xFF0000) >> 16);
       BYTE g = ((rgba & 0xFF00) >> 8);
       BYTE b = (rgba & 0xFF);
 
       // Windows expects premultiplied
       r = BYTE(float(alpha * r) / 0xFF);
       g = BYTE(float(alpha * g) / 0xFF);
       b = BYTE(float(alpha * b) / 0xFF);
       uint32_t premultiplied =
-	(alpha << 24) +	(r << 16) + (g << 8) + b;
+        (alpha << 24) + (r << 16) + (g << 8) + b;
 
       for (uint32_t* lastPixel = pixelData + width * height;
-	   pixelData < lastPixel;
-	   ++pixelData)
-	*pixelData = premultiplied;
+           pixelData < lastPixel;
+           ++pixelData)
+        *pixelData = premultiplied;
 
       ::SelectObject(offscreenDC, offscreenBitmap);
       BLENDFUNCTION blendFunc;
       blendFunc.BlendOp = AC_SRC_OVER;
       blendFunc.BlendFlags = 0;
       blendFunc.SourceConstantAlpha = 255;
       blendFunc.AlphaFormat = AC_SRC_ALPHA;
       ::AlphaBlend(dc, x, y, width, height, offscreenDC, 0, 0, width, height,
-		   blendFunc);
+                   blendFunc);
 
       ::DeleteObject(offscreenDC);
       ::DeleteObject(offscreenBitmap);
     }
     break;
   }
 }
 
@@ -795,17 +793,17 @@ pluginHandleEvent(InstanceData* instance
   LRESULT result = 0;
   return handleEventInternal(instanceData, pe, &result);
 }
 
 /* windowed plugin events */
 
 LRESULT CALLBACK PluginWndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
 {
-	WNDPROC wndProc = (WNDPROC)GetProp(hWnd, "MozillaWndProc");
+        WNDPROC wndProc = (WNDPROC)GetProp(hWnd, "MozillaWndProc");
   if (!wndProc)
     return 0;
   InstanceData* pInstance = (InstanceData*)GetProp(hWnd, "InstanceData");
   if (!pInstance)
     return 0;
 
   NPEvent event = { static_cast<uint16_t>(uMsg), wParam, lParam };
 
--- a/gfx/thebes/gfxGDIFontList.cpp
+++ b/gfx/thebes/gfxGDIFontList.cpp
@@ -75,17 +75,17 @@ public:
             NS_ASSERTION(success, buf);
         }
 #endif
     }
 
     HANDLE mFontRef;
 };
 
-BYTE 
+BYTE
 FontTypeToOutPrecision(uint8_t fontType)
 {
     BYTE ret;
     switch (fontType) {
     case GFX_FONT_TYPE_TT_OPENTYPE:
     case GFX_FONT_TYPE_TRUETYPE:
         ret = OUT_TT_ONLY_PRECIS;
         break;
@@ -149,19 +149,19 @@ GDIFontEntry::ReadCMAP(FontInfoData *aFo
     AUTO_PROFILER_LABEL("GDIFontEntry::ReadCMAP", OTHER);
 
     // attempt this once, if errors occur leave a blank cmap
     if (mCharacterMap) {
         return NS_OK;
     }
 
     // skip non-SFNT fonts completely
-    if (mFontType != GFX_FONT_TYPE_PS_OPENTYPE && 
+    if (mFontType != GFX_FONT_TYPE_PS_OPENTYPE &&
         mFontType != GFX_FONT_TYPE_TT_OPENTYPE &&
-        mFontType != GFX_FONT_TYPE_TRUETYPE) 
+        mFontType != GFX_FONT_TYPE_TRUETYPE)
     {
         mCharacterMap = new gfxCharacterMap();
         mCharacterMap->mBuildOnTheFly = true;
         return NS_ERROR_FAILURE;
     }
 
     RefPtr<gfxCharacterMap> charmap;
     nsresult rv;
@@ -283,17 +283,17 @@ GDIFontEntry::FillLogFont(LOGFONTW *aLog
     if (mIsDataUserFont) {
         aLogFont->lfItalic = 0;
     }
 }
 
 #define MISSING_GLYPH 0x1F // glyph index returned for missing characters
                            // on WinXP with .fon fonts, but not Type1 (.pfb)
 
-bool 
+bool
 GDIFontEntry::TestCharacterMap(uint32_t aCh)
 {
     if (!mCharacterMap) {
         ReadCMAP();
         NS_ASSERTION(mCharacterMap, "failed to initialize a character map");
     }
 
     if (mCharacterMap->mBuildOnTheFly) {
@@ -317,32 +317,32 @@ GDIFontEntry::TestCharacterMap(uint32_t 
         HFONT hfont = font->GetHFONT();
         HFONT oldFont = (HFONT)SelectObject(dc, hfont);
 
         wchar_t str[1] = { (wchar_t)aCh };
         WORD glyph[1];
 
         bool hasGlyph = false;
 
-        // Bug 573038 - in some cases GetGlyphIndicesW returns 0xFFFF for a 
-        // missing glyph or 0x1F in other cases to indicate the "invalid" 
+        // Bug 573038 - in some cases GetGlyphIndicesW returns 0xFFFF for a
+        // missing glyph or 0x1F in other cases to indicate the "invalid"
         // glyph.  Map both cases to "not found"
         if (IsType1() || mForceGDI) {
-            // Type1 fonts and uniscribe APIs don't get along.  
+            // Type1 fonts and uniscribe APIs don't get along.
             // ScriptGetCMap will return E_HANDLE
-            DWORD ret = GetGlyphIndicesW(dc, str, 1, 
+            DWORD ret = GetGlyphIndicesW(dc, str, 1,
                                          glyph, GGI_MARK_NONEXISTING_GLYPHS);
             if (ret != GDI_ERROR
                 && glyph[0] != 0xFFFF
                 && (IsType1() || glyph[0] != MISSING_GLYPH))
             {
                 hasGlyph = true;
             }
         } else {
-            // ScriptGetCMap works better than GetGlyphIndicesW 
+            // ScriptGetCMap works better than GetGlyphIndicesW
             // for things like bitmap/vector fonts
             SCRIPT_CACHE sc = nullptr;
             HRESULT rv = ScriptGetCMap(dc, &sc, str, 1, 0, glyph);
             if (rv == S_OK)
                 hasGlyph = true;
         }
 
         SelectObject(dc, oldFont);
@@ -387,17 +387,17 @@ GDIFontEntry::InitLogFont(const nsACStri
     mLogFont.lfWeight         = Weight().Min().ToIntRounded();
 
     NS_ConvertUTF8toUTF16 name(aName);
     int len = std::min<int>(name.Length(), LF_FACESIZE - 1);
     memcpy(&mLogFont.lfFaceName, name.BeginReading(), len * sizeof(char16_t));
     mLogFont.lfFaceName[len] = '\0';
 }
 
-GDIFontEntry* 
+GDIFontEntry*
 GDIFontEntry::CreateFontEntry(const nsACString& aName,
                               gfxWindowsFontType aFontType,
                               SlantStyleRange aStyle,
                               WeightRange aWeight,
                               StretchRange aStretch,
                               gfxUserFontData* aUserFontData)
 {
     // jtdfix - need to set charset, unicode ranges, pitch/family
@@ -469,17 +469,17 @@ GDIFontFamily::FamilyAddStylesProc(const
         fe = static_cast<GDIFontEntry*>(ff->mAvailableFonts[i].get());
         // check if we already know about this face
         if (fe->Weight().Min() == FontWeight(int32_t(logFont.lfWeight)) &&
             fe->IsItalic() == (logFont.lfItalic == 0xFF)) {
             // update the charset bit here since this could be different
             // XXX Can we still do this now that we store mCharset
             // on the font family rather than the font entry?
             ff->mCharset.set(metrics.tmCharSet);
-            return 1; 
+            return 1;
         }
     }
 
     // We can't set the hasItalicFace flag correctly here,
     // because we might not have seen the family's italic face(s) yet.
     // So we'll set that flag for all members after loading all the faces.
     auto italicStyle = (logFont.lfItalic == 0xFF ?
                            FontSlantStyle::Italic() : FontSlantStyle::Normal());
@@ -580,17 +580,17 @@ RemoveCharsetFromFontSubstitute(nsAStrin
 nsresult
 gfxGDIFontList::GetFontSubstitutes()
 {
     HKEY hKey;
     DWORD i, rv, lenAlias, lenActual, valueType;
     WCHAR aliasName[MAX_VALUE_NAME];
     WCHAR actualName[MAX_VALUE_DATA];
 
-    if (RegOpenKeyExW(HKEY_LOCAL_MACHINE, 
+    if (RegOpenKeyExW(HKEY_LOCAL_MACHINE,
           L"SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\FontSubstitutes",
           0, KEY_READ, &hKey) != ERROR_SUCCESS)
     {
         return NS_ERROR_FAILURE;
     }
 
     for (i = 0, rv = ERROR_SUCCESS; rv != ERROR_NO_MORE_ITEMS; i++) {
         aliasName[0] = 0;
@@ -612,17 +612,17 @@ gfxGDIFontList::GetFontSubstitutes()
         nsAutoString actualFontName((char16_t*) actualName);
         RemoveCharsetFromFontSubstitute(substituteName);
         BuildKeyNameFromFontName(substituteName);
         RemoveCharsetFromFontSubstitute(actualFontName);
         BuildKeyNameFromFontName(actualFontName);
         gfxFontFamily *ff;
         NS_ConvertUTF16toUTF8 substitute(substituteName);
         NS_ConvertUTF16toUTF8 actual(actualFontName);
-        if (!actual.IsEmpty() && 
+        if (!actual.IsEmpty() &&
             (ff = mFontFamilies.GetWeak(actual))) {
             mFontSubstitutes.Put(substitute, ff);
         } else {
             mNonExistingFonts.AppendElement(substitute);
         }
     }
 
     // "Courier" on a default Windows install is an ugly bitmap font.
@@ -655,19 +655,18 @@ gfxGDIFontList::InitFontListForPlatform(
     mNonExistingFonts.Clear();
 
     // iterate over available families
     LOGFONTW logfont;
     memset(&logfont, 0, sizeof(logfont));
     logfont.lfCharSet = DEFAULT_CHARSET;
 
     AutoDC hdc;
-    int result = EnumFontFamiliesExW(hdc.GetDC(), &logfont,
-                                     (FONTENUMPROCW)&EnumFontFamExProc,
-                                     0, 0);
+    (void)EnumFontFamiliesExW(hdc.GetDC(), &logfont,
+                              (FONTENUMPROCW)&EnumFontFamExProc, 0, 0);
 
     GetFontSubstitutes();
 
     GetPrefsAndStartLoader();
 
     return NS_OK;
 }
 
@@ -712,37 +711,37 @@ gfxGDIFontList::EnumFontFamExProc(ENUMLO
 
         // mark the charset bit
         family->mCharset.set(metrics.tmCharSet);
     }
 
     return 1;
 }
 
-gfxFontEntry* 
+gfxFontEntry*
 gfxGDIFontList::LookupLocalFont(const nsACString& aFontName,
                                 WeightRange aWeightForEntry,
                                 StretchRange aStretchForEntry,
                                 SlantStyleRange aStyleForEntry)
 {
     gfxFontEntry *lookup;
 
     lookup = LookupInFaceNameLists(aFontName);
     if (!lookup) {
         return nullptr;
     }
 
     bool isCFF = false; // jtdfix -- need to determine this
-    
+
     // use the face name from the lookup font entry, which will be the localized
     // face name which GDI mapping tables use (e.g. with the system locale set to
     // Dutch, a fullname of 'Arial Bold' will find a font entry with the face name
     // 'Arial Vet' which can be used as a key in GDI font lookups).
-    GDIFontEntry *fe = GDIFontEntry::CreateFontEntry(lookup->Name(), 
-        gfxWindowsFontType(isCFF ? GFX_FONT_TYPE_PS_OPENTYPE : GFX_FONT_TYPE_TRUETYPE) /*type*/, 
+    GDIFontEntry *fe = GDIFontEntry::CreateFontEntry(lookup->Name(),
+        gfxWindowsFontType(isCFF ? GFX_FONT_TYPE_PS_OPENTYPE : GFX_FONT_TYPE_TRUETYPE) /*type*/,
         lookup->SlantStyle(), lookup->Weight(), aStretchForEntry, nullptr);
 
     if (!fe)
         return nullptr;
 
     fe->mIsLocalUserFont = true;
 
     // make the new font entry match the userfont entry style characteristics
@@ -840,27 +839,27 @@ gfxGDIFontList::MakePlatformFont(const n
         return nullptr;
 
     FallibleTArray<uint8_t> newFontData;
 
     rv = gfxFontUtils::RenameFont(uniqueName, aFontData, aLength, &newFontData);
 
     if (NS_FAILED(rv))
         return nullptr;
-        
+
     DWORD numFonts = 0;
 
     uint8_t *fontData = reinterpret_cast<uint8_t*> (newFontData.Elements());
     uint32_t fontLength = newFontData.Length();
     NS_ASSERTION(fontData, "null font data after renaming");
 
     // http://msdn.microsoft.com/en-us/library/ms533942(VS.85).aspx
-    // "A font that is added by AddFontMemResourceEx is always private 
+    // "A font that is added by AddFontMemResourceEx is always private
     //  to the process that made the call and is not enumerable."
-    fontRef = AddFontMemResourceEx(fontData, fontLength, 
+    fontRef = AddFontMemResourceEx(fontData, fontLength,
                                     0 /* reserved */, &numFonts);
     if (!fontRef) {
         if (FixupSymbolEncodedFont(fontData, fontLength)) {
             fontRef = AddFontMemResourceEx(fontData, fontLength, 0, &numFonts);
         }
     }
     if (!fontRef) {
         return nullptr;
@@ -919,17 +918,17 @@ gfxGDIFontList::FindAndAddFamilies(const
 gfxFontFamily*
 gfxGDIFontList::GetDefaultFontForPlatform(const gfxFontStyle* aStyle)
 {
     gfxFontFamily *ff = nullptr;
 
     // this really shouldn't fail to find a font....
     NONCLIENTMETRICSW ncm;
     ncm.cbSize = sizeof(ncm);
-    BOOL status = ::SystemParametersInfoW(SPI_GETNONCLIENTMETRICS, 
+    BOOL status = ::SystemParametersInfoW(SPI_GETNONCLIENTMETRICS,
                                           sizeof(ncm), &ncm, 0);
     if (status) {
         ff = FindFamily(NS_ConvertUTF16toUTF8(ncm.lfMessageFont.lfFaceName));
         if (ff) {
             return ff;
         }
     }
 
--- a/gfx/thebes/gfxWindowsPlatform.cpp
+++ b/gfx/thebes/gfxWindowsPlatform.cpp
@@ -1293,27 +1293,27 @@ gfxWindowsPlatform::IsOptimus()
         {
             knowIsOptimus = 1;
         } else {
             knowIsOptimus = 0;
         }
     }
     return knowIsOptimus;
 }
-
+/*
 static inline bool
 IsWARPStable()
 {
   // It seems like nvdxgiwrap makes a mess of WARP. See bug 1154703.
   if (!IsWin8OrLater() || GetModuleHandleA("nvdxgiwrap.dll")) {
     return false;
   }
   return true;
 }
-
+*/
 static void
 InitializeANGLEConfig()
 {
   FeatureState& d3d11ANGLE = gfxConfig::GetFeature(Feature::D3D11_HW_ANGLE);
 
   if (!gfxConfig::IsEnabled(Feature::D3D11_COMPOSITING)) {
     d3d11ANGLE.DisableByDefault(FeatureStatus::Unavailable, "D3D11 compositing is disabled",
                                 NS_LITERAL_CSTRING("FEATURE_FAILURE_D3D11_DISABLED"));
--- a/gfx/webrender/res/clip_shared.glsl
+++ b/gfx/webrender/res/clip_shared.glsl
@@ -64,18 +64,16 @@ ClipVertexInfo write_clip_tile_vertex(Re
         vec4 snap_positions = compute_snap_positions(
             snap_mat,
             local_clip_rect,
             area.common_data.device_pixel_scale
         );
 
         vec2 snap_offsets = compute_snap_offset_impl(
             device_pos,
-            snap_mat,
-            local_clip_rect,
             RectWithSize(snap_positions.xy, snap_positions.zw - snap_positions.xy),
             snap_positions
         );
 
         device_pos -= snap_offsets;
     }
 
     vec2 world_pos = device_pos / area.common_data.device_pixel_scale;
deleted file mode 100644
--- a/gfx/webrender/res/cs_clip_line.glsl
+++ /dev/null
@@ -1,207 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include shared,clip_shared
-
-#define LINE_STYLE_SOLID        0
-#define LINE_STYLE_DOTTED       1
-#define LINE_STYLE_DASHED       2
-#define LINE_STYLE_WAVY         3
-
-varying vec3 vLocalPos;
-flat varying int vStyle;
-flat varying float vAxisSelect;
-flat varying vec4 vParams;
-flat varying vec2 vLocalOrigin;
-
-#ifdef WR_VERTEX_SHADER
-
-#define LINE_ORIENTATION_VERTICAL       0
-#define LINE_ORIENTATION_HORIZONTAL     1
-
-struct LineDecorationData {
-    RectWithSize local_rect;
-    float wavyLineThickness;
-    float style;
-    float orientation;
-};
-
-LineDecorationData fetch_data(ivec2 address) {
-    vec4 data[2] = fetch_from_gpu_cache_2_direct(address);
-    RectWithSize local_rect = RectWithSize(data[0].xy, data[0].zw);
-    LineDecorationData line_data = LineDecorationData(
-        local_rect,
-        data[1].x,
-        data[1].y,
-        data[1].z
-    );
-    return line_data;
-}
-
-void main(void) {
-    ClipMaskInstance cmi = fetch_clip_item();
-    ClipArea area = fetch_clip_area(cmi.render_task_address);
-    Transform clip_transform = fetch_transform(cmi.clip_transform_id);
-    Transform prim_transform = fetch_transform(cmi.prim_transform_id);
-    LineDecorationData data = fetch_data(cmi.clip_data_address);
-
-    ClipVertexInfo vi = write_clip_tile_vertex(
-        data.local_rect,
-        prim_transform,
-        clip_transform,
-        area
-    );
-    vLocalPos = vi.local_pos;
-
-    vec2 pos, size;
-
-    switch (int(data.orientation)) {
-        case LINE_ORIENTATION_HORIZONTAL:
-            vAxisSelect = 0.0;
-            pos = data.local_rect.p0;
-            size = data.local_rect.size;
-            break;
-        case LINE_ORIENTATION_VERTICAL:
-            vAxisSelect = 1.0;
-            pos = data.local_rect.p0.yx;
-            size = data.local_rect.size.yx;
-            break;
-        default:
-            vAxisSelect = 0.0;
-            pos = size = vec2(0.0);
-    }
-
-    vLocalOrigin = pos;
-    vStyle = int(data.style);
-
-    switch (vStyle) {
-        case LINE_STYLE_SOLID: {
-            break;
-        }
-        case LINE_STYLE_DASHED: {
-            float dash_length = size.y * 3.0;
-            vParams = vec4(2.0 * dash_length, // period
-                           dash_length,       // dash length
-                           0.0,
-                           0.0);
-            break;
-        }
-        case LINE_STYLE_DOTTED: {
-            float diameter = size.y;
-            float period = diameter * 2.0;
-            float center_line = pos.y + 0.5 * size.y;
-            float max_x = floor(size.x / period) * period;
-            vParams = vec4(period,
-                           diameter / 2.0, // radius
-                           center_line,
-                           max_x);
-            break;
-        }
-        case LINE_STYLE_WAVY: {
-            // This logic copied from gecko to get the same results
-            float line_thickness = max(data.wavyLineThickness, 1.0);
-            // Difference in height between peaks and troughs
-            // (and since slopes are 45 degrees, the length of each slope)
-            float slope_length = size.y - line_thickness;
-            // Length of flat runs
-            float flat_length = max((line_thickness - 1.0) * 2.0, 1.0);
-
-            vParams = vec4(line_thickness / 2.0,
-                           slope_length,
-                           flat_length,
-                           size.y);
-            break;
-        }
-        default:
-            vParams = vec4(0.0);
-    }
-}
-#endif
-
-#ifdef WR_FRAGMENT_SHADER
-
-#define MAGIC_WAVY_LINE_AA_SNAP         0.5
-
-void main(void) {
-    // Find the appropriate distance to apply the step over.
-    vec2 local_pos = vLocalPos.xy / vLocalPos.z;
-    float aa_range = compute_aa_range(local_pos);
-    float alpha = 1.0;
-
-    // Select the x/y coord, depending on which axis this edge is.
-    vec2 pos = mix(local_pos.xy, local_pos.yx, vAxisSelect);
-
-    switch (vStyle) {
-        case LINE_STYLE_SOLID: {
-            break;
-        }
-        case LINE_STYLE_DASHED: {
-            // Get the main-axis position relative to closest dot or dash.
-            float x = mod(pos.x - vLocalOrigin.x, vParams.x);
-
-            // Calculate dash alpha (on/off) based on dash length
-            alpha = step(x, vParams.y);
-            break;
-        }
-        case LINE_STYLE_DOTTED: {
-            // Get the main-axis position relative to closest dot or dash.
-            float x = mod(pos.x - vLocalOrigin.x, vParams.x);
-
-            // Get the dot alpha
-            vec2 dot_relative_pos = vec2(x, pos.y) - vParams.yz;
-            float dot_distance = length(dot_relative_pos) - vParams.y;
-            alpha = distance_aa(aa_range, dot_distance);
-            // Clip off partial dots
-            alpha *= step(pos.x - vLocalOrigin.x, vParams.w);
-            break;
-        }
-        case LINE_STYLE_WAVY: {
-            vec2 normalized_local_pos = pos - vLocalOrigin.xy;
-
-            float half_line_thickness = vParams.x;
-            float slope_length = vParams.y;
-            float flat_length = vParams.z;
-            float vertical_bounds = vParams.w;
-            // Our pattern is just two slopes and two flats
-            float half_period = slope_length + flat_length;
-
-            float mid_height = vertical_bounds / 2.0;
-            float peak_offset = mid_height - half_line_thickness;
-            // Flip the wave every half period
-            float flip = -2.0 * (step(mod(normalized_local_pos.x, 2.0 * half_period), half_period) - 0.5);
-            // float flip = -1.0;
-            peak_offset *= flip;
-            float peak_height = mid_height + peak_offset;
-
-            // Convert pos to a local position within one half period
-            normalized_local_pos.x = mod(normalized_local_pos.x, half_period);
-
-            // Compute signed distance to the 3 lines that make up an arc
-            float dist1 = distance_to_line(vec2(0.0, peak_height),
-                                           vec2(1.0, -flip),
-                                           normalized_local_pos);
-            float dist2 = distance_to_line(vec2(0.0, peak_height),
-                                           vec2(0, -flip),
-                                           normalized_local_pos);
-            float dist3 = distance_to_line(vec2(flat_length, peak_height),
-                                           vec2(-1.0, -flip),
-                                           normalized_local_pos);
-            float dist = abs(max(max(dist1, dist2), dist3));
-
-            // Apply AA based on the thickness of the wave
-            alpha = distance_aa(aa_range, dist - half_line_thickness);
-
-            // Disable AA for thin lines
-            if (half_line_thickness <= 1.0) {
-                alpha = 1.0 - step(alpha, MAGIC_WAVY_LINE_AA_SNAP);
-            }
-
-            break;
-        }
-        default: break;
-    }
-
-    oFragColor = vec4(alpha);
-}
-#endif
new file mode 100644
--- /dev/null
+++ b/gfx/webrender/res/cs_line_decoration.glsl
@@ -0,0 +1,170 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include shared
+
+#define LINE_STYLE_SOLID        0
+#define LINE_STYLE_DOTTED       1
+#define LINE_STYLE_DASHED       2
+#define LINE_STYLE_WAVY         3
+
+// Local space position
+varying vec2 vLocalPos;
+
+flat varying float vAxisSelect;
+flat varying int vStyle;
+flat varying vec4 vParams;
+
+#ifdef WR_VERTEX_SHADER
+
+#define LINE_ORIENTATION_VERTICAL       0
+#define LINE_ORIENTATION_HORIZONTAL     1
+
+in vec4 aTaskRect;
+in vec2 aLocalSize;
+in int aStyle;
+in int aOrientation;
+in float aWavyLineThickness;
+
+void main(void) {
+    vec2 size;
+
+    switch (aOrientation) {
+        case LINE_ORIENTATION_HORIZONTAL:
+            vAxisSelect = 0.0;
+            size = aLocalSize;
+            break;
+        case LINE_ORIENTATION_VERTICAL:
+            vAxisSelect = 1.0;
+            size = aLocalSize.yx;
+            break;
+        default:
+            vAxisSelect = 0.0;
+            size = vec2(0.0);
+    }
+
+    vStyle = aStyle;
+
+    switch (vStyle) {
+        case LINE_STYLE_SOLID: {
+            break;
+        }
+        case LINE_STYLE_DASHED: {
+            vParams = vec4(size.x,          // period
+                           0.5 * size.x,    // dash length
+                           0.0,
+                           0.0);
+            break;
+        }
+        case LINE_STYLE_DOTTED: {
+            float diameter = size.y;
+            float period = diameter * 2.0;
+            float center_line = 0.5 * size.y;
+            vParams = vec4(period,
+                           diameter / 2.0, // radius
+                           center_line,
+                           0.0);
+            break;
+        }
+        case LINE_STYLE_WAVY: {
+            // This logic copied from gecko to get the same results
+            float line_thickness = max(aWavyLineThickness, 1.0);
+            // Difference in height between peaks and troughs
+            // (and since slopes are 45 degrees, the length of each slope)
+            float slope_length = size.y - line_thickness;
+            // Length of flat runs
+            float flat_length = max((line_thickness - 1.0) * 2.0, 1.0);
+
+            vParams = vec4(line_thickness / 2.0,
+                           slope_length,
+                           flat_length,
+                           size.y);
+            break;
+        }
+        default:
+            vParams = vec4(0.0);
+    }
+
+    vLocalPos = aPosition.xy * aLocalSize;
+
+    gl_Position = uTransform * vec4(aTaskRect.xy + aTaskRect.zw * aPosition.xy, 0.0, 1.0);
+}
+#endif
+
+#ifdef WR_FRAGMENT_SHADER
+
+#define MAGIC_WAVY_LINE_AA_SNAP         0.5
+
+void main(void) {
+    // Find the appropriate distance to apply the step over.
+    vec2 local_pos = vLocalPos;
+    float aa_range = compute_aa_range(local_pos);
+    float alpha = 1.0;
+
+    // Select the x/y coord, depending on which axis this edge is.
+    vec2 pos = mix(local_pos.xy, local_pos.yx, vAxisSelect);
+
+    switch (vStyle) {
+        case LINE_STYLE_SOLID: {
+            break;
+        }
+        case LINE_STYLE_DASHED: {
+            // Calculate dash alpha (on/off) based on dash length
+            alpha = step(floor(pos.x + 0.5), vParams.y);
+            break;
+        }
+        case LINE_STYLE_DOTTED: {
+            // Get the dot alpha
+            vec2 dot_relative_pos = pos - vParams.yz;
+            float dot_distance = length(dot_relative_pos) - vParams.y;
+            alpha = distance_aa(aa_range, dot_distance);
+            break;
+        }
+        case LINE_STYLE_WAVY: {
+            float half_line_thickness = vParams.x;
+            float slope_length = vParams.y;
+            float flat_length = vParams.z;
+            float vertical_bounds = vParams.w;
+            // Our pattern is just two slopes and two flats
+            float half_period = slope_length + flat_length;
+
+            float mid_height = vertical_bounds / 2.0;
+            float peak_offset = mid_height - half_line_thickness;
+            // Flip the wave every half period
+            float flip = -2.0 * (step(mod(pos.x, 2.0 * half_period), half_period) - 0.5);
+            // float flip = -1.0;
+            peak_offset *= flip;
+            float peak_height = mid_height + peak_offset;
+
+            // Convert pos to a local position within one half period
+            pos.x = mod(pos.x, half_period);
+
+            // Compute signed distance to the 3 lines that make up an arc
+            float dist1 = distance_to_line(vec2(0.0, peak_height),
+                                           vec2(1.0, -flip),
+                                           pos);
+            float dist2 = distance_to_line(vec2(0.0, peak_height),
+                                           vec2(0, -flip),
+                                           pos);
+            float dist3 = distance_to_line(vec2(flat_length, peak_height),
+                                           vec2(-1.0, -flip),
+                                           pos);
+            float dist = abs(max(max(dist1, dist2), dist3));
+
+            // Apply AA based on the thickness of the wave
+            alpha = distance_aa(aa_range, dist - half_line_thickness);
+
+            // Disable AA for thin lines
+            if (half_line_thickness <= 1.0) {
+                alpha = 1.0 - step(alpha, MAGIC_WAVY_LINE_AA_SNAP);
+            }
+
+            break;
+        }
+        default: break;
+    }
+
+    oFragColor = vec4(alpha);
+}
+#endif
--- a/gfx/webrender/res/ps_text_run.glsl
+++ b/gfx/webrender/res/ps_text_run.glsl
@@ -125,24 +125,22 @@ VertexInfo write_text_vertex(RectWithSiz
     vec2 local_pos = glyph_rect.p0 + glyph_rect.size * aPosition.xy;
 #endif
 
     // Clamp to the local clip rect.
     local_pos = clamp_rect(local_pos, local_clip_rect);
 
     // Map the clamped local space corner into device space.
     vec4 world_pos = transform.m * vec4(local_pos, 0.0, 1.0);
-    vec2 device_pos = world_pos.xy / world_pos.w * task.common_data.device_pixel_scale;
+    vec2 device_pos = world_pos.xy * task.common_data.device_pixel_scale;
 
     // Apply offsets for the render task to get correct screen location.
-    vec2 final_pos = device_pos -
-                     task.content_origin +
-                     task.common_data.task_rect.p0;
+    vec2 final_offset = -task.content_origin + task.common_data.task_rect.p0;
 
-    gl_Position = uTransform * vec4(final_pos, z, 1.0);
+    gl_Position = uTransform * vec4(device_pos + final_offset * world_pos.w, z * world_pos.w, world_pos.w);
 
     VertexInfo vi = VertexInfo(
         local_pos,
         snap_offset,
         world_pos
     );
 
     return vi;
--- a/gfx/webrender/res/snap.glsl
+++ b/gfx/webrender/res/snap.glsl
@@ -23,21 +23,19 @@ vec4 compute_snap_positions(
     // Snap bounds in world coordinates, adjusted for pixel ratio. XY = top left, ZW = bottom right
     vec4 world_snap = device_pixel_scale * vec4(world_snap_p0.xy, world_snap_p1.xy) /
                                            vec4(world_snap_p0.ww, world_snap_p1.ww);
     return world_snap;
 }
 
 vec2 compute_snap_offset_impl(
     vec2 reference_pos,
-    mat4 transform,
-    RectWithSize snap_rect,
     RectWithSize reference_rect,
-    vec4 snap_positions) {
-
+    vec4 snap_positions
+) {
     /// World offsets applied to the corners of the snap rectangle.
     vec4 snap_offsets = floor(snap_positions + 0.5) - snap_positions;
 
     /// Compute the position of this vertex inside the snap rectangle.
     vec2 normalized_snap_pos = (reference_pos - reference_rect.p0) / reference_rect.size;
 
     /// Compute the actual world offset for this vertex needed to make it snap.
     return mix(snap_offsets.xy, snap_offsets.zw, normalized_snap_pos);
@@ -52,18 +50,16 @@ vec2 compute_snap_offset(vec2 local_pos,
     vec4 snap_positions = compute_snap_positions(
         transform,
         snap_rect,
         device_pixel_scale
     );
 
     vec2 snap_offsets = compute_snap_offset_impl(
         local_pos,
-        transform,
-        snap_rect,
         snap_rect,
         snap_positions
     );
 
     return snap_offsets;
 }
 
 #endif //WR_VERTEX_SHADER
--- a/gfx/webrender/src/batch.rs
+++ b/gfx/webrender/src/batch.rs
@@ -1,13 +1,13 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-use api::{AlphaType, ClipMode, DeviceIntRect, DeviceIntSize};
+use api::{AlphaType, ClipMode, DeviceIntRect, DeviceIntSize, LineStyle};
 use api::{DeviceUintRect, DeviceUintPoint, ExternalImageType, FilterOp, ImageRendering};
 use api::{YuvColorSpace, YuvFormat, WorldPixel, WorldRect, ColorDepth};
 use clip::{ClipDataStore, ClipNodeFlags, ClipNodeRange, ClipItem, ClipStore};
 use clip_scroll_tree::{ClipScrollTree, ROOT_SPATIAL_NODE_INDEX, SpatialNodeIndex};
 use euclid::vec3;
 use glyph_rasterizer::GlyphFormat;
 use gpu_cache::{GpuCache, GpuCacheHandle, GpuCacheAddress};
 use gpu_types::{BrushFlags, BrushInstance, PrimitiveHeaders};
@@ -1408,16 +1408,45 @@ impl BrushPrimitive {
                             cache_item.uv_rect_handle.as_int(gpu_cache),
                             (ShaderColorMode::Image as i32) << 16|
                              RasterizationSpace::Local as i32,
                             0,
                         ],
                     ))
                 }
             }
+            BrushKind::LineDecoration { ref handle, style, .. } => {
+                match style {
+                    LineStyle::Solid => {
+                        Some((
+                            BrushBatchKind::Solid,
+                            BatchTextures::no_texture(),
+                            [0; 3],
+                        ))
+                    }
+                    LineStyle::Dotted |
+                    LineStyle::Dashed |
+                    LineStyle::Wavy => {
+                        let rt_cache_entry = resource_cache
+                            .get_cached_render_task(handle.as_ref().unwrap());
+                        let cache_item = resource_cache.get_texture_cache_item(&rt_cache_entry.handle);
+                        let textures = BatchTextures::color(cache_item.texture_id);
+                        Some((
+                            BrushBatchKind::Image(get_buffer_kind(cache_item.texture_id)),
+                            textures,
+                            [
+                                cache_item.uv_rect_handle.as_int(gpu_cache),
+                                (ShaderColorMode::Image as i32) << 16|
+                                 RasterizationSpace::Local as i32,
+                                0,
+                            ],
+                        ))
+                    }
+                }
+            }
             BrushKind::Border { ref source, .. } => {
                 let cache_item = match *source {
                     BorderSource::Image(request) => {
                         resolve_image(
                             request,
                             resource_cache,
                             gpu_cache,
                             deferred_resolves,
@@ -1563,16 +1592,17 @@ impl Primitive {
                         BlendMode::PremultipliedDestOut
                     }
                     BrushKind::Image { alpha_type, .. } => {
                         match alpha_type {
                             AlphaType::PremultipliedAlpha => BlendMode::PremultipliedAlpha,
                             AlphaType::Alpha => BlendMode::Alpha,
                         }
                     }
+                    BrushKind::LineDecoration { .. } |
                     BrushKind::Solid { .. } |
                     BrushKind::YuvImage { .. } |
                     BrushKind::RadialGradient { .. } |
                     BrushKind::LinearGradient { .. } |
                     BrushKind::Border { .. } |
                     BrushKind::Picture { .. } => {
                         BlendMode::PremultipliedAlpha
                     }
@@ -1706,26 +1736,24 @@ pub fn resolve_image(
 #[cfg_attr(feature = "capture", derive(Serialize))]
 #[cfg_attr(feature = "replay", derive(Deserialize))]
 pub struct ClipBatcher {
     /// Rectangle draws fill up the rectangles with rounded corners.
     pub rectangles: Vec<ClipMaskInstance>,
     /// Image draws apply the image masking.
     pub images: FastHashMap<TextureSource, Vec<ClipMaskInstance>>,
     pub box_shadows: FastHashMap<TextureSource, Vec<ClipMaskInstance>>,
-    pub line_decorations: Vec<ClipMaskInstance>,
 }
 
 impl ClipBatcher {
     pub fn new() -> Self {
         ClipBatcher {
             rectangles: Vec::new(),
             images: FastHashMap::default(),
             box_shadows: FastHashMap::default(),
-            line_decorations: Vec::new(),
         }
     }
 
     pub fn add_clip_region(
         &mut self,
         task_address: RenderTaskAddress,
         clip_data_address: GpuCacheAddress,
     ) {
@@ -1798,22 +1826,16 @@ impl ClipBatcher {
                                 ..instance
                             });
                     } else {
                         warn!("Warnings: skip a image mask");
                         debug!("Key:{:?} Rect::{:?}", mask.image, mask.rect);
                         continue;
                     }
                 }
-                ClipItem::LineDecoration(..) => {
-                    self.line_decorations.push(ClipMaskInstance {
-                        clip_data_address: gpu_address,
-                        ..instance
-                    });
-                }
                 ClipItem::BoxShadow(ref info) => {
                     let rt_handle = info
                         .cache_handle
                         .as_ref()
                         .expect("bug: render task handle not allocated");
                     let rt_cache_entry = resource_cache
                         .get_cached_render_task(rt_handle);
                     let cache_item = resource_cache
--- a/gfx/webrender/src/clip.rs
+++ b/gfx/webrender/src/clip.rs
@@ -1,32 +1,32 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 use api::{BorderRadius, ClipMode, ComplexClipRegion, DeviceIntRect, DevicePixelScale, ImageMask};
 use api::{ImageRendering, LayoutRect, LayoutSize, LayoutPoint, LayoutVector2D};
-use api::{BoxShadowClipMode, LayoutToWorldScale, LineOrientation, LineStyle, PicturePixel, WorldPixel};
+use api::{BoxShadowClipMode, LayoutToWorldScale, PicturePixel, WorldPixel};
 use api::{PictureRect, LayoutPixel, WorldPoint, WorldSize, WorldRect, LayoutToWorldTransform};
 use api::{VoidPtrToSizeFn, LayoutRectAu, ImageKey, AuHelpers};
 use app_units::Au;
 use border::{ensure_no_corner_overlap, BorderRadiusAu};
 use box_shadow::{BLUR_SAMPLE_SCALE, BoxShadowClipSource, BoxShadowCacheKey};
 use clip_scroll_tree::{ClipScrollTree, CoordinateSystemId, ROOT_SPATIAL_NODE_INDEX, SpatialNodeIndex};
 use ellipse::Ellipse;
 use gpu_cache::{GpuCache, GpuCacheHandle, ToGpuBlocks};
 use gpu_types::{BoxShadowStretchMode};
 use intern;
 use internal_types::FastHashSet;
 use prim_store::{ClipData, ImageMaskData, SpaceMapper};
 use render_task::to_cache_size;
 use resource_cache::{ImageRequest, ResourceCache};
 use std::{cmp, u32};
 use std::os::raw::c_void;
-use util::{extract_inner_rect_safe, pack_as_float, project_rect, ScaleOffset};
+use util::{extract_inner_rect_safe, project_rect, ScaleOffset};
 
 /*
 
  Module Overview
 
  There are a number of data structures involved in the clip module:
 
  ClipStore - Main interface used by other modules.
@@ -139,24 +139,16 @@ impl From<ClipItemKey> for ClipNode {
             }
             ClipItemKey::RoundedRectangle(rect, radius, mode) => {
                 ClipItem::RoundedRectangle(
                     LayoutRect::from_au(rect),
                     radius.into(),
                     mode,
                 )
             }
-            ClipItemKey::LineDecoration(rect, style, orientation, wavy_line_thickness) => {
-                ClipItem::LineDecoration(LineDecorationClipSource {
-                    rect: LayoutRect::from_au(rect),
-                    style,
-                    orientation,
-                    wavy_line_thickness: wavy_line_thickness.to_f32_px(),
-                })
-            }
             ClipItemKey::ImageMask(rect, image, repeat) => {
                 ClipItem::Image(ImageMask {
                     image,
                     rect: LayoutRect::from_au(rect),
                     repeat,
                 })
             }
             ClipItemKey::BoxShadow(shadow_rect, shadow_radius, prim_shadow_rect, blur_radius, clip_mode) => {
@@ -293,25 +285,16 @@ impl ClipNode {
                 ClipItem::Rectangle(rect, mode) => {
                     let data = ClipData::uniform(rect, 0.0, mode);
                     data.write(&mut request);
                 }
                 ClipItem::RoundedRectangle(ref rect, ref radius, mode) => {
                     let data = ClipData::rounded_rect(rect, radius, mode);
                     data.write(&mut request);
                 }
-                ClipItem::LineDecoration(ref info) => {
-                    request.push(info.rect);
-                    request.push([
-                        info.wavy_line_thickness,
-                        pack_as_float(info.style as u32),
-                        pack_as_float(info.orientation as u32),
-                        0.0,
-                    ]);
-                }
             }
         }
 
         match self.item {
             ClipItem::Image(ref mask) => {
                 resource_cache.request_image(
                     ImageRequest {
                         key: mask.image,
@@ -348,18 +331,17 @@ impl ClipNode {
                         &info.shadow_radius,
                         ClipMode::Clip,
                     );
 
                     data.write(&mut request);
                 }
             }
             ClipItem::Rectangle(..) |
-            ClipItem::RoundedRectangle(..) |
-            ClipItem::LineDecoration(..) => {}
+            ClipItem::RoundedRectangle(..) => {}
         }
     }
 }
 
 // The main clipping public interface that other modules access.
 pub struct ClipStore {
     pub clip_chain_nodes: Vec<ClipChainNode>,
     clip_node_instances: Vec<ClipNodeInstance>,
@@ -594,18 +576,17 @@ impl ClipStore {
                     // a clip mask. Instead, it can be handled by the primitive
                     // vertex shader as part of the local clip rect. This is an
                     // important optimization for reducing the number of clip
                     // masks that are allocated on common pages.
                     needs_mask |= match node.item {
                         ClipItem::Rectangle(_, ClipMode::ClipOut) |
                         ClipItem::RoundedRectangle(..) |
                         ClipItem::Image(..) |
-                        ClipItem::BoxShadow(..) |
-                        ClipItem::LineDecoration(..) => {
+                        ClipItem::BoxShadow(..) => {
                             true
                         }
 
                         ClipItem::Rectangle(_, ClipMode::Clip) => {
                             !flags.contains(ClipNodeFlags::SAME_COORD_SYSTEM)
                         }
                     };
 
@@ -646,27 +627,16 @@ impl ClipStore {
             size += op(self.clip_chain_nodes.as_ptr() as *const c_void);
             size += op(self.clip_node_instances.as_ptr() as *const c_void);
             size += op(self.clip_node_info.as_ptr() as *const c_void);
         }
         size
     }
 }
 
-#[derive(Debug, Clone)]
-#[cfg_attr(feature = "capture", derive(Serialize))]
-#[cfg_attr(feature = "replay", derive(Deserialize))]
-pub struct LineDecorationClipSource {
-    rect: LayoutRect,
-    style: LineStyle,
-    orientation: LineOrientation,
-    wavy_line_thickness: f32,
-}
-
-
 pub struct ComplexTranslateIter<I> {
     source: I,
     offset: LayoutVector2D,
 }
 
 impl<I: Iterator<Item = ComplexClipRegion>> Iterator for ComplexTranslateIter<I> {
     type Item = ComplexClipRegion;
     fn next(&mut self) -> Option<Self::Item> {
@@ -734,17 +704,16 @@ impl ClipRegion<Option<ComplexClipRegion
 #[derive(Debug, Clone, Eq, PartialEq, Hash)]
 #[cfg_attr(feature = "capture", derive(Serialize))]
 #[cfg_attr(feature = "replay", derive(Deserialize))]
 pub enum ClipItemKey {
     Rectangle(LayoutRectAu, ClipMode),
     RoundedRectangle(LayoutRectAu, BorderRadiusAu, ClipMode),
     ImageMask(LayoutRectAu, ImageKey, bool),
     BoxShadow(LayoutRectAu, BorderRadiusAu, LayoutRectAu, Au, BoxShadowClipMode),
-    LineDecoration(LayoutRectAu, LineStyle, LineOrientation, Au),
 }
 
 impl ClipItemKey {
     pub fn rectangle(rect: LayoutRect, mode: ClipMode) -> Self {
         ClipItemKey::Rectangle(rect.to_au(), mode)
     }
 
     pub fn rounded_rect(rect: LayoutRect, mut radii: BorderRadius, mode: ClipMode) -> Self {
@@ -763,75 +732,41 @@ impl ClipItemKey {
     pub fn image_mask(image_mask: &ImageMask) -> Self {
         ClipItemKey::ImageMask(
             image_mask.rect.to_au(),
             image_mask.image,
             image_mask.repeat,
         )
     }
 
-    pub fn line_decoration(
-        rect: LayoutRect,
-        style: LineStyle,
-        orientation: LineOrientation,
-        wavy_line_thickness: f32,
-    ) -> Self {
-        ClipItemKey::LineDecoration(
-            rect.to_au(),
-            style,
-            orientation,
-            Au::from_f32_px(wavy_line_thickness),
-        )
-    }
-
     pub fn box_shadow(
         shadow_rect: LayoutRect,
         shadow_radius: BorderRadius,
         prim_shadow_rect: LayoutRect,
         blur_radius: f32,
         clip_mode: BoxShadowClipMode,
     ) -> Self {
         ClipItemKey::BoxShadow(
             shadow_rect.to_au(),
             shadow_radius.into(),
             prim_shadow_rect.to_au(),
             Au::from_f32_px(blur_radius),
             clip_mode,
         )
     }
-
-    // Return a modified clip source that is the same as self
-    // but offset in local-space by a specified amount.
-    pub fn offset(&self, offset: &LayoutVector2D) -> Self {
-        let offset = offset.to_au();
-        match *self {
-            ClipItemKey::LineDecoration(rect, style, orientation, wavy_line_thickness) => {
-                ClipItemKey::LineDecoration(
-                    rect.translate(&offset),
-                    style,
-                    orientation,
-                    wavy_line_thickness,
-                )
-            }
-            _ => {
-                panic!("bug: other clip sources not expected here yet");
-            }
-        }
-    }
 }
 
 #[derive(Debug, Clone)]
 #[cfg_attr(feature = "capture", derive(Serialize))]
 #[cfg_attr(feature = "replay", derive(Deserialize))]
 pub enum ClipItem {
     Rectangle(LayoutRect, ClipMode),
     RoundedRectangle(LayoutRect, BorderRadius, ClipMode),
     Image(ImageMask),
     BoxShadow(BoxShadowClipSource),
-    LineDecoration(LineDecorationClipSource),
 }
 
 impl ClipItem {
     pub fn new_box_shadow(
         shadow_rect: LayoutRect,
         mut shadow_radius: BorderRadius,
         prim_shadow_rect: LayoutRect,
         blur_radius: f32,
@@ -936,17 +871,16 @@ impl ClipItem {
         match *self {
             ClipItem::Rectangle(clip_rect, ClipMode::Clip) => Some(clip_rect),
             ClipItem::Rectangle(_, ClipMode::ClipOut) => None,
             ClipItem::RoundedRectangle(clip_rect, _, ClipMode::Clip) => Some(clip_rect),
             ClipItem::RoundedRectangle(_, _, ClipMode::ClipOut) => None,
             ClipItem::Image(ref mask) if mask.repeat => None,
             ClipItem::Image(ref mask) => Some(mask.rect),
             ClipItem::BoxShadow(..) => None,
-            ClipItem::LineDecoration(..) => None,
         }
     }
 
     fn get_clip_result_complex(
         &self,
         transform: &LayoutToWorldTransform,
         prim_world_rect: &WorldRect,
         world_rect: &WorldRect,
@@ -957,18 +891,17 @@ impl ClipItem {
             }
             ClipItem::RoundedRectangle(ref clip_rect, ref radius, ClipMode::Clip) => {
                 let inner_clip_rect = extract_inner_rect_safe(clip_rect, radius);
                 (*clip_rect, inner_clip_rect)
             }
             ClipItem::Rectangle(_, ClipMode::ClipOut) |
             ClipItem::RoundedRectangle(_, _, ClipMode::ClipOut) |
             ClipItem::Image(..) |
-            ClipItem::BoxShadow(..) |
-            ClipItem::LineDecoration(..) => {
+            ClipItem::BoxShadow(..) => {
                 return ClipResult::Partial
             }
         };
 
         let inner_clip_rect = inner_rect.and_then(|ref inner_rect| {
             project_inner_rect(transform, inner_rect)
         });
 
@@ -1082,18 +1015,17 @@ impl ClipItem {
                             ClipResult::Partial
                         }
                         None => {
                             ClipResult::Reject
                         }
                     }
                 }
             }
-            ClipItem::BoxShadow(..) |
-            ClipItem::LineDecoration(..) => {
+            ClipItem::BoxShadow(..) => {
                 ClipResult::Partial
             }
         }
     }
 }
 
 /// Represents a local rect and a device space
 /// rectangles that are either outside or inside bounds.
--- a/gfx/webrender/src/display_list_flattener.rs
+++ b/gfx/webrender/src/display_list_flattener.rs
@@ -274,18 +274,16 @@ impl<'a> DisplayListFlattener<'a> {
             if let Some(pipeline) = self.scene.pipelines.get(&pipeline_id) {
                 if let Some(bg_color) = pipeline.background_color {
                     let root_bounds = LayoutRect::new(LayoutPoint::zero(), *frame_size);
                     let info = LayoutPrimitiveInfo::new(root_bounds);
                     self.add_solid_rectangle(
                         reference_frame_info,
                         &info,
                         bg_color,
-                        None,
-                        Vec::new(),
                     );
                 }
             }
         }
 
         self.prim_count_estimate += pipeline.display_list.prim_count_estimate();
         self.prim_store.primitives.reserve(self.prim_count_estimate);
 
@@ -558,18 +556,16 @@ impl<'a> DisplayListFlattener<'a> {
                     text_info.glyph_options,
                 );
             }
             SpecificDisplayItem::Rectangle(ref info) => {
                 self.add_solid_rectangle(
                     clip_and_scroll,
                     &prim_info,
                     info.color,
-                    None,
-                    Vec::new(),
                 );
             }
             SpecificDisplayItem::ClearRectangle => {
                 self.add_clear_rectangle(
                     clip_and_scroll,
                     &prim_info,
                 );
             }
@@ -913,22 +909,23 @@ impl<'a> DisplayListFlattener<'a> {
                 if cfg!(debug_assertions) && ChasePrimitive::LocalRect(info.rect) == self.config.chase_primitive {
                     println!("Chasing {:?} by local rect", prim_instance.prim_index);
                     self.prim_store.chase_id = Some(prim_instance.prim_index);
                 }
                 self.add_primitive_to_hit_testing_list(info, clip_and_scroll);
                 self.add_primitive_to_draw_list(prim_instance);
             }
         } else {
+            debug_assert!(clip_items.is_empty(), "No per-prim clips expected for shadowed primitives");
+
             // There is an active shadow context. Store as a pending primitive
             // for processing during pop_all_shadows.
             self.pending_shadow_items.push_back(ShadowItem::Primitive(PendingPrimitive {
                 clip_and_scroll,
                 info: *info,
-                clip_items,
                 container,
             }));
         }
     }
 
     pub fn push_stacking_context(
         &mut self,
         pipeline_id: PipelineId,
@@ -1477,32 +1474,20 @@ impl<'a> DisplayListFlattener<'a> {
 
                     for item in &items {
                         if let ShadowItem::Primitive(ref pending_primitive) = item {
                             // Offset the local rect and clip rect by the shadow offset.
                             let mut info = pending_primitive.info.clone();
                             info.rect = info.rect.translate(&pending_shadow.shadow.offset);
                             info.clip_rect = info.clip_rect.translate(&pending_shadow.shadow.offset);
 
-                            // Offset any local clip sources by the shadow offset.
-                            let clip_items: Vec<ClipItemKey> = pending_primitive
-                                .clip_items
-                                .iter()
-                                .map(|cs| cs.offset(&pending_shadow.shadow.offset))
-                                .collect();
-                            let clip_chain_id = self.build_clip_chain(
-                                clip_items,
-                                pending_primitive.clip_and_scroll.spatial_node_index,
-                                pending_primitive.clip_and_scroll.clip_chain_id,
-                            );
-
                             // Construct and add a primitive for the given shadow.
                             let shadow_prim_instance = self.create_primitive(
                                 &info,
-                                clip_chain_id,
+                                pending_primitive.clip_and_scroll.clip_chain_id,
                                 pending_primitive.clip_and_scroll.spatial_node_index,
                                 pending_primitive.container.create_shadow(&pending_shadow.shadow),
                             );
 
                             // Add the new primitive to the shadow picture.
                             prims.push(shadow_prim_instance);
                         }
                     }
@@ -1548,24 +1533,19 @@ impl<'a> DisplayListFlattener<'a> {
                         // picture on to the shadow stack, to avoid infinite recursion!
                         self.add_primitive_to_draw_list(shadow_prim_instance);
                     }
                 }
                 ShadowItem::Primitive(pending_primitive) => {
                     // For a normal primitive, if it has alpha > 0, then we add this
                     // as a normal primitive to the parent picture.
                     if pending_primitive.container.is_visible() {
-                        let clip_chain_id = self.build_clip_chain(
-                            pending_primitive.clip_items,
-                            pending_primitive.clip_and_scroll.spatial_node_index,
-                            pending_primitive.clip_and_scroll.clip_chain_id,
-                        );
                         let prim_instance = self.create_primitive(
                             &pending_primitive.info,
-                            clip_chain_id,
+                            pending_primitive.clip_and_scroll.clip_chain_id,
                             pending_primitive.clip_and_scroll.spatial_node_index,
                             pending_primitive.container,
                         );
                         if cfg!(debug_assertions) && ChasePrimitive::LocalRect(pending_primitive.info.rect) == self.config.chase_primitive {
                             println!("Chasing {:?} by local rect", prim_instance.prim_index);
                             self.prim_store.chase_id = Some(prim_instance.prim_index);
                         }
                         self.add_primitive_to_hit_testing_list(&pending_primitive.info, pending_primitive.clip_and_scroll);
@@ -1579,35 +1559,33 @@ impl<'a> DisplayListFlattener<'a> {
         self.pending_shadow_items = items;
     }
 
     pub fn add_solid_rectangle(
         &mut self,
         clip_and_scroll: ScrollNodeAndClipChain,
         info: &LayoutPrimitiveInfo,
         color: ColorF,
-        segments: Option<BrushSegmentDescriptor>,
-        extra_clips: Vec<ClipItemKey>,
     ) {
         if color.a == 0.0 {
             // Don't add transparent rectangles to the draw list, but do consider them for hit
             // testing. This allows specifying invisible hit testing areas.
             self.add_primitive_to_hit_testing_list(info, clip_and_scroll);
             return;
         }
 
         let prim = BrushPrimitive::new(
             BrushKind::new_solid(color),
-            segments,
+            None,
         );
 
         self.add_primitive(
             clip_and_scroll,
             info,
-            extra_clips,
+            Vec::new(),
             PrimitiveContainer::Brush(prim),
         );
     }
 
     pub fn add_clear_rectangle(
         &mut self,
         clip_and_scroll: ScrollNodeAndClipChain,
         info: &LayoutPrimitiveInfo,
@@ -1626,46 +1604,30 @@ impl<'a> DisplayListFlattener<'a> {
     }
 
     pub fn add_line(
         &mut self,
         clip_and_scroll: ScrollNodeAndClipChain,
         info: &LayoutPrimitiveInfo,
         wavy_line_thickness: f32,
         orientation: LineOrientation,
-        line_color: &ColorF,
+        color: &ColorF,
         style: LineStyle,
     ) {
-        let prim = BrushPrimitive::new(
-            BrushKind::new_solid(*line_color),
-            None,
+        let prim = BrushPrimitive::new_line_decoration(
+            *color,
+            style,
+            orientation,
+            wavy_line_thickness,
         );
 
-        let extra_clips = match style {
-            LineStyle::Solid => {
-                Vec::new()
-            }
-            LineStyle::Wavy |
-            LineStyle::Dotted |
-            LineStyle::Dashed => {
-                vec![
-                    ClipItemKey::line_decoration(
-                        info.rect,
-                        style,
-                        orientation,
-                        wavy_line_thickness,
-                    ),
-                ]
-            }
-        };
-
         self.add_primitive(
             clip_and_scroll,
             info,
-            extra_clips,
+            Vec::new(),
             PrimitiveContainer::Brush(prim),
         );
     }
 
     pub fn add_border(
         &mut self,
         clip_and_scroll: ScrollNodeAndClipChain,
         info: &LayoutPrimitiveInfo,
@@ -2212,17 +2174,16 @@ struct FlattenedStackingContext {
 }
 
 /// A primitive that is added while a shadow context is
 /// active is stored as a pending primitive and only
 /// added to pictures during pop_all_shadows.
 struct PendingPrimitive {
     clip_and_scroll: ScrollNodeAndClipChain,
     info: LayoutPrimitiveInfo,
-    clip_items: Vec<ClipItemKey>,
     container: PrimitiveContainer,
 }
 
 /// As shadows are pushed, they are stored as pending
 /// shadows, and handled at once during pop_all_shadows.
 struct PendingShadow {
     shadow: Shadow,
     clip_and_scroll: ScrollNodeAndClipChain,
--- a/gfx/webrender/src/hit_test.rs
+++ b/gfx/webrender/src/hit_test.rs
@@ -35,17 +35,16 @@ pub struct HitTestClipNode {
 
 impl HitTestClipNode {
     fn new(node: &ClipNode) -> Self {
         let region = match node.item {
             ClipItem::Rectangle(ref rect, mode) => HitTestRegion::Rectangle(*rect, mode),
             ClipItem::RoundedRectangle(ref rect, ref radii, ref mode) =>
                 HitTestRegion::RoundedRectangle(*rect, *radii, *mode),
             ClipItem::Image(ref mask) => HitTestRegion::Rectangle(mask.rect, ClipMode::Clip),
-            ClipItem::LineDecoration(_) |
             ClipItem::BoxShadow(_) => HitTestRegion::Invalid,
         };
 
         HitTestClipNode {
             region,
         }
     }
 }
--- a/gfx/webrender/src/image.rs
+++ b/gfx/webrender/src/image.rs
@@ -232,22 +232,22 @@ pub fn compute_tile_range(
 ) -> TileRange {
     // Tile dimensions in normalized coordinates.
     let tw = 1. / (tile_size as f32);
     let th = 1. / (tile_size as f32);
 
     let t0 = point2(
         f32::floor(visible_area.origin.x as f32 * tw),
         f32::floor(visible_area.origin.y as f32 * th),
-    ).cast::<u16>();
+    ).try_cast::<u16>().unwrap_or_else(|| panic!("compute_tile_range bad values {:?} {:?}", visible_area, tile_size));
 
     let t1 = point2(
         f32::ceil(visible_area.max_x() as f32 * tw),
         f32::ceil(visible_area.max_y() as f32 * th),
-    ).cast::<u16>();
+    ).try_cast::<u16>().unwrap_or_else(|| panic!("compute_tile_range bad values {:?} {:?}", visible_area, tile_size));
 
     TileRange {
         origin: t0,
         size: (t1 - t0).to_size(),
     }
 }
 
 pub fn for_each_tile_in_range(
--- a/gfx/webrender/src/prim_store.rs
+++ b/gfx/webrender/src/prim_store.rs
@@ -3,22 +3,22 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 use api::{AlphaType, BorderRadius, BuiltDisplayList, ClipMode, ColorF, PictureRect};
 use api::{DeviceIntRect, DeviceIntSize, DevicePixelScale, ExtendMode, DeviceRect, PictureToRasterTransform};
 use api::{FilterOp, GlyphInstance, GradientStop, ImageKey, ImageRendering, ItemRange, TileOffset};
 use api::{RasterSpace, LayoutPoint, LayoutRect, LayoutSideOffsets, LayoutSize, LayoutToWorldTransform};
 use api::{LayoutVector2D, PremultipliedColorF, PropertyBinding, Shadow, YuvColorSpace, YuvFormat};
 use api::{DeviceIntSideOffsets, WorldPixel, BoxShadowClipMode, LayoutToWorldScale, NormalBorder, WorldRect};
-use api::{PicturePixel, RasterPixel, ColorDepth};
+use api::{PicturePixel, RasterPixel, ColorDepth, LineStyle, LineOrientation, LayoutSizeAu, AuHelpers};
 use app_units::Au;
 use border::{BorderCacheKey, BorderRenderTaskInfo};
 use clip_scroll_tree::{ClipScrollTree, CoordinateSystemId, SpatialNodeIndex};
 use clip::{ClipNodeFlags, ClipChainId, ClipChainInstance, ClipItem, ClipNodeCollector};
-use euclid::{TypedTransform3D, TypedRect};
+use euclid::{TypedTransform3D, TypedRect, TypedScale};
 use frame_builder::{FrameBuildingContext, FrameBuildingState, PictureContext, PictureState};
 use frame_builder::PrimitiveContext;
 use glyph_rasterizer::{FontInstance, FontTransform, GlyphKey, FONT_SIZE_LIMIT};
 use gpu_cache::{GpuBlockData, GpuCache, GpuCacheAddress, GpuCacheHandle, GpuDataRequest,
                 ToGpuBlocks};
 use gpu_types::BrushFlags;
 use image::{for_each_tile, for_each_repetition};
 use intern;
@@ -402,16 +402,23 @@ pub enum BrushKind {
         reverse_stops: bool,
         start_point: LayoutPoint,
         end_point: LayoutPoint,
         stretch_size: LayoutSize,
         tile_spacing: LayoutSize,
         visible_tiles: Vec<VisibleGradientTile>,
         stops_opacity: PrimitiveOpacity,
     },
+    LineDecoration {
+        color: ColorF,
+        style: LineStyle,
+        orientation: LineOrientation,
+        wavy_line_thickness: f32,
+        handle: Option<RenderTaskCacheEntryHandle>,
+    },
     Border {
         source: BorderSource,
     },
 }
 
 impl BrushKind {
     fn supports_segments(&self, resource_cache: &ResourceCache) -> bool {
         match *self {
@@ -429,16 +436,18 @@ impl BrushKind {
             BrushKind::Border { .. } |
             BrushKind::LinearGradient { .. } => true,
 
             // TODO(gw): Allow batch.rs to add segment instances
             //           for Picture primitives.
             BrushKind::Picture { .. } => false,
 
             BrushKind::Clear => false,
+
+            BrushKind::LineDecoration { .. } => false,
         }
     }
 
     // Construct a brush that is a solid color rectangle.
     pub fn new_solid(color: ColorF) -> BrushKind {
         BrushKind::Solid {
             color,
             opacity_binding: OpacityBinding::new(),
@@ -633,16 +642,34 @@ impl BrushPrimitive {
 
     pub fn new_picture(prim: PicturePrimitive) -> Self {
         BrushPrimitive {
             kind: BrushKind::Picture(prim),
             segment_desc: None,
         }
     }
 
+    pub fn new_line_decoration(
+        color: ColorF,
+        style: LineStyle,
+        orientation: LineOrientation,
+        wavy_line_thickness: f32,
+    ) -> Self {
+        BrushPrimitive::new(
+            BrushKind::LineDecoration {
+                color,
+                style,
+                orientation,
+                wavy_line_thickness,
+                handle: None,
+            },
+            None,
+        )
+    }
+
     fn write_gpu_blocks(
         &self,
         request: &mut GpuDataRequest,
         local_rect: LayoutRect,
     ) {
         // has to match VECS_PER_SPECIFIC_BRUSH
         match self.kind {
             BrushKind::Border { .. } => {
@@ -683,16 +710,48 @@ impl BrushPrimitive {
                 request.push(PremultipliedColorF::WHITE);
                 request.push([
                     stretch_size.width + tile_spacing.width,
                     stretch_size.height + tile_spacing.height,
                     0.0,
                     0.0,
                 ]);
             }
+            BrushKind::LineDecoration { style, ref color, orientation, wavy_line_thickness, .. } => {
+                // Work out the stretch parameters (for image repeat) based on the
+                // line decoration parameters.
+
+                let size = get_line_decoration_sizes(
+                    &local_rect.size,
+                    orientation,
+                    style,
+                    wavy_line_thickness,
+                );
+
+                match size {
+                    Some((inline_size, _)) => {
+                        let (sx, sy) = match orientation {
+                            LineOrientation::Horizontal => (inline_size, local_rect.size.height),
+                            LineOrientation::Vertical => (local_rect.size.width, inline_size),
+                        };
+
+                        request.push(color.premultiplied());
+                        request.push(PremultipliedColorF::WHITE);
+                        request.push([
+                            sx,
+                            sy,
+                            0.0,
+                            0.0,
+                        ]);
+                    }
+                    None => {
+                        request.push(color.premultiplied());
+                    }
+                }
+            }
             // Solid rects also support opacity collapsing.
             BrushKind::Solid { color, ref opacity_binding, .. } => {
                 request.push(color.scale_alpha(opacity_binding.current).premultiplied());
             }
             BrushKind::Clear => {
                 // Opaque black with operator dest out
                 request.push(PremultipliedColorF::BLACK);
             }
@@ -733,16 +792,26 @@ impl BrushPrimitive {
 #[derive(Debug, Copy, Clone, Eq, Hash, PartialEq)]
 #[cfg_attr(feature = "capture", derive(Serialize))]
 #[cfg_attr(feature = "replay", derive(Deserialize))]
 pub struct ImageCacheKey {
     pub request: ImageRequest,
     pub texel_rect: Option<DeviceIntRect>,
 }
 
+#[derive(Clone, Debug, Hash, PartialEq, Eq)]
+#[cfg_attr(feature = "capture", derive(Serialize))]
+#[cfg_attr(feature = "replay", derive(Deserialize))]
+pub struct LineDecorationCacheKey {
+    style: LineStyle,
+    orientation: LineOrientation,
+    wavy_line_thickness: Au,
+    size: LayoutSizeAu,
+}
+
 // Where to find the texture data for an image primitive.
 #[derive(Debug)]
 pub enum ImageSource {
     // A normal image - just reference the texture cache.
     Default,
     // An image that is pre-rendered into the texture cache
     // via a render task.
     Cache {
@@ -1318,16 +1387,19 @@ impl PrimitiveContainer {
             PrimitiveContainer::TextRun(ref info) => {
                 info.specified_font.color.a > 0
             }
             PrimitiveContainer::Brush(ref brush) => {
                 match brush.kind {
                     BrushKind::Solid { ref color, .. } => {
                         color.a > 0.0
                     }
+                    BrushKind::LineDecoration { ref color, .. } => {
+                        color.a > 0.0
+                    }
                     BrushKind::Clear |
                     BrushKind::Picture { .. } |
                     BrushKind::Image { .. } |
                     BrushKind::YuvImage { .. } |
                     BrushKind::RadialGradient { .. } |
                     BrushKind::Border { .. } |
                     BrushKind::LinearGradient { .. } => {
                         true
@@ -1380,16 +1452,24 @@ impl PrimitiveContainer {
 
                             }
                         };
                         PrimitiveContainer::Brush(BrushPrimitive::new(
                             source,
                             None,
                         ))
                     }
+                    BrushKind::LineDecoration { style, orientation, wavy_line_thickness, .. } => {
+                        PrimitiveContainer::Brush(BrushPrimitive::new_line_decoration(
+                            shadow.color,
+                            style,
+                            orientation,
+                            wavy_line_thickness,
+                        ))
+                    }
                     BrushKind::Image { request, stretch_size, .. } => {
                         PrimitiveContainer::Brush(BrushPrimitive::new(
                             BrushKind::new_image(request.clone(),
                                                  stretch_size.clone(),
                                                  shadow.color),
                             None,
                         ))
                     }
@@ -1604,16 +1684,17 @@ impl PrimitiveStore {
                     // as the primitive to collapse the opacity into.
                     BrushKind::Solid { .. } | BrushKind::Image { .. } => {
                         return Some(prim_instance.prim_index)
                     }
                     BrushKind::Border { .. } |
                     BrushKind::YuvImage { .. } |
                     BrushKind::LinearGradient { .. } |
                     BrushKind::RadialGradient { .. } |
+                    BrushKind::LineDecoration { .. } |
                     BrushKind::Clear => {}
                 }
             }
             PrimitiveDetails::TextRun(..) => {}
         }
 
         None
     }
@@ -1650,16 +1731,17 @@ impl PrimitiveStore {
                             BrushKind::Image { ref mut opacity_binding, .. } => {
                                 opacity_binding.push(binding);
                             }
                             BrushKind::Clear { .. } |
                             BrushKind::Picture { .. } |
                             BrushKind::YuvImage { .. } |
                             BrushKind::Border { .. } |
                             BrushKind::LinearGradient { .. } |
+                            BrushKind::LineDecoration { .. } |
                             BrushKind::RadialGradient { .. } => {
                                 unreachable!("bug: invalid prim type for opacity collapse");
                             }
                         }
                     }
                     PrimitiveDetails::TextRun(..) => {
                         unreachable!("bug: invalid prim type for opacity collapse");
                     }
@@ -2217,17 +2299,17 @@ fn write_brush_segment_description(
                         -0.5 * info.shadow_rect_alloc_size.width,
                         -0.5 * info.shadow_rect_alloc_size.height,
                     ),
                     inner_clip_mode,
                 );
 
                 continue;
             }
-            ClipItem::LineDecoration(..) | ClipItem::Image(..) => {
+            ClipItem::Image(..) => {
                 rect_clips_only = false;
                 continue;
             }
         };
 
         segment_builder.push_clip_rect(local_clip_rect, radius, mode);
     }
 
@@ -2638,16 +2720,95 @@ impl Primitive {
                             } else if request_source_image {
                                 frame_state.resource_cache.request_image(
                                     request,
                                     frame_state.gpu_cache,
                                 );
                             }
                         }
                     }
+                    BrushKind::LineDecoration { ref mut handle, style, orientation, wavy_line_thickness, .. } => {
+                        // Work out the device pixel size to be used to cache this line decoration.
+
+                        let size = get_line_decoration_sizes(
+                            &metadata.local_rect.size,
+                            orientation,
+                            style,
+                            wavy_line_thickness,
+                        );
+
+                        if let Some((inline_size, block_size)) = size {
+                            let size = match orientation {
+                                LineOrientation::Horizontal => LayoutSize::new(inline_size, block_size),
+                                LineOrientation::Vertical => LayoutSize::new(block_size, inline_size),
+                            };
+
+                            // If dotted, adjust the clip rect to ensure we don't draw a final
+                            // partial dot.
+                            if style == LineStyle::Dotted {
+                                let clip_size = match orientation {
+                                    LineOrientation::Horizontal => {
+                                        LayoutSize::new(
+                                            inline_size * (metadata.local_rect.size.width / inline_size).floor(),
+                                            metadata.local_rect.size.height,
+                                        )
+                                    }
+                                    LineOrientation::Vertical => {
+                                        LayoutSize::new(
+                                            metadata.local_rect.size.width,
+                                            inline_size * (metadata.local_rect.size.height / inline_size).floor(),
+                                        )
+                                    }
+                                };
+                                let clip_rect = LayoutRect::new(
+                                    metadata.local_rect.origin,
+                                    clip_size,
+                                );
+                                prim_instance.combined_local_clip_rect = clip_rect
+                                    .intersection(&prim_instance.combined_local_clip_rect)
+                                    .unwrap_or(LayoutRect::zero());
+                            }
+
+                            // TODO(gw): Do we ever need / want to support scales for text decorations
+                            //           based on the current transform?
+                            let scale_factor = TypedScale::new(1.0) * frame_context.device_pixel_scale;
+                            let task_size = (size * scale_factor).ceil().to_i32();
+
+                            let cache_key = LineDecorationCacheKey {
+                                style,
+                                orientation,
+                                wavy_line_thickness: Au::from_f32_px(wavy_line_thickness),
+                                size: size.to_au(),
+                            };
+
+                            // Request a pre-rendered image task.
+                            *handle = Some(frame_state.resource_cache.request_render_task(
+                                RenderTaskCacheKey {
+                                    size: task_size,
+                                    kind: RenderTaskCacheKeyKind::LineDecoration(cache_key),
+                                },
+                                frame_state.gpu_cache,
+                                frame_state.render_tasks,
+                                None,
+                                false,
+                                |render_tasks| {
+                                    let task = RenderTask::new_line_decoration(
+                                        task_size,
+                                        style,
+                                        orientation,
+                                        wavy_line_thickness,
+                                        size,
+                                    );
+                                    let task_id = render_tasks.add(task);
+                                    pic_state.tasks.push(task_id);
+                                    task_id
+                                }
+                            ));
+                        }
+                    }
                     BrushKind::YuvImage { format, yuv_key, image_rendering, .. } => {
                         prim_instance.opacity = PrimitiveOpacity::opaque();
 
                         let channel_num = format.get_plane_num();
                         debug_assert!(channel_num <= 3);
                         for channel in 0 .. channel_num {
                             frame_state.resource_cache.request_image(
                                 ImageRequest {
@@ -3048,8 +3209,53 @@ pub fn get_raster_rects(
         clipped_raster_rect,
         device_pixel_scale,
     );
 
     let transform = map_to_raster.get_transform();
 
     Some((clipped.to_i32(), unclipped, transform))
 }
+
+/// Get the inline (horizontal) and block (vertical) sizes
+/// for a given line decoration.
+fn get_line_decoration_sizes(
+    rect_size: &LayoutSize,
+    orientation: LineOrientation,
+    style: LineStyle,
+    wavy_line_thickness: f32,
+) -> Option<(f32, f32)> {
+    let h = match orientation {
+        LineOrientation::Horizontal => rect_size.height,
+        LineOrientation::Vertical => rect_size.width,
+    };
+
+    // TODO(gw): The formulae below are based on the existing gecko and line
+    //           shader code. They give reasonable results for most inputs,
+    //           but could definitely do with a detailed pass to get better
+    //           quality on a wider range of inputs!
+    //           See nsCSSRendering::PaintDecorationLine in Gecko.
+
+    match style {
+        LineStyle::Solid => {
+            None
+        }
+        LineStyle::Dashed => {
+            let dash_length = (3.0 * h).min(64.0).max(1.0);
+
+            Some((2.0 * dash_length, 4.0))
+        }
+        LineStyle::Dotted => {
+            let diameter = h.min(64.0).max(1.0);
+            let period = 2.0 * diameter;
+
+            Some((period, diameter))
+        }
+        LineStyle::Wavy => {
+            let line_thickness = wavy_line_thickness.max(1.0);
+            let slope_length = h - line_thickness;
+            let flat_length = ((line_thickness - 1.0) * 2.0).max(1.0);
+            let approx_period = 2.0 * (slope_length + flat_length);
+
+            Some((approx_period, h))
+        }
+    }
+}
--- a/gfx/webrender/src/render_task.rs
+++ b/gfx/webrender/src/render_task.rs
@@ -1,14 +1,15 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 use api::{DeviceIntPoint, DeviceIntRect, DeviceIntSize, DeviceSize, DeviceIntSideOffsets};
 use api::{DevicePixelScale, ImageDescriptor, ImageFormat};
+use api::{LineStyle, LineOrientation, LayoutSize};
 #[cfg(feature = "pathfinder")]
 use api::FontRenderMode;
 use border::BorderCacheKey;
 use box_shadow::{BoxShadowCacheKey};
 use clip::{ClipDataStore, ClipItem, ClipStore, ClipNodeRange};
 use clip_scroll_tree::SpatialNodeIndex;
 use device::TextureFilter;
 #[cfg(feature = "pathfinder")]
@@ -16,17 +17,17 @@ use euclid::{TypedPoint2D, TypedVector2D
 use freelist::{FreeList, FreeListHandle, WeakFreeListHandle};
 use glyph_rasterizer::GpuGlyphCacheKey;
 use gpu_cache::{GpuCache, GpuCacheAddress, GpuCacheHandle};
 use gpu_types::{BorderInstance, ImageSource, UvRectKind};
 use internal_types::{CacheTextureId, FastHashMap, SavedTargetIndex};
 #[cfg(feature = "pathfinder")]
 use pathfinder_partitioner::mesh::Mesh;
 use picture::PictureCacheKey;
-use prim_store::{PrimitiveIndex, ImageCacheKey};
+use prim_store::{PrimitiveIndex, ImageCacheKey, LineDecorationCacheKey};
 #[cfg(feature = "debugger")]
 use print_tree::{PrintTreePrinter};
 use render_backend::FrameId;
 use resource_cache::{CacheItem, ResourceCache};
 use std::{cmp, ops, usize, f32, i32};
 use texture_cache::{TextureCache, TextureCacheHandle, Eviction};
 use tiling::{RenderPass, RenderTargetIndex};
 use tiling::{RenderTargetKind};
@@ -112,16 +113,22 @@ impl RenderTaskTree {
                 debug_assert!(pass_index == passes.len() - 1);
             }
             RenderTaskLocation::Dynamic(..) |
             RenderTaskLocation::TextureCache(..) => {
                 debug_assert!(pass_index < passes.len() - 1);
             }
         }
 
+        let pass_index = if task.is_global_cached_task() {
+            0
+        } else {
+            pass_index
+        };
+
         let pass = &mut passes[pass_index];
         pass.add_render_task(id, task.get_dynamic_size(), task.target_kind());
     }
 
     pub fn prepare_for_render(&mut self) {
         for task in &mut self.tasks {
             task.prepare_for_render();
         }
@@ -288,16 +295,26 @@ pub struct BorderTask {
 pub struct BlitTask {
     pub source: BlitSource,
     pub padding: DeviceIntSideOffsets,
 }
 
 #[derive(Debug)]
 #[cfg_attr(feature = "capture", derive(Serialize))]
 #[cfg_attr(feature = "replay", derive(Deserialize))]
+pub struct LineDecorationTask {
+    pub wavy_line_thickness: f32,
+    pub style: LineStyle,
+    pub orientation: LineOrientation,
+    pub local_size: LayoutSize,
+}
+
+#[derive(Debug)]
+#[cfg_attr(feature = "capture", derive(Serialize))]
+#[cfg_attr(feature = "replay", derive(Deserialize))]
 pub struct RenderTaskData {
     pub data: [f32; FLOATS_PER_RENDER_TASK_INFO],
 }
 
 #[derive(Debug)]
 #[cfg_attr(feature = "capture", derive(Serialize))]
 #[cfg_attr(feature = "replay", derive(Deserialize))]
 pub enum RenderTaskKind {
@@ -307,16 +324,17 @@ pub enum RenderTaskKind {
     VerticalBlur(BlurTask),
     HorizontalBlur(BlurTask),
     #[allow(dead_code)]
     Glyph(GlyphTask),
     Readback(DeviceIntRect),
     Scaling(ScalingTask),
     Blit(BlitTask),
     Border(BorderTask),
+    LineDecoration(LineDecorationTask),
 }
 
 #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
 #[cfg_attr(feature = "capture", derive(Serialize))]
 #[cfg_attr(feature = "replay", derive(Deserialize))]
 pub enum ClearMode {
     // Applicable to color and alpha targets.
     Zero,
@@ -433,16 +451,36 @@ impl RenderTask {
             RenderTaskKind::Blit(BlitTask {
                 source,
                 padding: *padding,
             }),
             ClearMode::Transparent,
         )
     }
 
+    pub fn new_line_decoration(
+        size: DeviceIntSize,
+        style: LineStyle,
+        orientation: LineOrientation,
+        wavy_line_thickness: f32,
+        local_size: LayoutSize,
+    ) -> Self {
+        RenderTask::with_dynamic_location(
+            size,
+            Vec::new(),
+            RenderTaskKind::LineDecoration(LineDecorationTask {
+                style,
+                orientation,
+                wavy_line_thickness,
+                local_size,
+            }),
+            ClearMode::Transparent,
+        )
+    }
+
     pub fn new_mask(
         outer_rect: DeviceIntRect,
         clip_node_range: ClipNodeRange,
         root_spatial_node_index: SpatialNodeIndex,
         clip_store: &mut ClipStore,
         gpu_cache: &mut GpuCache,
         resource_cache: &mut ResourceCache,
         render_tasks: &mut RenderTaskTree,
@@ -504,18 +542,17 @@ impl RenderTask {
                             children.push(root_task_id);
 
                             root_task_id
                         }
                     ));
                 }
                 ClipItem::Rectangle(..) |
                 ClipItem::RoundedRectangle(..) |
-                ClipItem::Image(..) |
-                ClipItem::LineDecoration(..) => {}
+                ClipItem::Image(..) => {}
             }
         }
 
         RenderTask::with_dynamic_location(
             outer_rect.size,
             children,
             RenderTaskKind::CacheMask(CacheMaskTask {
                 actual_rect: outer_rect,
@@ -696,16 +733,17 @@ impl RenderTask {
 
             RenderTaskKind::Scaling(ref task) => {
                 task.uv_rect_kind
             }
 
             RenderTaskKind::ClipRegion(..) |
             RenderTaskKind::Glyph(_) |
             RenderTaskKind::Border(..) |
+            RenderTaskKind::LineDecoration(..) |
             RenderTaskKind::Blit(..) => {
                 UvRectKind::Rect
             }
         }
     }
 
     // Write (up to) 8 floats of data specific to the type
     // of render task that is provided to the GPU shaders
@@ -742,16 +780,17 @@ impl RenderTask {
             }
             RenderTaskKind::Glyph(_) => {
                 [1.0, 0.0]
             }
             RenderTaskKind::ClipRegion(..) |
             RenderTaskKind::Readback(..) |
             RenderTaskKind::Scaling(..) |
             RenderTaskKind::Border(..) |
+            RenderTaskKind::LineDecoration(..) |
             RenderTaskKind::Blit(..) => {
                 [0.0; 2]
             }
         };
 
         let (mut target_rect, target_index) = self.get_target_rect();
         // The primitives inside a fixed-location render task
         // are already placed to their corresponding positions,
@@ -784,16 +823,17 @@ impl RenderTask {
                 gpu_cache.get_address(&info.uv_rect_handle)
             }
             RenderTaskKind::ClipRegion(..) |
             RenderTaskKind::Readback(..) |
             RenderTaskKind::Scaling(..) |
             RenderTaskKind::Blit(..) |
             RenderTaskKind::Border(..) |
             RenderTaskKind::CacheMask(..) |
+            RenderTaskKind::LineDecoration(..) |
             RenderTaskKind::Glyph(..) => {
                 panic!("texture handle not supported for this task kind");
             }
         }
     }
 
     pub fn get_dynamic_size(&self) -> DeviceIntSize {
         match self.location {
@@ -833,16 +873,18 @@ impl RenderTask {
             }
         }
     }
 
     pub fn target_kind(&self) -> RenderTargetKind {
         match self.kind {
             RenderTaskKind::Readback(..) => RenderTargetKind::Color,
 
+            RenderTaskKind::LineDecoration(..) => RenderTargetKind::Color,
+
             RenderTaskKind::ClipRegion(..) |
             RenderTaskKind::CacheMask(..) => {
                 RenderTargetKind::Alpha
             }
 
             RenderTaskKind::VerticalBlur(ref task_info) |
             RenderTaskKind::HorizontalBlur(ref task_info) => {
                 task_info.target_kind
@@ -862,16 +904,40 @@ impl RenderTask {
             }
 
             RenderTaskKind::Blit(..) => {
                 RenderTargetKind::Color
             }
         }
     }
 
+    /// If true, draw this task in the first pass. This is useful
+    /// for simple texture cached render tasks that we want to be made
+    /// available to all subsequent render passes.
+    pub fn is_global_cached_task(&self) -> bool {
+        match self.kind {
+            RenderTaskKind::LineDecoration(..) => {
+                true
+            }
+
+            RenderTaskKind::Readback(..) |
+            RenderTaskKind::ClipRegion(..) |
+            RenderTaskKind::CacheMask(..) |
+            RenderTaskKind::VerticalBlur(..) |
+            RenderTaskKind::HorizontalBlur(..) |
+            RenderTaskKind::Glyph(..) |
+            RenderTaskKind::Scaling(..) |
+            RenderTaskKind::Border(..) |
+            RenderTaskKind::Picture(..) |
+            RenderTaskKind::Blit(..) => {
+                false
+            }
+        }
+    }
+
     // Optionally, prepare the render task for drawing. This is executed
     // after all resource cache items (textures and glyphs) have been
     // resolved and can be queried. It also allows certain render tasks
     // to defer calculating an exact size until now, if desired.
     pub fn prepare_for_render(&mut self) {
     }
 
     pub fn write_gpu_blocks(
@@ -889,16 +955,17 @@ impl RenderTask {
                 (&mut info.uv_rect_handle, info.uv_rect_kind)
             }
             RenderTaskKind::Readback(..) |
             RenderTaskKind::Scaling(..) |
             RenderTaskKind::Blit(..) |
             RenderTaskKind::ClipRegion(..) |
             RenderTaskKind::Border(..) |
             RenderTaskKind::CacheMask(..) |
+            RenderTaskKind::LineDecoration(..) |
             RenderTaskKind::Glyph(..) => {
                 return;
             }
         };
 
         if let Some(mut request) = gpu_cache.request(cache_handle) {
             let p0 = target_rect.origin.to_f32();
             let p1 = target_rect.bottom_right().to_f32();
@@ -919,16 +986,19 @@ impl RenderTask {
         match self.kind {
             RenderTaskKind::Picture(ref task) => {
                 pt.new_level(format!("Picture of {:?}", task.prim_index));
             }
             RenderTaskKind::CacheMask(ref task) => {
                 pt.new_level(format!("CacheMask with {} clips", task.clip_node_range.count));
                 pt.add_item(format!("rect: {:?}", task.actual_rect));
             }
+            RenderTaskKind::LineDecoration(..) => {
+                pt.new_level("LineDecoration".to_owned());
+            }
             RenderTaskKind::ClipRegion(..) => {
                 pt.new_level("ClipRegion".to_owned());
             }
             RenderTaskKind::VerticalBlur(ref task) => {
                 pt.new_level("VerticalBlur".to_owned());
                 task.print_with(pt);
             }
             RenderTaskKind::HorizontalBlur(ref task) => {
@@ -986,16 +1056,17 @@ impl RenderTask {
 #[cfg_attr(feature = "replay", derive(Deserialize))]
 pub enum RenderTaskCacheKeyKind {
     BoxShadow(BoxShadowCacheKey),
     Image(ImageCacheKey),
     #[allow(dead_code)]
     Glyph(GpuGlyphCacheKey),
     Picture(PictureCacheKey),
     Border(BorderCacheKey),
+    LineDecoration(LineDecorationCacheKey),
 }
 
 #[derive(Clone, Debug, Hash, PartialEq, Eq)]
 #[cfg_attr(feature = "capture", derive(Serialize))]
 #[cfg_attr(feature = "replay", derive(Deserialize))]
 pub struct RenderTaskCacheKey {
     pub size: DeviceIntSize,
     pub kind: RenderTaskCacheKeyKind,
--- a/gfx/webrender/src/renderer.rs
+++ b/gfx/webrender/src/renderer.rs
@@ -151,16 +151,20 @@ const GPU_TAG_BRUSH_SOLID: GpuProfileTag
 const GPU_TAG_CACHE_CLIP: GpuProfileTag = GpuProfileTag {
     label: "C_Clip",
     color: debug_colors::PURPLE,
 };
 const GPU_TAG_CACHE_BORDER: GpuProfileTag = GpuProfileTag {
     label: "C_Border",
     color: debug_colors::CORNSILK,
 };
+const GPU_TAG_CACHE_LINE_DECORATION: GpuProfileTag = GpuProfileTag {
+    label: "C_LineDecoration",
+    color: debug_colors::YELLOWGREEN,
+};
 const GPU_TAG_SETUP_TARGET: GpuProfileTag = GpuProfileTag {
     label: "target init",
     color: debug_colors::SLATEGREY,
 };
 const GPU_TAG_SETUP_DATA: GpuProfileTag = GpuProfileTag {
     label: "data init",
     color: debug_colors::LIGHTGREY,
 };
@@ -387,16 +391,53 @@ pub(crate) mod desc {
             VertexAttribute {
                 name: "aBlurDirection",
                 count: 1,
                 kind: VertexAttributeKind::I32,
             },
         ],
     };
 
+    pub const LINE: VertexDescriptor = VertexDescriptor {
+        vertex_attributes: &[
+            VertexAttribute {
+                name: "aPosition",
+                count: 2,
+                kind: VertexAttributeKind::F32,
+            },
+        ],
+        instance_attributes: &[
+            VertexAttribute {
+                name: "aTaskRect",
+                count: 4,
+                kind: VertexAttributeKind::F32,
+            },
+            VertexAttribute {
+                name: "aLocalSize",
+                count: 2,
+                kind: VertexAttributeKind::F32,
+            },
+            VertexAttribute {
+                name: "aWavyLineThickness",
+                count: 1,
+                kind: VertexAttributeKind::F32,
+            },
+            VertexAttribute {
+                name: "aStyle",
+                count: 1,
+                kind: VertexAttributeKind::I32,
+            },
+            VertexAttribute {
+                name: "aOrientation",
+                count: 1,
+                kind: VertexAttributeKind::I32,
+            },
+        ],
+    };
+
     pub const BORDER: VertexDescriptor = VertexDescriptor {
         vertex_attributes: &[
             VertexAttribute {
                 name: "aPosition",
                 count: 2,
                 kind: VertexAttributeKind::F32,
             },
         ],
@@ -613,16 +654,17 @@ pub(crate) mod desc {
 pub(crate) enum VertexArrayKind {
     Primitive,
     Blur,
     Clip,
     VectorStencil,
     VectorCover,
     Border,
     Scale,
+    LineDecoration,
 }
 
 #[derive(Clone, Debug, PartialEq)]
 pub enum GraphicsApi {
     OpenGL,
 }
 
 #[derive(Clone, Debug)]
@@ -1319,24 +1361,28 @@ impl VertexDataTexture {
     }
 
     /// Returns an estimate of the GPU memory consumed by this VertexDataTexture.
     fn size_in_bytes(&self) -> usize {
         self.texture.as_ref().map_or(0, |t| t.size_in_bytes())
     }
 
     fn update<T>(&mut self, device: &mut Device, data: &mut Vec<T>) {
-        if data.is_empty() {
-            return;
-        }
-
         debug_assert!(mem::size_of::<T>() % 16 == 0);
         let texels_per_item = mem::size_of::<T>() / 16;
         let items_per_row = MAX_VERTEX_TEXTURE_WIDTH / texels_per_item;
 
+        // Ensure we always end up with a texture when leaving this method.
+        if data.is_empty() {
+            if self.texture.is_some() {
+                return;
+            }
+            data.push(unsafe { mem::uninitialized() });
+        }
+
         // Extend the data array to be a multiple of the row size.
         // This ensures memory safety when the array is passed to
         // OpenGL to upload to the GPU.
         if items_per_row != 0 {
             while data.len() % items_per_row != 0 {
                 data.push(unsafe { mem::uninitialized() });
             }
         }
@@ -1442,16 +1488,17 @@ impl LazyInitializedDebugRenderer {
 
 // NB: If you add more VAOs here, be sure to deinitialize them in
 // `Renderer::deinit()` below.
 pub struct RendererVAOs {
     prim_vao: VAO,
     blur_vao: VAO,
     clip_vao: VAO,
     border_vao: VAO,
+    line_vao: VAO,
     scale_vao: VAO,
 }
 
 /// The renderer is responsible for submitting to the GPU the work prepared by the
 /// RenderBackend.
 ///
 /// We have a separate `Renderer` instance for each instance of WebRender (generally
 /// one per OS window), and all instances share the same thread.
@@ -1746,16 +1793,17 @@ impl Renderer {
         let gpu_glyph_renderer = try!(GpuGlyphRenderer::new(&mut device,
                                                             &prim_vao,
                                                             options.precache_flags));
 
         let blur_vao = device.create_vao_with_new_instances(&desc::BLUR, &prim_vao);
         let clip_vao = device.create_vao_with_new_instances(&desc::CLIP, &prim_vao);
         let border_vao = device.create_vao_with_new_instances(&desc::BORDER, &prim_vao);
         let scale_vao = device.create_vao_with_new_instances(&desc::SCALE, &prim_vao);
+        let line_vao = device.create_vao_with_new_instances(&desc::LINE, &prim_vao);
         let texture_cache_upload_pbo = device.create_pbo();
 
         let texture_resolver = TextureResolver::new(&mut device);
 
         let prim_header_f_texture = VertexDataTexture::new(&mut device, ImageFormat::RGBAF32);
         let prim_header_i_texture = VertexDataTexture::new(&mut device, ImageFormat::RGBAI32);
         let transforms_texture = VertexDataTexture::new(&mut device, ImageFormat::RGBAF32);
         let render_task_texture = VertexDataTexture::new(&mut device, ImageFormat::RGBAF32);
@@ -1938,16 +1986,17 @@ impl Renderer {
             gpu_profile,
             gpu_glyph_renderer,
             vaos: RendererVAOs {
                 prim_vao,
                 blur_vao,
                 clip_vao,
                 border_vao,
                 scale_vao,
+                line_vao,
             },
             transforms_texture,
             prim_header_i_texture,
             prim_header_f_texture,
             render_task_texture,
             pipeline_info: PipelineInfo::default(),
             dither_matrix_texture,
             external_image_handler: None,
@@ -2160,21 +2209,16 @@ impl Renderer {
             target.zero_clears.len(),
         );
         debug_target.add(
             debug_server::BatchKind::Clip,
             "BoxShadows",
             target.clip_batcher.box_shadows.len(),
         );
         debug_target.add(
-            debug_server::BatchKind::Clip,
-            "LineDecorations",
-            target.clip_batcher.line_decorations.len(),
-        );
-        debug_target.add(
             debug_server::BatchKind::Cache,
             "Vertical Blur",
             target.vertical_blurs.len(),
         );
         debug_target.add(
             debug_server::BatchKind::Cache,
             "Horizontal Blur",
             target.horizontal_blurs.len(),
@@ -3445,32 +3489,16 @@ impl Renderer {
                 self.draw_instanced_batch(
                     items,
                     VertexArrayKind::Clip,
                     &textures,
                     stats,
                 );
             }
 
-            // draw line decoration clips
-            if !target.clip_batcher.line_decorations.is_empty() {
-                let _gm2 = self.gpu_profile.start_marker("clip lines");
-                self.shaders.borrow_mut().cs_clip_line.bind(
-                    &mut self.device,
-                    projection,
-                    &mut self.renderer_errors,
-                );
-                self.draw_instanced_batch(
-                    &target.clip_batcher.line_decorations,
-                    VertexArrayKind::Clip,
-                    &BatchTextures::no_texture(),
-                    stats,
-                );
-            }
-
             // draw image masks
             for (mask_texture_id, items) in target.clip_batcher.images.iter() {
                 let _gm2 = self.gpu_profile.start_marker("clip images");
                 let textures = BatchTextures {
                     colors: [
                         mask_texture_id.clone(),
                         TextureSource::Invalid,
                         TextureSource::Invalid,
@@ -3579,16 +3607,41 @@ impl Renderer {
                     &BatchTextures::no_texture(),
                     stats,
                 );
             }
 
             self.set_blend(false, FramebufferKind::Other);
         }
 
+        // Draw any line decorations for this target.
+        if !target.line_decorations.is_empty() {
+            let _timer = self.gpu_profile.start_timer(GPU_TAG_CACHE_LINE_DECORATION);
+
+            self.set_blend(true, FramebufferKind::Other);
+            self.set_blend_mode_premultiplied_alpha(FramebufferKind::Other);
+
+            if !target.line_decorations.is_empty() {
+                self.shaders.borrow_mut().cs_line_decoration.bind(
+                    &mut self.device,
+                    &projection,
+                    &mut self.renderer_errors,
+                );
+
+                self.draw_instanced_batch(
+                    &target.line_decorations,
+                    VertexArrayKind::LineDecoration,
+                    &BatchTextures::no_texture(),
+                    stats,
+                );
+            }
+
+            self.set_blend(false, FramebufferKind::Other);
+        }
+
         // Draw any blurs for this target.
         if !target.horizontal_blurs.is_empty() {
             let _timer = self.gpu_profile.start_timer(GPU_TAG_BLUR);
 
             {
                 let mut shaders = self.shaders.borrow_mut();
                 match target.target_kind {
                     RenderTargetKind::Alpha => &mut shaders.cs_blur_a8,
@@ -4242,16 +4295,17 @@ impl Renderer {
         self.prim_header_f_texture.deinit(&mut self.device);
         self.prim_header_i_texture.deinit(&mut self.device);
         self.render_task_texture.deinit(&mut self.device);
         self.device.delete_pbo(self.texture_cache_upload_pbo);
         self.texture_resolver.deinit(&mut self.device);
         self.device.delete_vao(self.vaos.prim_vao);
         self.device.delete_vao(self.vaos.clip_vao);
         self.device.delete_vao(self.vaos.blur_vao);
+        self.device.delete_vao(self.vaos.line_vao);
         self.device.delete_vao(self.vaos.border_vao);
         self.device.delete_vao(self.vaos.scale_vao);
 
         #[cfg(feature = "debug_renderer")]
         {
             self.debug.deinit(&mut self.device);
         }
 
@@ -4970,31 +5024,33 @@ fn get_vao<'a>(vertex_array_kind: Vertex
     match vertex_array_kind {
         VertexArrayKind::Primitive => &vaos.prim_vao,
         VertexArrayKind::Clip => &vaos.clip_vao,
         VertexArrayKind::Blur => &vaos.blur_vao,
         VertexArrayKind::VectorStencil => &gpu_glyph_renderer.vector_stencil_vao,
         VertexArrayKind::VectorCover => &gpu_glyph_renderer.vector_cover_vao,
         VertexArrayKind::Border => &vaos.border_vao,
         VertexArrayKind::Scale => &vaos.scale_vao,
+        VertexArrayKind::LineDecoration => &vaos.line_vao,
     }
 }
 
 #[cfg(not(feature = "pathfinder"))]
 fn get_vao<'a>(vertex_array_kind: VertexArrayKind,
                vaos: &'a RendererVAOs,
                _: &'a GpuGlyphRenderer)
                -> &'a VAO {
     match vertex_array_kind {
         VertexArrayKind::Primitive => &vaos.prim_vao,
         VertexArrayKind::Clip => &vaos.clip_vao,
         VertexArrayKind::Blur => &vaos.blur_vao,
         VertexArrayKind::VectorStencil | VertexArrayKind::VectorCover => unreachable!(),
         VertexArrayKind::Border => &vaos.border_vao,
         VertexArrayKind::Scale => &vaos.scale_vao,
+        VertexArrayKind::LineDecoration => &vaos.line_vao,
     }
 }
 
 #[derive(Clone, Copy, PartialEq)]
 enum FramebufferKind {
     Main,
     Other,
 }
--- a/gfx/webrender/src/shade.rs
+++ b/gfx/webrender/src/shade.rs
@@ -165,16 +165,17 @@ impl LazilyCompiledShader {
                 ShaderKind::Cache(format) => format,
                 ShaderKind::VectorStencil => VertexArrayKind::VectorStencil,
                 ShaderKind::VectorCover => VertexArrayKind::VectorCover,
                 ShaderKind::ClipCache => VertexArrayKind::Clip,
             };
 
             let vertex_descriptor = match vertex_format {
                 VertexArrayKind::Primitive => &desc::PRIM_INSTANCES,
+                VertexArrayKind::LineDecoration => &desc::LINE,
                 VertexArrayKind::Blur => &desc::BLUR,
                 VertexArrayKind::Clip => &desc::CLIP,
                 VertexArrayKind::VectorStencil => &desc::VECTOR_STENCIL,
                 VertexArrayKind::VectorCover => &desc::VECTOR_COVER,
                 VertexArrayKind::Border => &desc::BORDER,
                 VertexArrayKind::Scale => &desc::SCALE,
             };
 
@@ -444,33 +445,33 @@ pub struct Shaders {
     // draw intermediate results to cache targets. The results
     // of these shaders are then used by the primitive shaders.
     pub cs_blur_a8: LazilyCompiledShader,
     pub cs_blur_rgba8: LazilyCompiledShader,
     pub cs_border_segment: LazilyCompiledShader,
     pub cs_border_solid: LazilyCompiledShader,
     pub cs_scale_a8: LazilyCompiledShader,
     pub cs_scale_rgba8: LazilyCompiledShader,
+    pub cs_line_decoration: LazilyCompiledShader,
 
     // Brush shaders
     brush_solid: BrushShader,
     brush_image: Vec<Option<BrushShader>>,
     brush_blend: BrushShader,
     brush_mix_blend: BrushShader,
     brush_yuv_image: Vec<Option<BrushShader>>,
     brush_radial_gradient: BrushShader,
     brush_linear_gradient: BrushShader,
 
     /// These are "cache clip shaders". These shaders are used to
     /// draw clip instances into the cached clip mask. The results
     /// of these shaders are also used by the primitive shaders.
     pub cs_clip_rectangle: LazilyCompiledShader,
     pub cs_clip_box_shadow: LazilyCompiledShader,
     pub cs_clip_image: LazilyCompiledShader,
-    pub cs_clip_line: LazilyCompiledShader,
 
     // The are "primitive shaders". These shaders draw and blend
     // final results on screen. They are aware of tile boundaries.
     // Most draw directly to the framebuffer, but some use inputs
     // from the cache shaders to draw. Specifically, the box
     // shadow primitive shader stretches the box shadow cache
     // output, and the cache_image shader blits the results of
     // a cache shader (e.g. blur) to the screen.
@@ -561,24 +562,16 @@ impl Shaders {
         let cs_clip_box_shadow = LazilyCompiledShader::new(
             ShaderKind::ClipCache,
             "cs_clip_box_shadow",
             &[],
             device,
             options.precache_flags,
         )?;
 
-        let cs_clip_line = LazilyCompiledShader::new(
-            ShaderKind::ClipCache,
-            "cs_clip_line",
-            &[],
-            device,
-            options.precache_flags,
-        )?;
-
         let cs_clip_image = LazilyCompiledShader::new(
             ShaderKind::ClipCache,
             "cs_clip_image",
             &[],
             device,
             options.precache_flags,
         )?;
 
@@ -679,16 +672,24 @@ impl Shaders {
                         );
                         brush_yuv_image[index] = Some(shader);
                         yuv_features.clear();
                     }
                 }
             }
         }
 
+        let cs_line_decoration = LazilyCompiledShader::new(
+            ShaderKind::Cache(VertexArrayKind::LineDecoration),
+            "cs_line_decoration",
+            &[],
+            device,
+            options.precache_flags,
+        )?;
+
         let cs_border_segment = LazilyCompiledShader::new(
             ShaderKind::Cache(VertexArrayKind::Border),
             "cs_border_segment",
              &[],
              device,
              options.precache_flags,
         )?;
 
@@ -707,30 +708,30 @@ impl Shaders {
             device,
             options.precache_flags,
         )?;
 
         Ok(Shaders {
             cs_blur_a8,
             cs_blur_rgba8,
             cs_border_segment,
+            cs_line_decoration,
             cs_border_solid,
             cs_scale_a8,
             cs_scale_rgba8,
             brush_solid,
             brush_image,
             brush_blend,
             brush_mix_blend,
             brush_yuv_image,
             brush_radial_gradient,
             brush_linear_gradient,
             cs_clip_rectangle,
             cs_clip_box_shadow,
             cs_clip_image,
-            cs_clip_line,
             ps_text_run,
             ps_text_run_dual_source,
             ps_split_composite,
         })
     }
 
     fn get_yuv_shader_index(
         buffer_kind: ImageBufferKind,
@@ -796,30 +797,30 @@ impl Shaders {
         self.brush_solid.deinit(device);
         self.brush_blend.deinit(device);
         self.brush_mix_blend.deinit(device);
         self.brush_radial_gradient.deinit(device);
         self.brush_linear_gradient.deinit(device);
         self.cs_clip_rectangle.deinit(device);
         self.cs_clip_box_shadow.deinit(device);
         self.cs_clip_image.deinit(device);
-        self.cs_clip_line.deinit(device);
         self.ps_text_run.deinit(device);
         self.ps_text_run_dual_source.deinit(device);
         for shader in self.brush_image {
             if let Some(shader) = shader {
                 shader.deinit(device);
             }
         }
         for shader in self.brush_yuv_image {
             if let Some(shader) = shader {
                 shader.deinit(device);
             }
         }
         self.cs_border_solid.deinit(device);
+        self.cs_line_decoration.deinit(device);
         self.cs_border_segment.deinit(device);
         self.ps_split_composite.deinit(device);
     }
 }
 
 // A wrapper around a strong reference to a Shaders
 // object. We have this so that external (ffi)
 // consumers can own a reference to a shared Shaders
--- a/gfx/webrender/src/tiling.rs
+++ b/gfx/webrender/src/tiling.rs
@@ -1,15 +1,15 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 use api::{ColorF, BorderStyle, DeviceIntPoint, DeviceIntRect, DeviceIntSize, DevicePixelScale};
 use api::{DeviceUintPoint, DeviceUintRect, DeviceUintSize, DocumentLayer, FilterOp, ImageFormat};
-use api::{MixBlendMode, PipelineId};
+use api::{MixBlendMode, PipelineId, DeviceRect, LayoutSize};
 use batch::{AlphaBatchBuilder, AlphaBatchContainer, ClipBatcher, resolve_image};
 use clip::ClipStore;
 use clip_scroll_tree::{ClipScrollTree};
 use device::{FrameId, Texture};
 #[cfg(feature = "pathfinder")]
 use euclid::{TypedPoint2D, TypedVector2D};
 use gpu_cache::{GpuCache};
 use gpu_types::{BorderInstance, BlurDirection, BlurInstance, PrimitiveHeaders, ScalingInstance};
@@ -314,16 +314,26 @@ pub enum BlitJobSource {
 // Information required to do a blit from a source to a target.
 #[cfg_attr(feature = "capture", derive(Serialize))]
 #[cfg_attr(feature = "replay", derive(Deserialize))]
 pub struct BlitJob {
     pub source: BlitJobSource,
     pub target_rect: DeviceIntRect,
 }
 
+#[cfg_attr(feature = "capture", derive(Serialize))]
+#[cfg_attr(feature = "replay", derive(Deserialize))]
+pub struct LineDecorationJob {
+    pub task_rect: DeviceRect,
+    pub local_size: LayoutSize,
+    pub wavy_line_thickness: f32,
+    pub style: i32,
+    pub orientation: i32,
+}
+
 #[cfg(feature = "pathfinder")]
 #[cfg_attr(feature = "capture", derive(Serialize))]
 #[cfg_attr(feature = "replay", derive(Deserialize))]
 pub struct GlyphJob {
     pub mesh: Mesh,
     pub target_rect: DeviceIntRect,
     pub origin: DeviceIntPoint,
     pub subpixel_offset: TypedPoint2D<f32, DevicePixel>,
@@ -473,17 +483,18 @@ impl RenderTarget for ColorRenderTarget 
                     self.outputs.push(FrameOutput {
                         pipeline_id,
                         task_id,
                     });
                 }
             }
             RenderTaskKind::ClipRegion(..) |
             RenderTaskKind::Border(..) |
-            RenderTaskKind::CacheMask(..) => {
+            RenderTaskKind::CacheMask(..) |
+            RenderTaskKind::LineDecoration(..) => {
                 panic!("Should not be added to color target!");
             }
             RenderTaskKind::Glyph(..) => {
                 // FIXME(pcwalton): Support color glyphs.
                 panic!("Glyphs should not be added to color target!");
             }
             RenderTaskKind::Readback(device_rect) => {
                 self.readbacks.push(device_rect);
@@ -611,16 +622,17 @@ impl RenderTarget for AlphaRenderTarget 
             }
         }
 
         match task.kind {
             RenderTaskKind::Readback(..) |
             RenderTaskKind::Picture(..) |
             RenderTaskKind::Blit(..) |
             RenderTaskKind::Border(..) |
+            RenderTaskKind::LineDecoration(..) |
             RenderTaskKind::Glyph(..) => {
                 panic!("BUG: should not be added to alpha target!");
             }
             RenderTaskKind::VerticalBlur(ref info) => {
                 info.add_instances(
                     &mut self.vertical_blurs,
                     BlurDirection::Vertical,
                     render_tasks.get_task_address(task_id),
@@ -680,28 +692,30 @@ impl RenderTarget for AlphaRenderTarget 
 pub struct TextureCacheRenderTarget {
     pub target_kind: RenderTargetKind,
     pub horizontal_blurs: Vec<BlurInstance>,
     pub blits: Vec<BlitJob>,
     pub glyphs: Vec<GlyphJob>,
     pub border_segments_complex: Vec<BorderInstance>,
     pub border_segments_solid: Vec<BorderInstance>,
     pub clears: Vec<DeviceIntRect>,
+    pub line_decorations: Vec<LineDecorationJob>,
 }
 
 impl TextureCacheRenderTarget {
     fn new(target_kind: RenderTargetKind) -> Self {
         TextureCacheRenderTarget {
             target_kind,
             horizontal_blurs: vec![],
             blits: vec![],
             glyphs: vec![],
             border_segments_complex: vec![],
             border_segments_solid: vec![],
             clears: vec![],
+            line_decorations: vec![],
         }
     }
 
     fn add_task(
         &mut self,
         task_id: RenderTaskId,
         render_tasks: &mut RenderTaskTree,
     ) {
@@ -709,16 +723,27 @@ impl TextureCacheRenderTarget {
         let src_task_address = render_tasks[task_id].children.get(0).map(|src_task_id| {
             render_tasks.get_task_address(*src_task_id)
         });
 
         let task = &mut render_tasks[task_id];
         let target_rect = task.get_target_rect();
 
         match task.kind {
+            RenderTaskKind::LineDecoration(ref info) => {
+                self.clears.push(target_rect.0);
+
+                self.line_decorations.push(LineDecorationJob {
+                    task_rect: target_rect.0.to_f32(),
+                    local_size: info.local_size,
+                    style: info.style as i32,
+                    orientation: info.orientation as i32,
+                    wavy_line_thickness: info.wavy_line_thickness,
+                });
+            }
             RenderTaskKind::HorizontalBlur(ref info) => {
                 info.add_instances(
                     &mut self.horizontal_blurs,
                     BlurDirection::Horizontal,
                     task_address,
                     src_task_address.unwrap(),
                 );
             }
--- a/gfx/webrender/tests/angle_shader_validation.rs
+++ b/gfx/webrender/tests/angle_shader_validation.rs
@@ -33,30 +33,30 @@ const SHADERS: &[Shader] = &[
     Shader {
         name: "cs_clip_image",
         features: CLIP_FEATURES,
     },
     Shader {
         name: "cs_clip_box_shadow",
         features: CLIP_FEATURES,
     },
-    Shader {
-        name: "cs_clip_line",
-        features: CLIP_FEATURES,
-    },
     // Cache shaders
     Shader {
         name: "cs_blur",
         features: &[ "ALPHA_TARGET", "COLOR_TARGET" ],
     },
     Shader {
         name: "cs_border_segment",
         features: CACHE_FEATURES,
     },
     Shader {
+        name: "cs_line_decoration",
+        features: CACHE_FEATURES,
+    },
+    Shader {
         name: "cs_border_solid",
         features: CACHE_FEATURES,
     },
     // Prim shaders
     Shader {
         name: "ps_split_composite",
         features: PRIM_FEATURES,
     },
--- a/gfx/webrender_bindings/revision.txt
+++ b/gfx/webrender_bindings/revision.txt
@@ -1,1 +1,1 @@
-f83c387824b156e7f97f88edee96956bd0de482d
+7aa1d42ad41097b68e8026e3384127242601c95b
--- a/image/FrameAnimator.cpp
+++ b/image/FrameAnimator.cpp
@@ -32,17 +32,18 @@ AnimationState::UpdateState(bool aAnimat
                             RasterImage *aImage,
                             const gfx::IntSize& aSize,
                             bool aAllowInvalidation /* = true */)
 {
   LookupResult result =
     SurfaceCache::Lookup(ImageKey(aImage),
                          RasterSurfaceKey(aSize,
                                           DefaultSurfaceFlags(),
-                                          PlaybackType::eAnimated));
+                                          PlaybackType::eAnimated),
+                         /* aMarkUsed = */ false);
 
   return UpdateStateInternal(result, aAnimationFinished, aSize, aAllowInvalidation);
 }
 
 const gfx::IntRect
 AnimationState::UpdateStateInternal(LookupResult& aResult,
                                     bool aAnimationFinished,
                                     const gfx::IntSize& aSize,
@@ -393,17 +394,18 @@ FrameAnimator::ResetAnimation(AnimationS
   aState.ResetAnimation();
 
   // Our surface provider is synchronized to our state, so we need to reset its
   // state as well, if we still have one.
   LookupResult result =
     SurfaceCache::Lookup(ImageKey(mImage),
                          RasterSurfaceKey(mSize,
                                           DefaultSurfaceFlags(),
-                                          PlaybackType::eAnimated));
+                                          PlaybackType::eAnimated),
+                         /* aMarkUsed = */ false);
   if (!result) {
     return;
   }
 
   result.Surface().Reset();
 }
 
 RefreshResult
@@ -422,17 +424,18 @@ FrameAnimator::RequestRefresh(AnimationS
   // Get the animation frames once now, and pass them down to callees because
   // the surface could be discarded at anytime on a different thread. This is
   // must easier to reason about then trying to write code that is safe to
   // having the surface disappear at anytime.
   LookupResult result =
     SurfaceCache::Lookup(ImageKey(mImage),
                          RasterSurfaceKey(mSize,
                                           DefaultSurfaceFlags(),
-                                          PlaybackType::eAnimated));
+                                          PlaybackType::eAnimated),
+                         /* aMarkUsed = */ true);
 
   ret.mDirtyRect = aState.UpdateStateInternal(result, aAnimationFinished, mSize);
   if (aState.IsDiscarded() || !result) {
     aState.MaybeAdvanceAnimationFrameTime(aTime);
     if (!ret.mDirtyRect.IsEmpty()) {
       ret.mFrameAdvanced = true;
     }
     return ret;
@@ -497,32 +500,33 @@ FrameAnimator::RequestRefresh(AnimationS
   }
 
   MOZ_ASSERT(!aState.mIsCurrentlyDecoded || !aState.mCompositedFrameInvalid);
 
   return ret;
 }
 
 LookupResult
-FrameAnimator::GetCompositedFrame(AnimationState& aState)
+FrameAnimator::GetCompositedFrame(AnimationState& aState, bool aMarkUsed)
 {
   aState.mCompositedFrameRequested = true;
 
   // If we have a composited version of this frame, return that.
   if (!aState.mCompositedFrameInvalid && mLastCompositedFrameIndex >= 0 &&
       (uint32_t(mLastCompositedFrameIndex) == aState.mCurrentAnimationFrameIndex)) {
     return LookupResult(DrawableSurface(mCompositingFrame->DrawableRef()),
                         MatchType::EXACT);
   }
 
   LookupResult result =
     SurfaceCache::Lookup(ImageKey(mImage),
                          RasterSurfaceKey(mSize,
                                           DefaultSurfaceFlags(),
-                                          PlaybackType::eAnimated));
+                                          PlaybackType::eAnimated),
+                         aMarkUsed);
 
   if (aState.mCompositedFrameInvalid) {
     MOZ_ASSERT(gfxPrefs::ImageMemAnimatedDiscardable());
     MOZ_ASSERT(aState.GetHasRequestedDecode());
     MOZ_ASSERT(!aState.GetIsCurrentlyDecoded());
     if (result.Type() == MatchType::NOT_FOUND) {
       return result;
     }
--- a/image/FrameAnimator.h
+++ b/image/FrameAnimator.h
@@ -318,17 +318,18 @@ public:
                                const TimeStamp& aTime,
                                bool aAnimationFinished);
 
   /**
    * Get the full frame for the current frame of the animation (it may or may
    * not have required compositing). It may not be available because it hasn't
    * been decoded yet, in which case we return an empty LookupResult.
    */
-  LookupResult GetCompositedFrame(AnimationState& aState);
+  LookupResult GetCompositedFrame(AnimationState& aState,
+                                  bool aMarkUsed);
 
   /**
    * Collect an accounting of the memory occupied by the compositing surfaces we
    * use during animation playback. All of the actual animation frames are
    * stored in the SurfaceCache, so we don't need to report them here.
    */
   void CollectSizeOfCompositingSurfaces(nsTArray<SurfaceMemoryCounter>& aCounters,
                                         MallocSizeOf aMallocSizeOf) const;
--- a/image/RasterImage.cpp
+++ b/image/RasterImage.cpp
@@ -293,48 +293,52 @@ RasterImage::GetType(uint16_t* aType)
 
   *aType = imgIContainer::TYPE_RASTER;
   return NS_OK;
 }
 
 LookupResult
 RasterImage::LookupFrameInternal(const IntSize& aSize,
                                  uint32_t aFlags,
-                                 PlaybackType aPlaybackType)
+                                 PlaybackType aPlaybackType,
+                                 bool aMarkUsed)
 {
   if (mAnimationState && aPlaybackType == PlaybackType::eAnimated) {
     MOZ_ASSERT(mFrameAnimator);
     MOZ_ASSERT(ToSurfaceFlags(aFlags) == DefaultSurfaceFlags(),
                "Can't composite frames with non-default surface flags");
-    return mFrameAnimator->GetCompositedFrame(*mAnimationState);
+    return mFrameAnimator->GetCompositedFrame(*mAnimationState, aMarkUsed);
   }
 
   SurfaceFlags surfaceFlags = ToSurfaceFlags(aFlags);
 
   // We don't want any substitution for sync decodes, and substitution would be
   // illegal when high quality downscaling is disabled, so we use
   // SurfaceCache::Lookup in this case.
   if ((aFlags & FLAG_SYNC_DECODE) || !(aFlags & FLAG_HIGH_QUALITY_SCALING)) {
     return SurfaceCache::Lookup(ImageKey(this),
                                 RasterSurfaceKey(aSize,
                                                  surfaceFlags,
-                                                 PlaybackType::eStatic));
+                                                 PlaybackType::eStatic),
+                                aMarkUsed);
   }
 
   // We'll return the best match we can find to the requested frame.
   return SurfaceCache::LookupBestMatch(ImageKey(this),
                                        RasterSurfaceKey(aSize,
                                                         surfaceFlags,
-                                                        PlaybackType::eStatic));
+                                                        PlaybackType::eStatic),
+                                       aMarkUsed);
 }
 
 LookupResult
 RasterImage::LookupFrame(const IntSize& aSize,
                          uint32_t aFlags,
-                         PlaybackType aPlaybackType)
+                         PlaybackType aPlaybackType,
+                         bool aMarkUsed)
 {
   MOZ_ASSERT(NS_IsMainThread());
 
   // If we're opaque, we don't need to care about premultiplied alpha, because
   // that can only matter for frames with transparency.
   if (IsOpaque()) {
     aFlags &= ~FLAG_DECODE_NO_PREMULTIPLY_ALPHA;
   }
@@ -342,17 +346,17 @@ RasterImage::LookupFrame(const IntSize& 
   IntSize requestedSize = CanDownscaleDuringDecode(aSize, aFlags)
                         ? aSize : mSize;
   if (requestedSize.IsEmpty()) {
     // Can't decode to a surface of zero size.
     return LookupResult(MatchType::NOT_FOUND);
   }
 
   LookupResult result =
-    LookupFrameInternal(requestedSize, aFlags, aPlaybackType);
+    LookupFrameInternal(requestedSize, aFlags, aPlaybackType, aMarkUsed);
 
   if (!result && !mHasSize) {
     // We can't request a decode without knowing our intrinsic size. Give up.
     return LookupResult(MatchType::NOT_FOUND);
   }
 
   if (result.Type() == MatchType::NOT_FOUND ||
       result.Type() == MatchType::SUBSTITUTE_BECAUSE_NOT_FOUND ||
@@ -372,17 +376,17 @@ RasterImage::LookupFrame(const IntSize& 
                   (aFlags & FLAG_HIGH_QUALITY_SCALING));
       requestedSize = result.SuggestedSize();
     }
 
     bool ranSync = Decode(requestedSize, aFlags, aPlaybackType);
 
     // If we can or did sync decode, we should already have the frame.
     if (ranSync || (aFlags & FLAG_SYNC_DECODE)) {
-      result = LookupFrameInternal(requestedSize, aFlags, aPlaybackType);
+      result = LookupFrameInternal(requestedSize, aFlags, aPlaybackType, aMarkUsed);
     }
   }
 
   if (!result) {
     // We still weren't able to get a frame. Give up.
     return result;
   }
 
@@ -457,17 +461,18 @@ RasterImage::WillDrawOpaqueNow()
   if (mLockCount == 0) {
     return false;
   }
 
   LookupResult result =
     SurfaceCache::LookupBestMatch(ImageKey(this),
                                   RasterSurfaceKey(mSize,
                                                    DefaultSurfaceFlags(),
-                                                   PlaybackType::eStatic));
+                                                   PlaybackType::eStatic),
+                                  /* aMarkUsed = */ false);
   MatchType matchType = result.Type();
   if (matchType == MatchType::NOT_FOUND || matchType == MatchType::PENDING ||
       !result.Surface()->IsFinished()) {
     return false;
   }
 
   return true;
 }
@@ -599,17 +604,17 @@ RasterImage::GetFrameInternal(const IntS
     return MakeTuple(ImgDrawResult::BAD_IMAGE, aSize,
                      RefPtr<SourceSurface>());
   }
 
   // Get the frame. If it's not there, it's probably the caller's fault for
   // not waiting for the data to be loaded from the network or not passing
   // FLAG_SYNC_DECODE.
   LookupResult result =
-    LookupFrame(aSize, aFlags, ToPlaybackType(aWhichFrame));
+    LookupFrame(aSize, aFlags, ToPlaybackType(aWhichFrame), /* aMarkUsed = */ true);
 
   // The surface cache may have suggested we use a different size than the
   // given size in the future. This may or may not be accompanied by an
   // actual surface, depending on what it has in its cache.
   IntSize suggestedSize = result.SuggestedSize().IsEmpty()
                           ? aSize : result.SuggestedSize();
   MOZ_ASSERT_IF(result.Type() == MatchType::SUBSTITUTE_BECAUSE_BEST,
                 suggestedSize != aSize);
@@ -1195,17 +1200,17 @@ RasterImage::RequestDecodeForSizeInterna
 
   uint32_t flags = shouldSyncDecodeIfFast
                  ? aFlags
                  : aFlags & ~FLAG_SYNC_DECODE_IF_FAST;
 
   // Perform a frame lookup, which will implicitly start decoding if needed.
   PlaybackType playbackType = mAnimationState ? PlaybackType::eAnimated
                                               : PlaybackType::eStatic;
-  LookupResult result = LookupFrame(aSize, flags, playbackType);
+  LookupResult result = LookupFrame(aSize, flags, playbackType, /* aMarkUsed = */ false);
   return std::move(result.Surface());
 }
 
 static bool
 LaunchDecodingTask(IDecodingTask* aTask,
                    RasterImage* aImage,
                    uint32_t aFlags,
                    bool aHaveSourceData)
@@ -1513,17 +1518,17 @@ RasterImage::Draw(gfxContext* aContext,
 
   // If we're not using SamplingFilter::GOOD, we shouldn't high-quality scale or
   // downscale during decode.
   uint32_t flags = aSamplingFilter == SamplingFilter::GOOD
                  ? aFlags
                  : aFlags & ~FLAG_HIGH_QUALITY_SCALING;
 
   LookupResult result =
-    LookupFrame(aSize, flags, ToPlaybackType(aWhichFrame));
+    LookupFrame(aSize, flags, ToPlaybackType(aWhichFrame), /* aMarkUsed = */ true);
   if (!result) {
     // Getting the frame (above) touches the image and kicks off decoding.
     if (mDrawStartTime.IsNull()) {
       mDrawStartTime = TimeStamp::Now();
     }
     return ImgDrawResult::NOT_READY;
   }
 
--- a/image/RasterImage.h
+++ b/image/RasterImage.h
@@ -279,29 +279,32 @@ private:
   /**
    * Tries to retrieve a surface for this image with size @aSize, surface flags
    * matching @aFlags, and a playback type of @aPlaybackType.
    *
    * If @aFlags specifies FLAG_SYNC_DECODE and we already have all the image
    * data, we'll attempt a sync decode if no matching surface is found. If
    * FLAG_SYNC_DECODE was not specified and no matching surface was found, we'll
    * kick off an async decode so that the surface is (hopefully) available next
-   * time it's requested.
+   * time it's requested. aMarkUsed determines if we mark the surface used in
+   * the surface cache or not.
    *
    * @return a drawable surface, which may be empty if the requested surface
    *         could not be found.
    */
   LookupResult LookupFrame(const gfx::IntSize& aSize,
                            uint32_t aFlags,
-                           PlaybackType aPlaybackType);
+                           PlaybackType aPlaybackType,
+                           bool aMarkUsed);
 
   /// Helper method for LookupFrame().
   LookupResult LookupFrameInternal(const gfx::IntSize& aSize,
                                    uint32_t aFlags,
-                                   PlaybackType aPlaybackType);
+                                   PlaybackType aPlaybackType,
+                                   bool aMarkUsed);
 
   ImgDrawResult DrawInternal(DrawableSurface&& aFrameRef,
                           gfxContext* aContext,
                           const nsIntSize& aSize,
                           const ImageRegion& aRegion,
                           gfx::SamplingFilter aSamplingFilter,
                           uint32_t aFlags,
                           float aOpacity);
--- a/image/SurfaceCache.cpp
+++ b/image/SurfaceCache.cpp
@@ -955,17 +955,17 @@ public:
     mAvailableCost += costEntry.GetCost();
     MOZ_ASSERT(mAvailableCost <= mMaxCost,
                "More available cost than we started with");
   }
 
   LookupResult Lookup(const ImageKey    aImageKey,
                       const SurfaceKey& aSurfaceKey,
                       const StaticMutexAutoLock& aAutoLock,
-                      bool aMarkUsed = true)
+                      bool aMarkUsed)
   {
     RefPtr<ImageSurfaceCache> cache = GetImageCache(aImageKey);
     if (!cache) {
       // No cached surfaces for this image.
       return LookupResult(MatchType::NOT_FOUND);
     }
 
     RefPtr<CachedSurface> surface = cache->Lookup(aSurfaceKey, aMarkUsed);
@@ -994,17 +994,18 @@ public:
 
     MOZ_ASSERT(surface->GetSurfaceKey() == aSurfaceKey,
                "Lookup() not returning an exact match?");
     return LookupResult(std::move(drawableSurface), MatchType::EXACT);
   }
 
   LookupResult LookupBestMatch(const ImageKey         aImageKey,
                                const SurfaceKey&      aSurfaceKey,
-                               const StaticMutexAutoLock& aAutoLock)
+                               const StaticMutexAutoLock& aAutoLock,
+                               bool aMarkUsed)
   {
     RefPtr<ImageSurfaceCache> cache = GetImageCache(aImageKey);
     if (!cache) {
       // No cached surfaces for this image.
       return LookupResult(MatchType::NOT_FOUND);
     }
 
     // Repeatedly look up the best match, trying again if the resulting surface
@@ -1040,17 +1041,18 @@ public:
     MOZ_ASSERT_IF(matchType == MatchType::SUBSTITUTE_BECAUSE_NOT_FOUND ||
                   matchType == MatchType::SUBSTITUTE_BECAUSE_PENDING,
       surface->GetSurfaceKey().SVGContext() == aSurfaceKey.SVGContext() &&
       surface->GetSurfaceKey().Playback() == aSurfaceKey.Playback() &&
       surface->GetSurfaceKey().Flags() == aSurfaceKey.Flags());
 
     if (matchType == MatchType::EXACT ||
         matchType == MatchType::SUBSTITUTE_BECAUSE_BEST) {
-      if (!MarkUsed(WrapNotNull(surface), WrapNotNull(cache), aAutoLock)) {
+      if (aMarkUsed &&
+          !MarkUsed(WrapNotNull(surface), WrapNotNull(cache), aAutoLock)) {
         Remove(WrapNotNull(surface), /* aStopTracking */ false, aAutoLock);
       }
     }
 
     return LookupResult(std::move(drawableSurface), matchType, suggestedSize);
   }
 
   bool CanHold(const Cost aCost) const
@@ -1511,48 +1513,50 @@ SurfaceCache::Shutdown()
     MOZ_ASSERT(NS_IsMainThread());
     MOZ_ASSERT(sInstance, "No singleton - was Shutdown() called twice?");
     cache = sInstance.forget();
   }
 }
 
 /* static */ LookupResult
 SurfaceCache::Lookup(const ImageKey         aImageKey,
-                     const SurfaceKey&      aSurfaceKey)
+                     const SurfaceKey&      aSurfaceKey,
+                     bool aMarkUsed)
 {
   nsTArray<RefPtr<CachedSurface>> discard;
   LookupResult rv(MatchType::NOT_FOUND);
 
   {
     StaticMutexAutoLock lock(sInstanceMutex);
     if (!sInstance) {
       return rv;
     }
 
-    rv = sInstance->Lookup(aImageKey, aSurfaceKey, lock);
+    rv = sInstance->Lookup(aImageKey, aSurfaceKey, lock, aMarkUsed);
     sInstance->TakeDiscard(discard, lock);
   }
 
   return rv;
 }
 
 /* static */ LookupResult
 SurfaceCache::LookupBestMatch(const ImageKey         aImageKey,
-                              const SurfaceKey&      aSurfaceKey)
+                              const SurfaceKey&      aSurfaceKey,
+                              bool aMarkUsed)
 {
   nsTArray<RefPtr<CachedSurface>> discard;
   LookupResult rv(MatchType::NOT_FOUND);
 
   {
     StaticMutexAutoLock lock(sInstanceMutex);
     if (!sInstance) {
       return rv;
     }
 
-    rv = sInstance->LookupBestMatch(aImageKey, aSurfaceKey, lock);
+    rv = sInstance->LookupBestMatch(aImageKey, aSurfaceKey, lock, aMarkUsed);
     sInstance->TakeDiscard(discard, lock);
   }
 
   return rv;
 }
 
 /* static */ InsertOutcome
 SurfaceCache::Insert(NotNull<ISurfaceProvider*> aProvider)
--- a/image/SurfaceCache.h
+++ b/image/SurfaceCache.h
@@ -227,17 +227,18 @@ struct SurfaceCache
    * @param aImageKey       Key data identifying which image the cache entry
    *                        belongs to.
    * @param aSurfaceKey     Key data which uniquely identifies the requested
    *                        cache entry.
    * @return                a LookupResult which will contain a DrawableSurface
    *                        if the cache entry was found.
    */
   static LookupResult Lookup(const ImageKey    aImageKey,
-                             const SurfaceKey& aSurfaceKey);
+                             const SurfaceKey& aSurfaceKey,
+                             bool aMarkUsed);
 
   /**
    * Looks up the best matching cache entry and returns a drawable reference to
    * its associated surface.
    *
    * The result may vary from the requested cache entry only in terms of size.
    *
    * @param aImageKey       Key data identifying which image the cache entry
@@ -246,17 +247,18 @@ struct SurfaceCache
    *                        cache entry.
    * @return                a LookupResult which will contain a DrawableSurface
    *                        if a cache entry similar to the one the caller
    *                        requested could be found. Callers can use
    *                        LookupResult::IsExactMatch() to check whether the
    *                        returned surface exactly matches @aSurfaceKey.
    */
   static LookupResult LookupBestMatch(const ImageKey    aImageKey,
-                                      const SurfaceKey& aSurfaceKey);
+                                      const SurfaceKey& aSurfaceKey,
+                                      bool aMarkUsed);
 
   /**
    * Insert an ISurfaceProvider into the cache. If an entry with the same
    * ImageKey and SurfaceKey is already in the cache, Insert returns
    * FAILURE_ALREADY_PRESENT. If a matching placeholder is already present, it
    * is replaced.
    *
    * Cache entries will never expire as long as they remain locked, but if they
--- a/image/VectorImage.cpp
+++ b/image/VectorImage.cpp
@@ -1126,19 +1126,19 @@ VectorImage::LookupCachedSurface(const I
   // in this case either.
   if (mHaveAnimations) {
     return MakeTuple(RefPtr<SourceSurface>(), aSize);
   }
 
   LookupResult result(MatchType::NOT_FOUND);
   SurfaceKey surfaceKey = VectorSurfaceKey(aSize, aSVGContext);
   if ((aFlags & FLAG_SYNC_DECODE) || !(aFlags & FLAG_HIGH_QUALITY_SCALING)) {
-    result = SurfaceCache::Lookup(ImageKey(this), surfaceKey);
+    result = SurfaceCache::Lookup(ImageKey(this), surfaceKey, /* aMarkUsed = */ true);
   } else {
-    result = SurfaceCache::LookupBestMatch(ImageKey(this), surfaceKey);
+    result = SurfaceCache::LookupBestMatch(ImageKey(this), surfaceKey, /* aMarkUsed = */ true);
   }
 
   IntSize rasterSize = result.SuggestedSize().IsEmpty()
                        ? aSize : result.SuggestedSize();
   MOZ_ASSERT(result.Type() != MatchType::SUBSTITUTE_BECAUSE_PENDING);
   if (!result || result.Type() == MatchType::SUBSTITUTE_BECAUSE_NOT_FOUND) {
     // No matching surface, or the OS freed the volatile buffer.
     return MakeTuple(RefPtr<SourceSurface>(), rasterSize);
--- a/image/imgIContainer.idl
+++ b/image/imgIContainer.idl
@@ -471,21 +471,17 @@ interface imgIContainer : nsISupports
   /*
    * This method triggers decoding for an image, but unlike startDecoding() it
    * enables the caller to provide more detailed information about the decode
    * request.
    *
    * @param aSize The size to which the image should be scaled while decoding,
    *              if possible. If the image cannot be scaled to this size while
    *              being decoded, it will be decoded at its intrinsic size.
-   * @param aFlags Flags of the FLAG_* variety. Only the decode flags
-   *               (FLAG_DECODE_*) and FLAG_SYNC_DECODE (which will
-   *               synchronously decode images that can be decoded "quickly",
-   *               just like startDecoding() does) are accepted; others will be
-   *               ignored.
+   * @param aFlags Flags of the FLAG_* variety.
    */
   [noscript] void requestDecodeForSize([const] in nsIntSize aSize,
                                        in uint32_t aFlags);
 
   /**
     * Increments the lock count on the image. An image will not be discarded
     * as long as the lock count is nonzero. Note that it is still possible for
     * the image to be undecoded if decode-on-draw is enabled and the image
--- a/image/test/gtest/TestDecoders.cpp
+++ b/image/test/gtest/TestDecoders.cpp
@@ -568,60 +568,64 @@ TEST_F(ImageDecoders, AnimatedGIFWithFRA
   EXPECT_TRUE(bool(imageProgress & FLAG_IS_ANIMATED) == true);
 
   // Ensure that we decoded the static version of the image.
   {
     LookupResult result =
       SurfaceCache::Lookup(ImageKey(image.get()),
                            RasterSurfaceKey(imageSize,
                                             DefaultSurfaceFlags(),
-                                            PlaybackType::eStatic));
+                                            PlaybackType::eStatic),
+                           /* aMarkUsed = */ false);
     ASSERT_EQ(MatchType::EXACT, result.Type());
     EXPECT_TRUE(bool(result.Surface()));
   }
 
   // Ensure that we didn't decode the animated version of the image.
   {
     LookupResult result =
       SurfaceCache::Lookup(ImageKey(image.get()),
                            RasterSurfaceKey(imageSize,
                                             DefaultSurfaceFlags(),
-                                            PlaybackType::eAnimated));
+                                            PlaybackType::eAnimated),
+                           /* aMarkUsed = */ false);
     ASSERT_EQ(MatchType::NOT_FOUND, result.Type());
   }
 
   // Use GetFrame() to force a sync decode of the image, this time specifying
   // FRAME_CURRENT to ensure that we get an animated decode.
   RefPtr<SourceSurface> animatedSurface =
     image->GetFrame(imgIContainer::FRAME_CURRENT,
                     imgIContainer::FLAG_SYNC_DECODE);
 
   // Ensure that we decoded both frames of the animated version of the image.
   {
     LookupResult result =
       SurfaceCache::Lookup(ImageKey(image.get()),
                            RasterSurfaceKey(imageSize,
                                             DefaultSurfaceFlags(),
-                                            PlaybackType::eAnimated));
+                                            PlaybackType::eAnimated),
+                           /* aMarkUsed = */ true);
     ASSERT_EQ(MatchType::EXACT, result.Type());
 
     EXPECT_TRUE(NS_SUCCEEDED(result.Surface().Seek(0)));
     EXPECT_TRUE(bool(result.Surface()));
 
     RawAccessFrameRef partialFrame = result.Surface().RawAccessRef(1);
     EXPECT_TRUE(bool(partialFrame));
   }
 
   // Ensure that the static version is still around.
   {
     LookupResult result =
       SurfaceCache::Lookup(ImageKey(image.get()),
                            RasterSurfaceKey(imageSize,
                                             DefaultSurfaceFlags(),
-                                            PlaybackType::eStatic));
+                                            PlaybackType::eStatic),
+                           /* aMarkUsed = */ true);
     ASSERT_EQ(MatchType::EXACT, result.Type());
     EXPECT_TRUE(bool(result.Surface()));
   }
 }
 
 TEST_F(ImageDecoders, AnimatedGIFWithFRAME_CURRENT)
 {
   ImageTestCase testCase = GreenFirstFrameAnimatedGIFTestCase();
@@ -681,60 +685,64 @@ TEST_F(ImageDecoders, AnimatedGIFWithFRA
   EXPECT_TRUE(bool(imageProgress & FLAG_IS_ANIMATED) == true);
 
   // Ensure that we decoded both frames of the animated version of the image.
   {
     LookupResult result =
       SurfaceCache::Lookup(ImageKey(image.get()),
                            RasterSurfaceKey(imageSize,
                                             DefaultSurfaceFlags(),
-                                            PlaybackType::eAnimated));
+                                            PlaybackType::eAnimated),
+                           /* aMarkUsed = */ true);
     ASSERT_EQ(MatchType::EXACT, result.Type());
 
     EXPECT_TRUE(NS_SUCCEEDED(result.Surface().Seek(0)));
     EXPECT_TRUE(bool(result.Surface()));
 
     RawAccessFrameRef partialFrame = result.Surface().RawAccessRef(1);
     EXPECT_TRUE(bool(partialFrame));
   }
 
   // Ensure that we didn't decode the static version of the image.
   {
     LookupResult result =
       SurfaceCache::Lookup(ImageKey(image.get()),
                            RasterSurfaceKey(imageSize,
                                             DefaultSurfaceFlags(),
-                                            PlaybackType::eStatic));
+                                            PlaybackType::eStatic),
+                           /* aMarkUsed = */ false);
     ASSERT_EQ(MatchType::NOT_FOUND, result.Type());
   }
 
   // Use GetFrame() to force a sync decode of the image, this time specifying
   // FRAME_FIRST to ensure that we get a single-frame decode.
   RefPtr<SourceSurface> animatedSurface =
     image->GetFrame(imgIContainer::FRAME_FIRST,
                     imgIContainer::FLAG_SYNC_DECODE);
 
   // Ensure that we decoded the static version of the image.
   {
     LookupResult result =
       SurfaceCache::Lookup(ImageKey(image.get()),
                            RasterSurfaceKey(imageSize,
                                             DefaultSurfaceFlags(),
-                                            PlaybackType::eStatic));
+                                            PlaybackType::eStatic),
+                           /* aMarkUsed = */ true);
     ASSERT_EQ(MatchType::EXACT, result.Type());
     EXPECT_TRUE(bool(result.Surface()));
   }
 
   // Ensure that both frames of the animated version are still around.
   {
     LookupResult result =
       SurfaceCache::Lookup(ImageKey(image.get()),
                            RasterSurfaceKey(imageSize,
                                             DefaultSurfaceFlags(),
-                                            PlaybackType::eAnimated));
+                                            PlaybackType::eAnimated),
+                           /* aMarkUsed = */ true);
     ASSERT_EQ(MatchType::EXACT, result.Type());
 
     EXPECT_TRUE(NS_SUCCEEDED(result.Surface().Seek(0)));
     EXPECT_TRUE(bool(result.Surface()));
 
     RawAccessFrameRef partialFrame = result.Surface().RawAccessRef(1);
     EXPECT_TRUE(bool(partialFrame));
   }
@@ -793,17 +801,18 @@ TEST_F(ImageDecoders, AnimatedGIFWithExt
   EXPECT_TRUE(bool(imageProgress & FLAG_HAS_TRANSPARENCY) == false);
   EXPECT_TRUE(bool(imageProgress & FLAG_IS_ANIMATED) == true);
 
   // Ensure that we decoded both frames of the image.
   LookupResult result =
     SurfaceCache::Lookup(ImageKey(image.get()),
                          RasterSurfaceKey(imageSize,
                                           DefaultSurfaceFlags(),
-                                          PlaybackType::eAnimated));
+                                          PlaybackType::eAnimated),
+                         /* aMarkUsed = */ true);
   ASSERT_EQ(MatchType::EXACT, result.Type());
 
   EXPECT_TRUE(NS_SUCCEEDED(result.Surface().Seek(0)));
   EXPECT_TRUE(bool(result.Surface()));
 
   RawAccessFrameRef partialFrame = result.Surface().RawAccessRef(1);
   EXPECT_TRUE(bool(partialFrame));
 }
--- a/image/test/gtest/TestMetadata.cpp
+++ b/image/test/gtest/TestMetadata.cpp
@@ -245,17 +245,18 @@ TEST_F(ImageDecoderMetadata, NoFrameDela
   EXPECT_TRUE(bool(imageProgress & FLAG_HAS_TRANSPARENCY) == false);
   EXPECT_TRUE(bool(imageProgress & FLAG_IS_ANIMATED) == true);
 
   // Ensure that we decoded both frames of the image.
   LookupResult result =
     SurfaceCache::Lookup(ImageKey(image.get()),
                          RasterSurfaceKey(imageSize,
                                           DefaultSurfaceFlags(),
-                                          PlaybackType::eAnimated));
+                                          PlaybackType::eAnimated),
+                         /* aMarkUsed = */ true);
   ASSERT_EQ(MatchType::EXACT, result.Type());
 
   EXPECT_TRUE(NS_SUCCEEDED(result.Surface().Seek(0)));
   EXPECT_TRUE(bool(result.Surface()));
 
   RawAccessFrameRef partialFrame = result.Surface().RawAccessRef(1);
   EXPECT_TRUE(bool(partialFrame));
 }
--- a/js/src/util/Unicode.h
+++ b/js/src/util/Unicode.h
@@ -74,16 +74,17 @@ constexpr char16_t DIVISION_SIGN = 0x00F
 constexpr char16_t LATIN_SMALL_LETTER_Y_WITH_DIAERESIS = 0x00FF;
 constexpr char16_t LATIN_CAPITAL_LETTER_I_WITH_DOT_ABOVE = 0x0130;
 constexpr char16_t COMBINING_DOT_ABOVE = 0x0307;
 constexpr char16_t GREEK_CAPITAL_LETTER_SIGMA = 0x03A3;
 constexpr char16_t GREEK_SMALL_LETTER_FINAL_SIGMA = 0x03C2;
 constexpr char16_t GREEK_SMALL_LETTER_SIGMA = 0x03C3;
 constexpr char16_t LINE_SEPARATOR = 0x2028;
 constexpr char16_t PARA_SEPARATOR = 0x2029;
+constexpr char16_t REPLACEMENT_CHARACTER = 0xFFFD;
 constexpr char16_t BYTE_ORDER_MARK2 = 0xFFFE;
 
 const char16_t LeadSurrogateMin = 0xD800;
 const char16_t LeadSurrogateMax = 0xDBFF;
 const char16_t TrailSurrogateMin = 0xDC00;
 const char16_t TrailSurrogateMax = 0xDFFF;
 
 const uint32_t UTF16Max = 0xFFFF;
--- a/js/src/vm/CharacterEncoding.cpp
+++ b/js/src/vm/CharacterEncoding.cpp
@@ -7,16 +7,17 @@
 #include "js/CharacterEncoding.h"
 
 #include "mozilla/Range.h"
 #include "mozilla/Sprintf.h"
 
 #include <algorithm>
 #include <type_traits>
 
+#include "util/Unicode.h" // unicode::REPLACEMENT_CHARACTER
 #include "vm/JSContext.h"
 
 using namespace js;
 
 Latin1CharsZ
 JS::LossyTwoByteCharsToNewLatin1CharsZ(JSContext* cx,
                                        const mozilla::Range<const char16_t> tbchars)
 {
@@ -75,18 +76,16 @@ JS_PUBLIC_API(size_t)
 JS::GetDeflatedUTF8StringLength(JSFlatString* s)
 {
     JS::AutoCheckCannotGC nogc;
     return s->hasLatin1Chars()
            ? ::GetDeflatedUTF8StringLength(s->latin1Chars(nogc), s->length())
            : ::GetDeflatedUTF8StringLength(s->twoByteChars(nogc), s->length());
 }
 
-static const char16_t UTF8_REPLACEMENT_CHAR = 0xFFFD;
-
 template <typename CharT>
 static void
 DeflateStringToUTF8Buffer(const CharT* src, size_t srclen, mozilla::RangedPtr<char> dst,
                           size_t* dstlenp = nullptr, size_t* numcharsp = nullptr)
 {
     size_t capacity = 0;
     if (dstlenp) {
         capacity = *dstlenp;
@@ -96,26 +95,26 @@ DeflateStringToUTF8Buffer(const CharT* s
         *numcharsp = 0;
     }
 
     while (srclen) {
         uint32_t v;
         char16_t c = *src++;
         srclen--;
         if (c >= 0xDC00 && c <= 0xDFFF) {
-            v = UTF8_REPLACEMENT_CHAR;
+            v = unicode::REPLACEMENT_CHARACTER;
         } else if (c < 0xD800 || c > 0xDBFF) {
             v = c;
         } else {
             if (srclen < 1) {
-                v = UTF8_REPLACEMENT_CHAR;
+                v = unicode::REPLACEMENT_CHARACTER;
             } else {
                 char16_t c2 = *src;
                 if (c2 < 0xDC00 || c2 > 0xDFFF) {
-                    v = UTF8_REPLACEMENT_CHAR;
+                    v = unicode::REPLACEMENT_CHARACTER;
                 } else {
                     src++;
                     srclen--;
                     v = ((c - 0xD800) << 10) + (c2 - 0xDC00) + 0x10000;
                 }
             }
         }
 
@@ -266,23 +265,16 @@ enum class LoopDisposition {
 
 enum class OnUTF8Error {
     InsertReplacementCharacter,
     InsertQuestionMark,
     Throw,
     Crash,
 };
 
-// The Unicode REPLACEMENT CHARACTER, rendered as a diamond with a question
-// mark, meaning "someone screwed up here but it wasn't me".
-static const char16_t REPLACEMENT_CHARACTER = 0xFFFD;
-
-// If making changes to this algorithm, make sure to also update
-// LossyConvertUTF8toUTF16() in dom/wifi/WifiUtils.cpp
-//
 // Scan UTF8 input and (internally, at least) convert it to a series of UTF-16
 // code units. But you can also do odd things like pass an empty lambda for
 // `dst`, in which case the output is discarded entirely--the only effect of
 // calling the template that way is error-checking.
 template <OnUTF8Error ErrorAction, typename OutputFn>
 static bool
 InflateUTF8ToUTF16(JSContext* cx, const UTF8Chars src, OutputFn dst)
 {
@@ -306,17 +298,17 @@ InflateUTF8ToUTF16(JSContext* cx, const 
                 if (ErrorAction == OnUTF8Error::Throw) {                \
                     report(cx, arg);                                    \
                     return false;                                       \
                 } else if (ErrorAction == OnUTF8Error::Crash) {         \
                     MOZ_CRASH("invalid UTF-8 string: " # report);       \
                 } else {                                                \
                     char16_t replacement;                               \
                     if (ErrorAction == OnUTF8Error::InsertReplacementCharacter) { \
-                        replacement = REPLACEMENT_CHARACTER;            \
+                        replacement = unicode::REPLACEMENT_CHARACTER;   \
                     } else {                                            \
                         MOZ_ASSERT(ErrorAction == OnUTF8Error::InsertQuestionMark); \
                         replacement = '?';                              \
                     }                                                   \
                     if (dst(replacement) == LoopDisposition::Break) {   \
                         break;                                          \
                     }                                                   \
                     n = n2;                                             \
@@ -393,17 +385,17 @@ InflateUTF8StringHelper(JSContext* cx, c
     static_assert(std::is_same<CharT, char16_t>::value ||
                   std::is_same<CharT, Latin1Char>::value,
                   "bad CharT");
 
     *outlen = 0;
 
     size_t len = 0;
     bool allASCII = true;
-    auto count = [&](char16_t c) -> LoopDisposition {
+    auto count = [&len, &allASCII](char16_t c) -> LoopDisposition {
         len++;
         allASCII &= (c < 0x80);
         return LoopDisposition::Continue;
     };
     if (!InflateUTF8ToUTF16<ErrorAction>(cx, src, count)) {
         return CharsT();
     }
     *outlen = len;
@@ -420,17 +412,17 @@ InflateUTF8StringHelper(JSContext* cx, c
         for (uint32_t i = 0; i < srclen; i++) {
             dst[i] = CharT(src[i]);
         }
     } else {
         constexpr OnUTF8Error errorMode = std::is_same<CharT, Latin1Char>::value
             ? OnUTF8Error::InsertQuestionMark
             : OnUTF8Error::InsertReplacementCharacter;
         size_t j = 0;
-        auto push = [&](char16_t c) -> LoopDisposition {
+        auto push = [dst, &j](char16_t c) -> LoopDisposition {
             dst[j++] = CharT(c);
             return LoopDisposition::Continue;
         };
         MOZ_ALWAYS_TRUE((InflateUTF8ToUTF16<errorMode>(cx, src, push)));
         MOZ_ASSERT(j == len);
     }
     dst[*outlen] = 0;    // NUL char
 
@@ -462,17 +454,17 @@ JS::LossyUTF8CharsToNewTwoByteCharsZ(JSC
     UTF8Chars chars(utf8.c_str(), strlen(utf8.c_str()));
     return InflateUTF8StringHelper<OnUTF8Error::InsertReplacementCharacter, TwoByteCharsZ>(cx, chars, outlen);
 }
 
 JS::SmallestEncoding
 JS::FindSmallestEncoding(UTF8Chars utf8)
 {
     JS::SmallestEncoding encoding = JS::SmallestEncoding::ASCII;
-    auto onChar = [&](char16_t c) -> LoopDisposition {
+    auto onChar = [&encoding](char16_t c) -> LoopDisposition {
         if (c >= 0x80) {
             if (c < 0x100) {
                 encoding = JS::SmallestEncoding::Latin1;
             } else {
                 encoding = JS::SmallestEncoding::UTF16;
                 return LoopDisposition::Break;
             }
         }
--- a/js/src/vm/JSAtom.cpp
+++ b/js/src/vm/JSAtom.cpp
@@ -1062,22 +1062,16 @@ js::XDRAtom(XDRState<mode>* xdr, Mutable
 
     MOZ_TRY(xdr->codeUint32(&lengthAndEncoding));
 
     if (mode == XDR_DECODE) {
         length = lengthAndEncoding >> 1;
         latin1 = lengthAndEncoding & 0x1;
     }
 
-    // We need to align the string in the XDR buffer such that we can avoid
-    // non-align loads of 16bits characters.
-    if (!latin1) {
-        MOZ_TRY(xdr->codeAlign(sizeof(char16_t)));
-    }
-
     if (mode == XDR_ENCODE) {
         JS::AutoCheckCannotGC nogc;
         if (latin1) {
             return xdr->codeChars(const_cast<JS::Latin1Char*>(atomp->latin1Chars(nogc)),
                                   length);
         }
         return xdr->codeChars(const_cast<char16_t*>(atomp->twoByteChars(nogc)), length);
     }
@@ -1095,17 +1089,23 @@ js::XDRAtom(XDRState<mode>* xdr, Mutable
             chars = reinterpret_cast<const Latin1Char*>(ptr);
         }
         atom = AtomizeChars(cx, chars, length);
     } else {
 #if MOZ_LITTLE_ENDIAN
         /* Directly access the little endian chars in the XDR buffer. */
         const char16_t* chars = nullptr;
         if (length) {
-            const uint8_t *ptr;
+            // In the |mode == XDR_ENCODE| case above, when |nchars > 0|,
+            // |XDRState::codeChars(char16_t*, size_t nchars)| will align the
+            // buffer.  This code never calls that function, but it must act
+            // *as if* it had, so we must align manually here.
+            MOZ_TRY(xdr->codeAlign(sizeof(char16_t)));
+
+            const uint8_t* ptr;
             size_t nbyte = length * sizeof(char16_t);
             MOZ_TRY(xdr->peekData(&ptr, nbyte));
             MOZ_ASSERT(reinterpret_cast<uintptr_t>(ptr) % sizeof(char16_t) == 0,
                        "non-aligned buffer during JSAtom decoding");
             chars = reinterpret_cast<const char16_t*>(ptr);
         }
         atom = AtomizeChars(cx, chars, length);
 #else
--- a/js/src/vm/Xdr.cpp
+++ b/js/src/vm/Xdr.cpp
@@ -93,16 +93,20 @@ XDRState<mode>::codeChars(Utf8Unit* unit
 
 template<XDRMode mode>
 XDRResult
 XDRState<mode>::codeChars(char16_t* chars, size_t nchars)
 {
     if (nchars == 0) {
         return Ok();
     }
+
+    // Align the buffer to avoid unaligned loads.
+    MOZ_TRY(codeAlign(sizeof(char16_t)));
+
     size_t nbytes = nchars * sizeof(char16_t);
     if (mode == XDR_ENCODE) {
         uint8_t* ptr = buf.write(nbytes);
         if (!ptr) {
             return fail(JS::TranscodeResult_Throw);
         }
         mozilla::NativeEndian::copyAndSwapToLittleEndian(ptr, chars, nchars);
     } else {
--- a/js/src/vm/Xdr.h
+++ b/js/src/vm/Xdr.h
@@ -487,16 +487,18 @@ class XDRState : public XDRCoderBase
             *sp = reinterpret_cast<const char*>(ptr);
         }
         return Ok();
     }
 
     XDRResult codeChars(JS::Latin1Char* chars, size_t nchars);
     XDRResult codeChars(mozilla::Utf8Unit* units, size_t nchars);
 
+    // If |nchars > 0|, this calls |codeAlign(sizeof(char16_t))| so callers
+    // don't have to.
     XDRResult codeChars(char16_t* chars, size_t nchars);
 
     XDRResult codeFunction(JS::MutableHandleFunction objp,
                            HandleScriptSourceObject sourceObject = nullptr);
     XDRResult codeScript(MutableHandleScript scriptp);
 };
 
 using XDREncoder = XDRState<XDR_ENCODE>;
--- a/layout/build/nsLayoutStatics.cpp
+++ b/layout/build/nsLayoutStatics.cpp
@@ -74,17 +74,16 @@
 #include "nsXULContentUtils.h"
 #include "nsXULPrototypeCache.h"
 #include "nsXULTooltipListener.h"
 
 #include "nsMenuBarListener.h"
 #endif
 
 #include "CubebUtils.h"
-#include "Latency.h"
 #include "WebAudioUtils.h"
 
 #include "nsError.h"
 
 #include "nsJSEnvironment.h"
 #include "nsContentSink.h"
 #include "nsFrameMessageManager.h"
 #include "nsDOMMutationObserver.h"
@@ -216,17 +215,16 @@ nsLayoutStatics::Initialize()
 #endif
 
   rv = nsFocusManager::Init();
   if (NS_FAILED(rv)) {
     NS_ERROR("Could not initialize nsFocusManager");
     return rv;
   }
 
-  AsyncLatencyLogger::InitializeStatics();
   DecoderDoctorLogger::Init();
   MediaManager::StartupInit();
   CubebUtils::InitLibrary();
 
   nsContentSink::InitializeStatics();
   nsHtml5Module::InitializeStatics();
   mozilla::dom::FallbackEncoding::Initialize();
   nsLayoutUtils::Initialize();
@@ -354,17 +352,16 @@ nsLayoutStatics::Shutdown()
   nsGlobalWindowInner::ShutDown();
   nsGlobalWindowOuter::ShutDown();
   WebIDLGlobalNameHash::Shutdown();
   nsListControlFrame::Shutdown();
   nsXBLService::Shutdown();
   FrameLayerBuilder::Shutdown();
 
   CubebUtils::ShutdownLibrary();
-  AsyncLatencyLogger::ShutdownLogger();
   WebAudioUtils::Shutdown();
 
   nsCORSListenerProxy::Shutdown();
 
   PointerEventHandler::ReleaseStatics();
 
   TouchManager::ReleaseStatics();
 
--- a/layout/forms/nsFieldSetFrame.cpp
+++ b/layout/forms/nsFieldSetFrame.cpp
@@ -38,24 +38,41 @@ nsFieldSetFrame::nsFieldSetFrame(Compute
 {
   mLegendSpace  = 0;
 }
 
 nsRect
 nsFieldSetFrame::VisualBorderRectRelativeToSelf() const
 {
   WritingMode wm = GetWritingMode();
-  Side legendSide = wm.PhysicalSide(eLogicalSideBStart);
-  nscoord legendBorder = StyleBorder()->GetComputedBorderWidth(legendSide);
   LogicalRect r(wm, LogicalPoint(wm, 0, 0), GetLogicalSize(wm));
   nsSize containerSize = r.Size(wm).GetPhysicalSize(wm);
-  if (legendBorder < mLegendRect.BSize(wm)) {
-    nscoord off = (mLegendRect.BSize(wm) - legendBorder) / 2;
-    r.BStart(wm) += off;
-    r.BSize(wm) -= off;
+  if (nsIFrame* legend = GetLegend()) {
+    nscoord legendSize = legend->GetLogicalSize(wm).BSize(wm);
+    auto legendMargin = legend->GetLogicalUsedMargin(wm);
+    nscoord legendStartMargin = legendMargin.BStart(wm);
+    nscoord legendEndMargin = legendMargin.BEnd(wm);
+    nscoord border = GetUsedBorder().Side(wm.PhysicalSide(eLogicalSideBStart));
+    // Calculate the offset from the border area block-axis start edge needed to
+    // center-align our border with the legend's border-box (in the block-axis).
+    nscoord off = (legendStartMargin + legendSize / 2) - border / 2;
+    // We don't want to display our border above our border area.
+    if (off > nscoord(0)) {
+      nscoord marginBoxSize = legendStartMargin + legendSize + legendEndMargin;
+      if (marginBoxSize > border) {
+        // We don't want to display our border below the legend's margin-box,
+        // so we align it to the block-axis end if that happens.
+        nscoord overflow = off + border - marginBoxSize;
+        if (overflow > nscoord(0)) {
+          off -= overflow;
+        }
+        r.BStart(wm) += off;
+        r.BSize(wm) -= off;
+      }
+    }
   }
   return r.GetPhysicalRect(wm, containerSize);
 }
 
 nsIFrame*
 nsFieldSetFrame::GetInner() const
 {
   nsIFrame* last = mFrames.LastChild();
@@ -440,30 +457,41 @@ nsFieldSetFrame::Reflow(nsPresContext*  
     const nsSize dummyContainerSize;
     ReflowChild(legend, aPresContext, legendDesiredSize, *legendReflowInput,
                 wm, LogicalPoint(wm), dummyContainerSize,
                 NS_FRAME_NO_MOVE_FRAME, aStatus);
 #ifdef NOISY_REFLOW
     printf("  returned (%d, %d)\n",
            legendDesiredSize.Width(), legendDesiredSize.Height());
 #endif
-    // figure out the legend's rectangle
+    // Calculate the legend's margin-box rectangle.
     legendMargin = legend->GetLogicalUsedMargin(wm);
     mLegendRect =
       LogicalRect(wm, 0, 0,
                   legendDesiredSize.ISize(wm) + legendMargin.IStartEnd(wm),
                   legendDesiredSize.BSize(wm) + legendMargin.BStartEnd(wm));
     nscoord oldSpace = mLegendSpace;
     mLegendSpace = 0;
-    if (mLegendRect.BSize(wm) > border.BStart(wm)) {
-      // center the border on the legend
-      mLegendSpace = mLegendRect.BSize(wm) - border.BStart(wm);
+    nscoord borderBStart = border.BStart(wm);
+    if (mLegendRect.BSize(wm) > borderBStart) {
+      // mLegendSpace is the space to subtract from our content-box size below.
+      mLegendSpace = mLegendRect.BSize(wm) - borderBStart;
     } else {
-      mLegendRect.BStart(wm) =
-        (border.BStart(wm) - mLegendRect.BSize(wm)) / 2;
+      // Calculate the border-box position that would center the legend's
+      // border-box within the fieldset border:
+      nscoord off = (borderBStart - legendDesiredSize.BSize(wm)) / 2;
+      off -= legendMargin.BStart(wm); // convert to a margin-box position
+      if (off > nscoord(0)) {
+        // Align the legend to the end if center-aligning it would overflow.
+        nscoord overflow = off + mLegendRect.BSize(wm) - borderBStart;
+        if (overflow > nscoord(0)) {
+          off -= overflow;
+        }
+        mLegendRect.BStart(wm) += off;
+      }
     }
 
     // if the legend space changes then we need to reflow the
     // content area as well.
     if (mLegendSpace != oldSpace && inner) {
       reflowInner = true;
     }
 
deleted file mode 100644
--- a/layout/reftests/native-theme/403458-winmenu-ltr.xul
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet href="chrome://global/skin/" type="text/css"?>
-
-<window id="window403458"
-        xmlns="http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul">
-  <hbox style="-moz-appearance: menuarrow; direction: ltr" />
-</window>
deleted file mode 100644
--- a/layout/reftests/native-theme/403458-winmenu-rtl.xul
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet href="chrome://global/skin/" type="text/css"?>
-
-<window id="window403458"
-        xmlns="http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul">
-  <hbox style="-moz-appearance: menuarrow; direction: rtl" />
-</window>
--- a/layout/reftests/native-theme/reftest.list
+++ b/layout/reftests/native-theme/reftest.list
@@ -24,19 +24,16 @@ needs-focus == listbox-nonnative-when-st
 fails-if(!nativeThemePref) != radio-native.html radio-nonnative.html
 == radio-still-native-when-styled.html radio-still-native-when-styled-ref.html
 fails-if(!nativeThemePref) != checkbox-native.html checkbox-nonnative.html
 == checkbox-still-native-when-styled.html checkbox-still-native-when-styled-ref.html
 == native-theme-disabled-cascade-levels.html native-theme-disabled-cascade-levels-ref.html
 
 != 427122-1.html 427122-1-ref.html
 
-# 403458 is a Windows-only bug
-skip-if(!winWidget) != 403458-winmenu-ltr.xul 403458-winmenu-rtl.xul
-
 == 470711-1.xul 470711-1-ref.xul
 
 == 482955-1.xul 482955-1-ref.xul
 
 == 492155-1.html about:blank
 == 492155-2.html about:blank
 == 492155-3.html about:blank
 fails-if(Android&&!asyncPan) != 492155-4.html about:blank
--- a/layout/style/ImageLoader.cpp
+++ b/layout/style/ImageLoader.cpp
@@ -160,21 +160,23 @@ ImageLoader::AssociateRequestToFrame(img
         } else {
           // If we don't already have a complete frame, kickoff decode. This
           // will ensure that either onFrameComplete or onLoadComplete will
           // unblock document onload.
 
           // We want to request decode in such a way that avoids triggering
           // sync decode. First, we attempt to convert the aRequest into
           // a imgIContainer. If that succeeds, then aRequest has an image
-          // and we can request decoding for size at zero size, and that will
-          // trigger async decode. If the conversion to imgIContainer is
-          // unsuccessful, then that means aRequest doesn't have an image yet,
-          // which means we can safely call StartDecoding() on it without
-          // triggering any synchronous work.
+          // and we can request decoding for size at zero size, the size will
+          // be ignored because we don't pass the FLAG_HIGH_QUALITY_SCALING
+          // flag and an async decode (because we didn't pass any sync decoding
+          // flags) at the intrinsic size will be requested. If the conversion
+          // to imgIContainer is unsuccessful, then that means aRequest doesn't
+          // have an image yet, which means we can safely call StartDecoding()
+          // on it without triggering any synchronous work.
           nsCOMPtr<imgIContainer> imgContainer;
           aRequest->GetImage(getter_AddRefs(imgContainer));
           if (imgContainer) {
             imgContainer->RequestDecodeForSize(gfx::IntSize(0, 0),
               imgIContainer::DECODE_FLAGS_DEFAULT);
           } else {
             // It's safe to call StartDecoding directly, since it can't
             // trigger synchronous decode without an image. Flags are ignored.
--- a/layout/style/test/test_non_content_accessible_values.html
+++ b/layout/style/test/test_non_content_accessible_values.html
@@ -12,17 +12,108 @@ const NON_CONTENT_ACCESSIBLE_VALUES = {
     "-moz-grid-line",
     "-moz-stack",
     "-moz-inline-stack",
     "-moz-deck",
     "-moz-popup",
     "-moz-groupbox",
   ],
   "-moz-appearance": [
+    "button-arrow-down",
+    "button-arrow-next",
+    "button-arrow-previous",
+    "button-arrow-up",
+    "button-focus",
+    "dualbutton",
+    "groupbox",
+    "menubar",
+    "menuitem",
+    "checkmenuitem",
+    "radiomenuitem",
+    "menuitemtext",
+    "menupopup",
+    "menucheckbox",
+    "menuradio",
+    "menuseparator",
+    "menuarrow",
+    "menuimage",
     "-moz-menulist-button",
+    "checkbox-container",
+    "radio-container",
+    "checkbox-label",
+    "radio-label",
+    "resizerpanel",
+    "resizer",
+    "scrollbar",
+    "scrollbar-small",
+    "scrollbar-horizontal",
+    "scrollbar-vertical",
+    "scrollbarbutton-up",
+    "scrollbarbutton-down",
+    "scrollbarbutton-left",
+    "scrollbarbutton-right",
+    "scrollcorner",
+    "separator",
+    "spinner",
+    "spinner-upbutton",
+    "spinner-downbutton",
+    "spinner-textfield",
+    "splitter",
+    "statusbar",
+    "statusbarpanel",
+    "tab",
+    "tabpanel",
+    "tabpanels",
+    "tab-scroll-arrow-back",
+    "tab-scroll-arrow-forward",
+    "toolbar",
+    "toolbarbutton",
+    "toolbarbutton-dropdown",
+    "toolbargripper",
+    "toolbox",
+    "tooltip",
+    "treeheader",
+    "treeheadercell",
+    "treeheadersortarrow",
+    "treeitem",
+    "treeline",
+    "treetwisty",
+    "treetwistyopen",
+    "treeview",
+    "window",
+    "dialog",
+    "-moz-win-communications-toolbox",
+    "-moz-win-media-toolbox",
+    "-moz-win-browsertabbar-toolbox",
+    "-moz-win-glass",
+    "-moz-win-borderless-glass",
+    "-moz-win-exclude-glass",
+    "-moz-mac-fullscreen-button",
+    "-moz-mac-help-button",
+    "-moz-window-button-box",
+    "-moz-window-button-box-maximized",
+    "-moz-window-button-close",
+    "-moz-window-button-maximize",
+    "-moz-window-button-minimize",
+    "-moz-window-button-restore",
+    "-moz-window-frame-bottom",
+    "-moz-window-frame-left",
+    "-moz-window-frame-right",
+    "-moz-window-titlebar",
+    "-moz-window-titlebar-maximized",
+    "-moz-gtk-info-bar",
+    "-moz-mac-active-source-list-selection",
+    "-moz-mac-disclosure-button-closed",
+    "-moz-mac-disclosure-button-open",
+    "-moz-mac-source-list",
+    "-moz-mac-source-list-selection",
+    "-moz-mac-vibrancy-dark",
+    "-moz-mac-vibrancy-light",
+    "-moz-mac-vibrant-titlebar-dark",
+    "-moz-mac-vibrant-titlebar-light",
   ],
 };
 
 if (!SpecialPowers.getBoolPref("layout.css.xul-box-display-values.content.enabled")) {
   NON_CONTENT_ACCESSIBLE_VALUES.display.push("-moz-box", "-moz-inline-box");
 }
 
 const sheet = document.getElementById("sheet");
--- a/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
@@ -13,17 +13,16 @@
 
 #include "AudioConduit.h"
 #include "nsCOMPtr.h"
 #include "mozilla/Services.h"
 #include "nsServiceManagerUtils.h"
 #include "nsIPrefService.h"
 #include "nsIPrefBranch.h"
 #include "nsThreadUtils.h"
-#include "Latency.h"
 #include "mozilla/Telemetry.h"
 
 #include "webrtc/modules/audio_processing/include/audio_processing.h"
 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp.h"
 #include "webrtc/voice_engine/include/voe_errors.h"
 #include "webrtc/voice_engine/voice_engine_impl.h"
 #include "webrtc/system_wrappers/include/clock.h"
 
@@ -700,20 +699,16 @@ WebrtcAudioConduit::SendAudioFrame(const
 
   // if transmission is not started .. conduit cannot insert frames
   if(!mEngineTransmitting)
   {
     CSFLogError(LOGTAG, "%s Engine not transmitting ", __FUNCTION__);
     return kMediaConduitSessionNotInited;
   }
 
-  if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
-    struct Processing insert = { TimeStamp::Now(), 0 };
-    mProcessing.AppendElement(insert);
-  }
 
   capture_delay = mCaptureDelay;
   // Insert the samples
   mPtrVoEBase->audio_transport()->PushCaptureData(mChannel, audio_data,
                                                   sizeof(audio_data[0])*8, // bits
                                                   samplingFreqHz,
                                                   channels,
                                                   lengthSamples);
@@ -802,58 +797,29 @@ WebrtcAudioConduit::GetAudioFrame(int16_
                   "A/V sync: sync delta: %dms, audio jitter delay %dms, playout delay %dms",
                   avsync_offset_ms, jitter_buffer_delay_ms, playout_buffer_delay_ms);
     } else {
       CSFLogError(LOGTAG, "A/V sync: GetAVStats failed");
     }
     mLastSyncLog = mSamples;
   }
 
-  if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
-    if (mProcessing.Length() > 0) {
-      unsigned int now;
-      mPtrVoEVideoSync->GetPlayoutTimestamp(mChannel, now);
-      if (static_cast<uint32_t>(now) != mLastTimestamp) {
-        mLastTimestamp = static_cast<uint32_t>(now);
-        // Find the block that includes this timestamp in the network input
-        while (mProcessing.Length() > 0) {
-          // FIX! assumes 20ms @ 48000Hz
-          // FIX handle wrap-around
-          if (mProcessing[0].mRTPTimeStamp + 20*(48000/1000) >= now) {
-            TimeDuration t = TimeStamp::Now() - mProcessing[0].mTimeStamp;
-            // Wrap-around?
-            int64_t delta = t.ToMilliseconds() + (now - mProcessing[0].mRTPTimeStamp)/(48000/1000);
-            LogTime(AsyncLatencyLogger::AudioRecvRTP, ((uint64_t) this), delta);
-            break;
-          }
-          mProcessing.RemoveElementAt(0);
-        }
-      }
-    }
-  }
   CSFLogDebug(LOGTAG,"%s GetAudioFrame:Got samples: length %d ",__FUNCTION__,
                                                                lengthSamples);
   return kMediaConduitNoError;
 }
 
 // Transport Layer Callbacks
 MediaConduitErrorCode
 WebrtcAudioConduit::ReceivedRTPPacket(const void *data, int len, uint32_t ssrc)
 {
   CSFLogDebug(LOGTAG,  "%s : channel %d", __FUNCTION__, mChannel);
 
   if(mEngineReceiving)
   {
-    if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
-      // timestamp is at 32 bits in ([1])
-      struct Processing insert = { TimeStamp::Now(),
-                                   ntohl(static_cast<const uint32_t *>(data)[1]) };
-      mProcessing.AppendElement(insert);
-    }
-
     // XXX we need to get passed the time the packet was received
     if(mPtrVoENetwork->ReceivedRTPPacket(mChannel, data, len) == -1)
     {
       int error = mPtrVoEBase->LastError();
       CSFLogError(LOGTAG, "%s RTP Processing Error %d", __FUNCTION__, error);
       if(error == VE_RTP_RTCP_MODULE_ERROR)
       {
         return kMediaConduitRTPRTCPModuleError;
@@ -982,26 +948,16 @@ WebrtcAudioConduit::StartReceiving()
 // Called on AudioGUM or MSG thread
 bool
 WebrtcAudioConduit::SendRtp(const uint8_t* data,
                             size_t len,
                             const webrtc::PacketOptions& options)
 {
   CSFLogDebug(LOGTAG,  "%s: len %lu", __FUNCTION__, (unsigned long)len);
 
-  if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
-    if (mProcessing.Length() > 0) {
-      TimeStamp started = mProcessing[0].mTimeStamp;
-      mProcessing.RemoveElementAt(0);
-      mProcessing.RemoveElementAt(0); // 20ms packetization!  Could automate this by watching sizes
-      TimeDuration t = TimeStamp::Now() - started;
-      int64_t delta = t.ToMilliseconds();
-      LogTime(AsyncLatencyLogger::AudioSendRTP, ((uint64_t) this), delta);
-    }
-  }
   ReentrantMonitorAutoEnter enter(mTransportMonitor);
   // XXX(pkerr) - the PacketOptions are being ignored. This parameter was added along
   // with the Call API update in the webrtc.org codebase.
   // The only field in it is the packet_id, which is used when the header
   // extension for TransportSequenceNumber is being used, which we don't.
   (void)options;
   if(mTransmitterTransport &&
      (mTransmitterTransport->SendRtpPacket(data, len) == NS_OK))
--- a/media/webrtc/signaling/src/media-conduit/AudioConduit.h
+++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.h
@@ -180,17 +180,16 @@ public:
                       mTransmitterTransport(nullptr),
                       mReceiverTransport(nullptr),
                       mEngineTransmitting(false),
                       mEngineReceiving(false),
                       mChannel(-1),
                       mDtmfEnabled(false),
                       mCodecMutex("AudioConduit codec db"),
                       mCaptureDelay(150),
-                      mLastTimestamp(0),
                       mSamples(0),
                       mLastSyncLog(0)
   {
   }
 
   virtual ~WebrtcAudioConduit();
 
   MediaConduitErrorCode Init();
@@ -334,18 +333,16 @@ private:
   RecvCodecList    mRecvCodecList;
 
   Mutex mCodecMutex; // protects mCurSendCodecConfig
   nsAutoPtr<AudioCodecConfig> mCurSendCodecConfig;
 
   // Current "capture" delay (really output plus input delay)
   int32_t mCaptureDelay;
 
-  uint32_t mLastTimestamp;
-
   webrtc::AudioFrame mAudioFrame; // for output pulls
 
   uint32_t mSamples;
   uint32_t mLastSyncLog;
 
   RtpSourceObserver mRtpSourceObserver;
 };
 
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -77,18 +77,16 @@ static_assert((WEBRTC_MAX_SAMPLE_RATE / 
 using namespace mozilla;
 using namespace mozilla::dom;
 using namespace mozilla::gfx;
 using namespace mozilla::layers;
 
 mozilla::LazyLogModule gMediaPipelineLog("MediaPipeline");
 
 namespace mozilla {
-extern mozilla::LogModule*
-AudioLogModule();
 
 class VideoConverterListener
 {
 public:
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VideoConverterListener)
 
   virtual void OnVideoFrameConverted(const webrtc::VideoFrame& aVideoFrame) = 0;
 
@@ -1928,17 +1926,16 @@ public:
     // If the sampling rate is not-supported, we will use 48kHz instead.
     , mRate(static_cast<AudioSessionConduit*>(mConduit.get())
                 ->IsSamplingFreqSupported(mSource->GraphRate())
               ? mSource->GraphRate()
               : WEBRTC_MAX_SAMPLE_RATE)
     , mTaskQueue(
         new TaskQueue(GetMediaThreadPool(MediaThreadType::WEBRTC_DECODER),
                       "AudioPipelineListener"))
-    , mLastLog(0)
   {
     AddTrackToSource(mRate);
   }
 
   // Implement MediaStreamListener
   void NotifyPull(MediaStreamGraph* aGraph,
                   StreamTime aDesiredTime) override
   {
@@ -2026,42 +2023,32 @@ private:
 
       segment.AppendFrames(
         samples.forget(), outputChannels, frames, mPrincipalHandle);
 
       // Handle track not actually added yet or removed/finished
       if (mSource->AppendToTrack(mTrackId, &segment)) {
         framesNeeded -= frames;
         mPlayedTicks += frames;
-        if (MOZ_LOG_TEST(AudioLogModule(), LogLevel::Debug)) {
-          if (mPlayedTicks > mLastLog + mRate) {
-            MOZ_LOG(AudioLogModule(),
-                    LogLevel::Debug,
-                    ("%p: Inserting samples into track %d, total = "
-                     "%" PRIu64,
-                     (void*)this,
-                     mTrackId,
-                     mPlayedTicks));
-            mLastLog = mPlayedTicks;
-          }
-        }
       } else {
         MOZ_LOG(gMediaPipelineLog, LogLevel::Error, ("AppendToTrack failed"));
         // we can't un-read the data, but that's ok since we don't want to
         // buffer - but don't i-loop!
         break;
       }
     }
   }
 
   RefPtr<MediaSessionConduit> mConduit;
+  // This conduit's sampling rate. This is either 16, 32, 44.1 or 48kHz, and
+  // tries to be the same as the graph rate. If the graph rate is higher than
+  // 48kHz, mRate is capped to 48kHz. If mRate does not match the graph rate,
+  // audio is resampled to the graph rate.
   const TrackRate mRate;
   const RefPtr<TaskQueue> mTaskQueue;
-  // Graph's current sampling rate
-  TrackTicks mLastLog = 0; // mPlayedTicks when we last logged
 };
 
 MediaPipelineReceiveAudio::MediaPipelineReceiveAudio(
   const std::string& aPc,
   nsCOMPtr<nsIEventTarget> aMainThread,
   nsCOMPtr<nsIEventTarget> aStsThread,
   RefPtr<AudioSessionConduit> aConduit,
   dom::MediaStreamTrack* aTrack)
--- a/media/webrtc/signaling/test/FakeMediaStreamsImpl.h
+++ b/media/webrtc/signaling/test/FakeMediaStreamsImpl.h
@@ -5,19 +5,16 @@
 #ifndef FAKE_MEDIA_STREAMIMPL_H_
 #define FAKE_MEDIA_STREAMIMPL_H_
 
 #include "FakeMediaStreams.h"
 
 #include "nspr.h"
 #include "nsError.h"
 
-void LogTime(AsyncLatencyLogger::LatencyLogIndex index, uint64_t b, int64_t c) {}
-void LogLatency(AsyncLatencyLogger::LatencyLogIndex index, uint64_t b, int64_t c) {}
-
 static const int AUDIO_BUFFER_SIZE = 1600;
 static const int NUM_CHANNELS      = 2;
 static const int GRAPH_RATE        = 16000;
 
 NS_IMPL_ISUPPORTS0(Fake_DOMMediaStream)
 
 // Fake_MediaStream
 double Fake_MediaStream::StreamTimeToSeconds(mozilla::StreamTime aTime) {
--- a/servo/components/style/values/specified/box.rs
+++ b/servo/components/style/values/specified/box.rs
@@ -961,61 +961,79 @@ pub enum Resize {
 )]
 #[repr(u8)]
 pub enum Appearance {
     /// No appearance at all.
     None,
     /// A typical dialog button.
     Button,
     /// Various arrows that go in buttons
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     ButtonArrowDown,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     ButtonArrowNext,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     ButtonArrowPrevious,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     ButtonArrowUp,
     /// A rectangular button that contains complex content
     /// like images (e.g. HTML <button> elements)
     ButtonBevel,
     /// The focus outline box inside of a button.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     ButtonFocus,
     /// The caret of a text area
     Caret,
     /// A dual toolbar button (e.g., a Back button with a dropdown)
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Dualbutton,
     /// A groupbox.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Groupbox,
     /// A inner-spin button.
     InnerSpinButton,
     /// List boxes.
     Listbox,
     /// A listbox item.
     Listitem,
     /// Menu Bar background
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Menubar,
     /// <menu> and <menuitem> appearances
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Menuitem,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Checkmenuitem,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Radiomenuitem,
     /// For text on non-iconic menuitems only
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Menuitemtext,
     /// A dropdown list.
     Menulist,
     /// The dropdown button(s) that open up a dropdown list.
     MenulistButton,
     /// The text part of a dropdown list, to left of button.
     MenulistText,
     /// An editable textfield with a dropdown list (a combobox).
     MenulistTextfield,
     /// Menu Popup background.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Menupopup,
     /// menu checkbox/radio appearances
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Menucheckbox,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Menuradio,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Menuseparator,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Menuarrow,
     /// An image in the menu gutter, like in bookmarks or history.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Menuimage,
     /// A horizontal meter bar.
     Meterbar,
     /// The meter bar's meter indicator.
     Meterchunk,
     /// The "arrowed" part of the dropdown button that open up a dropdown list.
     #[parse(condition = "in_ua_or_chrome_sheet")]
     MozMenulistButton,
@@ -1030,163 +1048,236 @@ pub enum Appearance {
     /// A vertical progress chunk.
     ProgresschunkVertical,
     /// A checkbox element.
     Checkbox,
     /// A radio element within a radio group.
     Radio,
     /// A generic container that always repaints on state changes. This is a
     /// hack to make XUL checkboxes and radio buttons work.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     CheckboxContainer,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     RadioContainer,
     /// The label part of a checkbox or radio button, used for painting a focus
     /// outline.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     CheckboxLabel,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     RadioLabel,
     /// nsRangeFrame and its subparts
     Range,
     RangeThumb,
     /// The resizer background area in a status bar for the resizer widget in
     /// the corner of a window.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Resizerpanel,
     /// The resizer itself.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Resizer,
     /// A slider.
     ScaleHorizontal,
     ScaleVertical,
     /// A slider's thumb.
     ScalethumbHorizontal,
     ScalethumbVertical,
     /// If the platform supports it, the left/right chunks of the slider thumb.
     Scalethumbstart,
     Scalethumbend,
     /// The ticks for a slider.
     Scalethumbtick,
     /// A scrollbar.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Scrollbar,
     /// A small scrollbar.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     ScrollbarSmall,
     /// The scrollbar slider
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     ScrollbarHorizontal,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     ScrollbarVertical,
     /// A scrollbar button (up/down/left/right).
     /// Keep these in order (some code casts these values to `int` in order to
     /// compare them against each other).
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     ScrollbarbuttonUp,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     ScrollbarbuttonDown,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     ScrollbarbuttonLeft,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     ScrollbarbuttonRight,
     /// The scrollbar thumb.
     ScrollbarthumbHorizontal,
     ScrollbarthumbVertical,
     /// The scrollbar track.
     ScrollbartrackHorizontal,
     ScrollbartrackVertical,
     /// The scroll corner
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Scrollcorner,
     /// A searchfield.
     Searchfield,
     /// A separator.  Can be horizontal or vertical.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Separator,
     /// A spin control (up/down control for time/date pickers).
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Spinner,
     /// The up button of a spin control.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     SpinnerUpbutton,
     /// The down button of a spin control.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     SpinnerDownbutton,
     /// The textfield of a spin control
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     SpinnerTextfield,
     /// A splitter.  Can be horizontal or vertical.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Splitter,
     /// A status bar in a main application window.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Statusbar,
     /// A single pane of a status bar.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Statusbarpanel,
     /// A single tab in a tab widget.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Tab,
     /// A single pane (inside the tabpanels container).
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Tabpanel,
     /// The tab panels container.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Tabpanels,
     /// The tabs scroll arrows (left/right).
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     TabScrollArrowBack,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     TabScrollArrowForward,
     /// A textfield or text area.
     Textfield,
     /// A multiline text field.
     TextfieldMultiline,
     /// A toolbar in an application window.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Toolbar,
     /// A single toolbar button (with no associated dropdown).
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Toolbarbutton,
     /// The dropdown portion of a toolbar button
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     ToolbarbuttonDropdown,
     /// The gripper for a toolbar.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Toolbargripper,
     /// The toolbox that contains the toolbars.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Toolbox,
     /// A tooltip.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Tooltip,
     /// A listbox or tree widget header
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Treeheader,
     /// An individual header cell
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Treeheadercell,
     /// The sort arrow for a header.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Treeheadersortarrow,
     /// A tree item.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Treeitem,
     /// A tree widget branch line
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Treeline,
     /// A tree widget twisty.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Treetwisty,
     /// Open tree widget twisty.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Treetwistyopen,
     /// A tree widget.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Treeview,
     /// Window and dialog backgrounds.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Window,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     Dialog,
 
     /// Vista Rebars.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozWinCommunicationsToolbox,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozWinMediaToolbox,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozWinBrowsertabbarToolbox,
     /// Vista glass.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozWinGlass,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozWinBorderlessGlass,
     /// -moz-apperance style used in setting proper glass margins.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozWinExcludeGlass,
 
     /// Titlebar elements on the Mac.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozMacFullscreenButton,
     /// Mac help button.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozMacHelpButton,
 
     /// Windows themed window frame elements.
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozWindowButtonBox,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozWindowButtonBoxMaximized,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozWindowButtonClose,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozWindowButtonMaximize,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozWindowButtonMinimize,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozWindowButtonRestore,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozWindowFrameBottom,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozWindowFrameLeft,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozWindowFrameRight,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozWindowTitlebar,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozWindowTitlebarMaximized,
 
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozGtkInfoBar,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozMacActiveSourceListSelection,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozMacDisclosureButtonClosed,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozMacDisclosureButtonOpen,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozMacSourceList,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozMacSourceListSelection,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozMacVibrancyDark,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozMacVibrancyLight,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozMacVibrantTitlebarDark,
+    #[parse(condition = "in_ua_or_chrome_sheet")]
     MozMacVibrantTitlebarLight,
 
     /// A non-disappearing scrollbar.
     #[css(skip)]
     ScrollbarNonDisappearing,
 
     /// A themed focus outline (for outline:auto).
     ///
--- a/testing/web-platform/meta/MANIFEST.json
+++ b/testing/web-platform/meta/MANIFEST.json
@@ -185954,16 +185954,28 @@
       [
        "/html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-block-margins-ref.html",
        "=="
       ]
      ],
      {}
     ]
    ],
+   "html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-block-margins-2.html": [
+    [
+     "/html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-block-margins-2.html",
+     [
+      [
+       "/html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-block-margins-2-ref.html",
+       "=="
+      ]
+     ],
+     {}
+    ]
+   ],
    "html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-display-none-rendering.html": [
     [
      "/html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-display-none-rendering.html",
      [
       [
        "/html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-display-none-rendering-ref.html",
        "=="
       ]
@@ -288807,16 +288819,21 @@
      {}
     ]
    ],
    "html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-block-margins-ref.html": [
     [
      {}
     ]
    ],
+   "html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-block-margins-2-ref.html": [
+    [
+     {}
+    ]
+   ],
    "html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-display-none-rendering-ref.html": [
     [
      {}
     ]
    ],
    "html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-display-rendering-ref.html": [
     [
      {}
@@ -608834,16 +608851,24 @@
   "html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-block-margins-ref.html": [
    "f2701d26f3213e02eee9d3592bbeae7da7e7b898",
    "support"
   ],
   "html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-block-margins.html": [
    "98cd16c7c147316669eb6c456538f43ae90fbf44",
    "reftest"
   ],
+  "html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-block-margins-2-ref.html": [
+   "f3975502148a936856a25722a1c80d59322c11f3",
+   "support"
+  ],
+  "html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-block-margins-2.html": [
+   "bfca4b86a6ae4399d4f8ea5d0cfadbb234d79b88",
+   "reftest"
+  ],
   "html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-display-none-rendering-ref.html": [
    "e6eff47e53c7a40e973b7f9dc298af2343f59941",
    "support"
   ],
   "html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-display-none-rendering.html": [
    "abf3c45df71ee6617ddb8b6d402a103f54624820",
    "reftest"
   ],
deleted file mode 100644
--- a/testing/web-platform/meta/background-fetch/abort.https.window.js.ini
+++ /dev/null
@@ -1,7 +0,0 @@
-[abort.https.window.html]
-  [Calling BackgroundFetchRegistration.abort sets the correct fields and responses are still available]
-    expected: FAIL
-
-  [Aborting the same registration twice fails]
-    expected: FAIL
-
--- a/testing/web-platform/meta/content-security-policy/generic/only-valid-whitespaces-are-allowed.html.ini
+++ b/testing/web-platform/meta/content-security-policy/generic/only-valid-whitespaces-are-allowed.html.ini
@@ -1,14 +1,7 @@
 [only-valid-whitespaces-are-allowed.html]
-  expected: TIMEOUT
-  [U+00A0 NBSP  should not be parsed between directive name and value - HTTP header]
-    expected: TIMEOUT
-
   [U+00A0 NBSP  should not be parsed inside directive value - meta tag]
-    expected: TIMEOUT
-
-  [U+00A0 NBSP  should not be parsed between directive name and value - meta tag]
-    expected: TIMEOUT
+    expected: FAIL
 
   [U+00A0 NBSP  should not be parsed inside directive value - HTTP header]
-    expected: TIMEOUT
+    expected: FAIL
 
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/content-security-policy/inheritance/blob-url-self-navigate-inherits.sub.html.ini
@@ -0,0 +1,4 @@
+[blob-url-self-navigate-inherits.sub.html]
+  [Violation report status OK.]
+    expected: FAIL
+
--- a/testing/web-platform/meta/content-security-policy/navigate-to/child-navigates-parent-blocked.sub.html.ini
+++ b/testing/web-platform/meta/content-security-policy/navigate-to/child-navigates-parent-blocked.sub.html.ini
@@ -1,4 +1,10 @@
 [child-navigates-parent-blocked.sub.html]
   [Violation report status OK.]
     expected: FAIL
 
+  [Test that the child can't navigate the parent because the relevant policy belongs to the navigation initiator (in this case the child)]
+    expected: FAIL
+
+  [Test that the child can't navigate the parent because the relevant policy belongs to the navigation initiator (in this case the child which has the policy `navigate-to 'none'`)]
+    expected: FAIL
+
--- a/testing/web-platform/meta/content-security-policy/navigate-to/form-blocked.sub.html.ini
+++ b/testing/web-platform/meta/content-security-policy/navigate-to/form-blocked.sub.html.ini
@@ -1,4 +1,7 @@
 [form-blocked.sub.html]
   [Violation report status OK.]
     expected: FAIL
 
+  [Test that the child iframe navigation is not allowed]
+    expected: FAIL
+
--- a/testing/web-platform/meta/content-security-policy/navigate-to/form-cross-origin-blocked.sub.html.ini
+++ b/testing/web-platform/meta/content-security-policy/navigate-to/form-cross-origin-blocked.sub.html.ini
@@ -1,7 +1,10 @@
 [form-cross-origin-blocked.sub.html]
   [Test that the child iframe navigation is blocked]
     expected: FAIL
 
   [Violation report status OK.]
     expected: FAIL
 
+  [Test that the child iframe navigation is not allowed]
+    expected: FAIL
+
--- a/testing/web-platform/meta/content-security-policy/navigate-to/form-redirected-blocked.sub.html.ini
+++ b/testing/web-platform/meta/content-security-policy/navigate-to/form-redirected-blocked.sub.html.ini
@@ -1,7 +1,10 @@
 [form-redirected-blocked.sub.html]
   [Test that the child iframe navigation is blocked]
     expected: FAIL
 
   [Violation report status OK.]
     expected: FAIL
 
+  [Test that the child iframe navigation is not allowed]
+    expected: FAIL
+
--- a/testing/web-platform/meta/content-security-policy/navigate-to/href-location-blocked.sub.html.ini
+++ b/testing/web-platform/meta/content-security-policy/navigate-to/href-location-blocked.sub.html.ini
@@ -1,4 +1,7 @@
 [href-location-blocked.sub.html]
   [Violation report status OK.]
     expected: FAIL
 
+  [Test that the child iframe navigation is not allowed]
+    expected: FAIL
+
--- a/testing/web-platform/meta/content-security-policy/navigate-to/href-location-cross-origin-blocked.sub.html.ini
+++ b/testing/web-platform/meta/content-security-policy/navigate-to/href-location-cross-origin-blocked.sub.html.ini
@@ -1,7 +1,10 @@
 [href-location-cross-origin-blocked.sub.html]
   [Test that the child iframe navigation is blocked]
     expected: FAIL
 
   [Violation report status OK.]
     expected: FAIL
 
+  [Test that the child iframe navigation is not allowed]
+    expected: FAIL
+
--- a/testing/web-platform/meta/content-security-policy/navigate-to/href-location-redirected-blocked.sub.html.ini
+++ b/testing/web-platform/meta/content-security-policy/navigate-to/href-location-redirected-blocked.sub.html.ini
@@ -1,7 +1,10 @@
 [href-location-redirected-blocked.sub.html]
   [Test that the child iframe navigation is blocked]
     expected: FAIL
 
   [Violation report status OK.]
     expected: FAIL
 
+  [Test that the child iframe navigation is not allowed]
+    expected: FAIL
+
--- a/testing/web-platform/meta/content-security-policy/navigate-to/link-click-blocked.sub.html.ini
+++ b/testing/web-platform/meta/content-security-policy/navigate-to/link-click-blocked.sub.html.ini
@@ -1,4 +1,7 @@
 [link-click-blocked.sub.html]
   [Violation report status OK.]
     expected: FAIL
 
+  [Test that the child iframe navigation is not allowed]
+    expected: FAIL
+
--- a/testing/web-platform/meta/content-security-policy/navigate-to/link-click-cross-origin-blocked.sub.html.ini
+++ b/testing/web-platform/meta/content-security-policy/navigate-to/link-click-cross-origin-blocked.sub.html.ini
@@ -1,7 +1,10 @@
 [link-click-cross-origin-blocked.sub.html]
   [Test that the child iframe navigation is blocked]
     expected: FAIL
 
   [Violation report status OK.]
     expected: FAIL
 
+  [Test that the child iframe navigation is not allowed]
+    expected: FAIL
+
--- a/testing/web-platform/meta/content-security-policy/navigate-to/link-click-redirected-blocked.sub.html.ini
+++ b/testing/web-platform/meta/content-security-policy/navigate-to/link-click-redirected-blocked.sub.html.ini
@@ -1,7 +1,10 @@
 [link-click-redirected-blocked.sub.html]
   [Test that the child iframe navigation is blocked]
     expected: FAIL
 
   [Violation report status OK.]
     expected: FAIL
 
+  [Test that the child iframe navigation is not allowed]
+    expected: FAIL
+
--- a/testing/web-platform/meta/content-security-policy/navigate-to/meta-refresh-blocked.sub.html.ini
+++ b/testing/web-platform/meta/content-security-policy/navigate-to/meta-refresh-blocked.sub.html.ini
@@ -1,4 +1,7 @@
 [meta-refresh-blocked.sub.html]
   [Violation report status OK.]
     expected: FAIL
 
+  [Test that the child iframe navigation is not allowed]
+    expected: FAIL
+
--- a/testing/web-platform/meta/content-security-policy/navigate-to/meta-refresh-cross-origin-blocked.sub.html.ini
+++ b/testing/web-platform/meta/content-security-policy/navigate-to/meta-refresh-cross-origin-blocked.sub.html.ini
@@ -1,7 +1,10 @@
 [meta-refresh-cross-origin-blocked.sub.html]
   [Test that the child iframe navigation is blocked]
     expected: FAIL
 
   [Violation report status OK.]
     expected: FAIL
 
+  [Test that the child iframe navigation is not allowed]
+    expected: FAIL
+
--- a/testing/web-platform/meta/content-security-policy/navigate-to/meta-refresh-redirected-blocked.sub.html.ini
+++ b/testing/web-platform/meta/content-security-policy/navigate-to/meta-refresh-redirected-blocked.sub.html.ini
@@ -1,7 +1,10 @@
 [meta-refresh-redirected-blocked.sub.html]
   [Test that the child iframe navigation is blocked]
     expected: FAIL
 
   [Violation report status OK.]
     expected: FAIL
 
+  [Test that the child iframe navigation is not allowed]
+    expected: FAIL
+
--- a/testing/web-platform/meta/content-security-policy/navigate-to/parent-navigates-child-blocked.html.ini
+++ b/testing/web-platform/meta/content-security-policy/navigate-to/parent-navigates-child-blocked.html.ini
@@ -1,7 +1,10 @@
 [parent-navigates-child-blocked.html]
   [Test that the parent can't navigate the child because the relevant policy belongs to the navigation initiator (in this case the parent)]
     expected: FAIL
 
   [Violation report status OK.]
     expected: FAIL
 
+  [Test that the parent can't navigate the child because the relevant policy belongs to the navigation initiator (in this case the parent, which has the policy `navigate-to support/wait_for_navigation.html;`)]
+    expected: FAIL
+
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/content-security-policy/navigate-to/spv-only-sent-to-initiator.html.ini
@@ -0,0 +1,5 @@
+[spv-only-sent-to-initiator.html]
+  expected: TIMEOUT
+  [Test that no spv event is raised]
+    expected: NOTRUN
+
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/content-security-policy/navigate-to/unsafe-allow-redirects/blocked-end-of-chain.sub.html.ini
@@ -0,0 +1,4 @@
+[blocked-end-of-chain.sub.html]
+  [Test that the child iframe navigation is blocked]
+    expected: FAIL
+
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/content-security-policy/script-src/hash-always-converted-to-utf-8/iso-8859-1.html.ini
@@ -0,0 +1,4 @@
+[iso-8859-1.html]
+  [Should convert the script contents to UTF-8 before hashing]
+    expected: FAIL
+
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/content-security-policy/script-src/hash-always-converted-to-utf-8/iso-8859-3.html.ini
@@ -0,0 +1,4 @@
+[iso-8859-3.html]
+  [Should convert the script contents to UTF-8 before hashing]
+    expected: FAIL
+
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/content-security-policy/script-src/hash-always-converted-to-utf-8/iso-8859-7.html.ini
@@ -0,0 +1,4 @@
+[iso-8859-7.html]
+  [Should convert the script contents to UTF-8 before hashing]
+    expected: FAIL
+
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/content-security-policy/script-src/hash-always-converted-to-utf-8/iso-8859-9.html.ini
@@ -0,0 +1,4 @@
+[iso-8859-9.html]
+  [Should convert the script contents to UTF-8 before hashing]
+    expected: FAIL
+
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/css/css-scoping/host-context-specificity-001.html.ini
@@ -0,0 +1,2 @@
+[host-context-specificity-001.html]
+  expected: FAIL
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/css/css-scoping/host-context-specificity-002.html.ini
@@ -0,0 +1,2 @@
+[host-context-specificity-002.html]
+  expected: FAIL
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/css/css-scoping/host-context-specificity-003.html.ini
@@ -0,0 +1,2 @@
+[host-context-specificity-003.html]
+  expected: FAIL
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/css/css-scroll-anchoring/text-anchor-in-vertical-rl.html.ini
@@ -0,0 +1,4 @@
+[text-anchor-in-vertical-rl.html]
+  [Line at edge of scrollport shouldn't jump visually when content is inserted before]
+    expected: FAIL
+
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/css/css-text/text-indent/text-indent-percentage-002.html.ini
@@ -0,0 +1,2 @@
+[text-indent-percentage-002.html]
+  expected: FAIL
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/css/css-text/text-indent/text-indent-percentage-004.html.ini
@@ -0,0 +1,2 @@
+[text-indent-percentage-004.html]
+  expected: FAIL
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/css/css-text/white-space/control-chars-00D.html.ini
@@ -0,0 +1,2 @@
+[control-chars-00D.html]
+  expected: FAIL
--- a/testing/web-platform/meta/feature-policy/payment-allowed-by-feature-policy-attribute-redirect-on-load.https.sub.html.ini
+++ b/testing/web-platform/meta/feature-policy/payment-allowed-by-feature-policy-attribute-redirect-on-load.https.sub.html.ini
@@ -1,9 +1,11 @@
 [payment-allowed-by-feature-policy-attribute-redirect-on-load.https.sub.html]
+  disabled:
+    if not nightly_build: https://bugzilla.mozilla.org/show_bug.cgi?id=1495301
   [Feature-Policy allow="payment" allows same-origin relocation.]
     expected:
       if not e10s: FAIL
 
   [Feature-Policy allow="payment" disallows cross-origin relocation.]
     expected:
       if not e10s: FAIL
 
--- a/testing/web-platform/meta/feature-policy/payment-allowed-by-feature-policy-attribute.https.sub.html.ini
+++ b/testing/web-platform/meta/feature-policy/payment-allowed-by-feature-policy-attribute.https.sub.html.ini
@@ -1,8 +1,10 @@
 [payment-allowed-by-feature-policy-attribute.https.sub.html]
+  disabled:
+    if not nightly_build: https://bugzilla.mozilla.org/show_bug.cgi?id=1495301
   [Feature policy "payment" can be enabled in same-origin iframe using allow="payment" attribute]
     expected:
       if not e10s: FAIL
 
   [Feature policy "payment" can be enabled in cross-origin iframe using allow="payment" attribute]
     expected: FAIL
 
--- a/testing/web-platform/meta/feature-policy/payment-allowed-by-feature-policy.https.sub.html.ini
+++ b/testing/web-platform/meta/feature-policy/payment-allowed-by-feature-policy.https.sub.html.ini
@@ -1,9 +1,11 @@
 [payment-allowed-by-feature-policy.https.sub.html]
+  disabled:
+    if not nightly_build: https://bugzilla.mozilla.org/show_bug.cgi?id=1495301
   [Feature-Policy header {"payment" : ["*"\]} allows the top-level document.]
     expected:
       if not e10s: FAIL
 
   [Feature-Policy header {"payment" : ["*"\]} allows same-origin iframes.]
     expected:
       if not e10s: FAIL
 
--- a/testing/web-platform/meta/feature-policy/payment-default-feature-policy.https.sub.html.ini
+++ b/testing/web-platform/meta/feature-policy/payment-default-feature-policy.https.sub.html.ini
@@ -1,9 +1,11 @@
 [payment-default-feature-policy.https.sub.html]
+  disabled:
+    if not nightly_build: https://bugzilla.mozilla.org/show_bug.cgi?id=1495301
   [Default "payment" feature policy ["self"\] allows the top-level document.]
     expected:
       if not e10s: FAIL
 
   [Default "payment" feature policy ["self"\] allows same-origin iframes.]
     expected:
       if not e10s: FAIL
 
--- a/testing/web-platform/meta/feature-policy/payment-disabled-by-feature-policy.https.sub.html.ini
+++ b/testing/web-platform/meta/feature-policy/payment-disabled-by-feature-policy.https.sub.html.ini
@@ -1,5 +1,7 @@
 [payment-disabled-by-feature-policy.https.sub.html]
+  disabled:
+    if not nightly_build: https://bugzilla.mozilla.org/show_bug.cgi?id=1495301
   [Feature-Policy header {"payment" : [\]} disallows the top-level document.]
     expected:
       if not e10s: FAIL
 
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/feature-policy/reporting/encrypted-media-reporting.https.html.ini
@@ -0,0 +1,4 @@
+[encrypted-media-reporting.https.html]
+  [Encrypted Media report format]
+    expected: FAIL
+
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/feature-policy/reporting/picture-in-picture-reporting.html.ini
@@ -0,0 +1,4 @@
+[picture-in-picture-reporting.html]
+  [Picture-in-Picture Report Format]
+    expected: FAIL
+
deleted file mode 100644
--- a/testing/web-platform/meta/html/rendering/non-replaced-elements/the-fieldset-and-legend-elements/legend-block-margins.html.ini
+++ /dev/null
@@ -1,2 +0,0 @@
-[legend-block-margins.html]
-  expected: FAIL
--- a/testing/web-platform/meta/html/semantics/scripting-1/the-script-element/module/dynamic-import/string-compilation-integrity-classic.sub.html.ini
+++ b/testing/web-platform/meta/html/semantics/scripting-1/the-script-element/module/dynamic-import/string-compilation-integrity-classic.sub.html.ini
@@ -1,2 +1,20 @@
 [string-compilation-integrity-classic.sub.html]
-  expected: TIMEOUT
+  expected:
+    if debug: ERROR
+    if asan: ERROR
+    TIMEOUT
+  [setTimeout should fail to import]
+    expected: TIMEOUT
+
+  [the Function constructor should fail to import]
+    expected: NOTRUN
+
+  [eval should fail to import]
+    expected: FAIL
+
+  [reflected inline event handlers should fail to import]
+    expected: NOTRUN
+
+  [inline event handlers triggered via UA code should fail to import]
+    expected: NOTRUN
+
--- a/testing/web-platform/meta/html/semantics/scripting-1/the-script-element/module/dynamic-import/string-compilation-integrity-module.sub.html.ini
+++ b/testing/web-platform/meta/html/semantics/scripting-1/the-script-element/module/dynamic-import/string-compilation-integrity-module.sub.html.ini
@@ -1,2 +1,20 @@
 [string-compilation-integrity-module.sub.html]
-  expected: TIMEOUT
+  expected:
+    if debug: ERROR
+    if asan: ERROR
+    TIMEOUT
+  [setTimeout should fail to import]
+    expected: TIMEOUT
+
+  [the Function constructor should fail to import]
+    expected: NOTRUN
+
+  [eval should fail to import]
+    expected: FAIL
+
+  [reflected inline event handlers should fail to import]
+    expected: NOTRUN
+
+  [inline event handlers triggered via UA code should fail to import]
+    expected: NOTRUN
+
--- a/testing/web-platform/meta/mozilla-sync
+++ b/testing/web-platform/meta/mozilla-sync
@@ -1,2 +1,2 @@
-local: 728439c5ecb2ba750f2aed0f7cc13f9854c37313
-upstream: edee96b6462be67473a6263f7afca11607aac50f
+local: 1e4ae874837c1012d1d4c982008196cc3bb73e17
+upstream: f6bca7b6218f591edc1bcb87c9ab0837ca41970b
--- a/testing/web-platform/meta/payment-request/__dir__.ini
+++ b/testing/web-platform/meta/payment-request/__dir__.ini
@@ -1,1 +1,3 @@
 prefs: [dom.payments.request.enabled:true]
+disabled:
+  if not nightly_build: https://bugzilla.mozilla.org/show_bug.cgi?id=1495301
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/permissions/test-background-fetch-permission.html.ini
@@ -0,0 +1,7 @@
+[test-background-fetch-permission.html]
+  [Test background-fetch permission]
+    expected: FAIL
+
+  [Test Background Fetch Permission.]
+    expected: FAIL
+
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/picture-in-picture/request-picture-in-picture-twice.html.ini
@@ -0,0 +1,4 @@
+[request-picture-in-picture-twice.html]
+  [request Picture-in-Picture consumes user gesture]
+    expected: FAIL
+
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/webrtc/RTCPeerConnection-remote-track-mute.https.html.ini
@@ -0,0 +1,11 @@
+[RTCPeerConnection-remote-track-mute.https.html]
+  expected: TIMEOUT
+  [Changing transceiver direction to 'sendrecv' unmutes the remote track]
+    expected: NOTRUN
+
+  [pc.close() mutes remote tracks]
+    expected: NOTRUN
+
+  [Changing transceiver direction to 'inactive' mutes the remote track]
+    expected: TIMEOUT
+
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/webstorage/event_initstorageevent.html.ini
@@ -0,0 +1,10 @@
+[event_initstorageevent.html]
+  [initStorageEvent with 8 undefined arguments]
+    expected: FAIL
+
+  [initStorageEvent with 8 null arguments]
+    expected: FAIL
+
+  [initStorageEvent with 1 argument]
+    expected: FAIL
+
--- a/testing/web-platform/tests/.travis.yml
+++ b/testing/web-platform/tests/.travis.yml
@@ -1,95 +1,100 @@
 dist: trusty
 sudo: required
 language: python
 branches:
   only:
     - master
-addons:
-  hosts:
-    - web-platform.test
-    - www.web-platform.test
-    - www1.web-platform.test
-    - www2.web-platform.test
-    - xn--n8j6ds53lwwkrqhv28a.web-platform.test
-    - xn--lve-6lad.web-platform.test
 before_install:
   # This needs be sourced as it sets various env vars
   - . ./tools/ci/before_install.sh
 install:
   - ./tools/ci/install.sh
 matrix:
+  # The use of `if` conditionals to exclude jobs from master should align with
+  # jobs unconditionally listed by `./wpt test-jobs`, regardless of affected
+  # paths. (The reverse is not true, as the manifest job could run on PRs too.)
   fast_finish: true
   include:
     - name: "tag master + upload manifest"
+      if: type = push AND branch = master
       os: linux
       python: "2.7"
       env:
         - JOB=manifest_upload SCRIPT=tools/ci/ci_manifest.sh
         - secure: "FrlMkMZiwggnhJbLiLxZ4imtXxuzFNozty94g1mneMPEVLrnyhb6c/g2SwN37KKU0WSDlGTz26IYnFvo1ftfSOx+sjRz0HqwW7JnrXULKYo7jiPttIcmeJxlSVeW9yS4blbLaBakytHjSnsf+za7bAaf1aS7RRAtAINgifA6Chg="
       deploy:
         provider: releases
         api_key:
           secure: "EljDx50oNpDLs7rzwIv+z1PxIgB5KMnx1W0OQkpNvltR0rBW9g/aQaE+Z/c8M/sPqN1bkvKPybKzGKjb6j9Dw3/EJhah4SskH78r3yMAe2DU/ngxqqjjfXcCc2t5MKxzHAILTAxqScPj2z+lG1jeK1Z+K5hTbSP9lk+AvS0D16w="
         file: $WPT_MANIFEST_FILE.gz
         skip_cleanup: true
     - name: "lint"
+      # lint is run both on master and on PRs
       os: linux
       python: "2.7"
       env: JOB=lint SCRIPT=tools/ci/ci_lint.sh
     - name: "update-built-tests.sh"
+      if: type = pull_request
       os: linux
       python: "2.7"
       env: JOB=update_built SCRIPT=tools/ci/ci_built_diff.sh
     - name: "build-css-testsuites.sh"
+      if: type = pull_request
       os: linux
       python: "2.7"
       env: JOB=build_css SCRIPT=css/build-css-testsuites.sh
     - name: "stability (Firefox Nightly)"
+      if: type = pull_request
       os: linux
       python: "2.7"
       addons:
         apt:
           packages:
             - libnss3-tools
       env:
         - JOB=stability SCRIPT=tools/ci/ci_stability.sh PRODUCT=firefox:nightly
     - name: "stability (Chrome Dev)"
+      if: type = pull_request
       os: linux
-      sudo: required
       python: "2.7"
       addons:
         apt:
           packages:
             - libappindicator1
             - fonts-liberation
       env:
         - JOB=stability SCRIPT=tools/ci/ci_stability.sh PRODUCT=chrome:dev
     - name: "tools/ unittests (Python 2)"
+      if: type = pull_request
       os: linux
       python: "2.7"
       env: JOB=tools_unittest TOXENV=py27 HYPOTHESIS_PROFILE=ci SCRIPT=tools/ci/ci_tools_unittest.sh
     - name: "tools/ unittests (Python 3)"
+      if: type = pull_request
       os: linux
       python: "3.6"
       env: JOB=tools_unittest TOXENV=py36 HYPOTHESIS_PROFILE=ci SCRIPT=tools/ci/ci_tools_unittest.sh
     - name: "tools/wpt/ unittests"
+      if: type = pull_request
       os: linux
       python: "2.7"
       addons:
         apt:
           packages:
             - libnss3-tools
       env: JOB=wpt_integration TOXENV=py27,py27-flake8 SCRIPT=tools/ci/ci_wpt.sh
     - name: "resources/ tests"
+      if: type = pull_request
       os: linux
       python: "2.7"
       env: JOB=resources_unittest TOXENV=py27 SCRIPT=tools/ci/ci_resources_unittest.sh
     - name: "infrastructure/ tests"
+      if: type = pull_request
       os: linux
       python: "2.7"
       env: JOB=wptrunner_infrastructure SCRIPT=tools/ci/ci_wptrunner_infrastructure.sh
       addons:
         apt:
           packages:
             - libnss3-tools
             - libappindicator1
deleted file mode 100644
--- a/testing/web-platform/tests/background-fetch/abort.https.window.js
+++ /dev/null
@@ -1,45 +0,0 @@
-// META: script=/service-workers/service-worker/resources/test-helpers.sub.js
-// META: script=resources/utils.js
-'use strict';
-
-// Covers basic functionality provided by BackgroundFetchManager.abort().
-// https://wicg.github.io/background-fetch/#background-fetch-registration-abort
-
-backgroundFetchTest(async (test, backgroundFetch) => {
-  const registration = await backgroundFetch.fetch(
-      uniqueId(),
-      ['resources/feature-name.txt', '/serviceworker/resources/slow-response.php']);
-
-  assert_true(await registration.abort());
-  assert_false(await registration.abort());
-
-}, 'Aborting the same registration twice fails');
-
-backgroundFetchTest(async (test, backgroundFetch) => {
-  const registration = await backgroundFetch.fetch(
-      uniqueId(),
-      ['resources/feature-name.txt', '/serviceworker/resources/slow-response.php']);
-  const resultPromise = getMessageFromServiceWorker();
-
-  await new Promise(resolve => {
-    // Run the following steps after the first request is complete.
-    registration.onprogress = async () => {
-      assert_true(await registration.abort());
-
-      const {type, eventRegistration, results} = await resultPromise;
-
-      assert_equals(eventRegistration.result, 'failure');
-      assert_equals(eventRegistration.failureReason, 'aborted');
-
-      assert_equals(type, 'backgroundfetchabort');
-      assert_equals(results.length, 1);
-
-      assert_true(results[0].url.includes('resources/feature-name.txt'));
-      assert_equals(results[0].status, 200);
-      assert_equals(results[0].text, 'Background Fetch');
-
-      resolve();
-    };
-  });
-
-}, 'Calling BackgroundFetchRegistration.abort sets the correct fields and responses are still available');
\ No newline at end of file
--- a/testing/web-platform/tests/background-fetch/get.https.window.js
+++ b/testing/web-platform/tests/background-fetch/get.https.window.js
@@ -21,16 +21,17 @@ promise_test(async test => {
   const registration = await serviceWorkerRegistration.backgroundFetch.get('x');
   assert_equals(registration, undefined);
 
 }, 'BackgroundFetchManager.get() does not require an activated worker');
 
 backgroundFetchTest(async (test, backgroundFetch) => {
   // The |id| parameter to the BackgroundFetchManager.get() method is required.
   await promise_rejects(test, new TypeError(), backgroundFetch.get());
+  await promise_rejects(test, new TypeError(), backgroundFetch.get(''));
 
   const registration = await backgroundFetch.get('my-id');
   assert_equals(registration, undefined);
 
 }, 'Getting non-existing registrations yields `undefined`');
 
 backgroundFetchTest(async (test, backgroundFetch) => {
   const registrationId = uniqueId();
--- a/testing/web-platform/tests/background-fetch/service_workers/sw.js
+++ b/testing/web-platform/tests/background-fetch/service_workers/sw.js
@@ -22,9 +22,8 @@ function handleBackgroundFetchUpdateEven
         const registrationCopy = cloneRegistration(event.registration);
         sendMessageToDocument(
           { type: event.type, eventRegistration: registrationCopy, results })
       }));
 }
 
 self.addEventListener('backgroundfetchsuccess', handleBackgroundFetchUpdateEvent);
 self.addEventListener('backgroundfetchfail', handleBackgroundFetchUpdateEvent);
-self.addEventListener('backgroundfetchabort', handleBackgroundFetchUpdateEvent);
deleted file mode 100644
--- a/testing/web-platform/tests/conformance-checkers/html/elements/input/pattern-asterisk-novalid.html
+++ /dev/null
@@ -1,4 +0,0 @@
-<!DOCTYPE html>
-<meta charset=utf-8>
-<title>invalid pattern</title>
-<p><input pattern='*'></p>
\ No newline at end of file
deleted file mode 100644
--- a/testing/web-platform/tests/conformance-checkers/html/elements/input/pattern-paren-novalid.html
+++ /dev/null
@@ -1,4 +0,0 @@
-<!DOCTYPE html>
-<meta charset=utf-8>
-<title>invalid pattern</title>
-<p><input pattern='('></p>
\ No newline at end of file
--- a/testing/web-platform/tests/conformance-checkers/messages.json
+++ b/testing/web-platform/tests/conformance-checkers/messages.json
@@ -962,18 +962,16 @@
     "html/elements/img/src/userinfo-backslash-novalid.html": "Bad value \u201chttp://a\\b:c\\d@foo.com\u201d for attribute \u201csrc\u201d on element \u201cimg\u201d: Bad URL: Backslash (\"\\\") used as path segment delimiter.",
     "html/elements/img/src/userinfo-password-bad-chars-novalid.html": "Bad value \u201chttp://&a:foo(b]c@d:2/\u201d for attribute \u201csrc\u201d on element \u201cimg\u201d: Bad URL: Illegal character in user or password: \u201c]\u201d is not allowed.",
     "html/elements/img/src/userinfo-password-contains-pile-of-poo-novalid.html": "Bad value \u201chttp://foo:\ud83d\udca9@example.com\u201d for attribute \u201csrc\u201d on element \u201cimg\u201d: Bad URL: Illegal character in user or password: \u201c\ud83d\udca9\u201d is not allowed.",
     "html/elements/img/src/userinfo-username-contains-at-sign-novalid.html": "Bad value \u201chttp://::@c@d:2\u201d for attribute \u201csrc\u201d on element \u201cimg\u201d: Bad URL: User or password contains an at symbol (\"@\") not percent-encoded.",
     "html/elements/img/src/userinfo-username-contains-pile-of-poo-novalid.html": "Bad value \u201chttp://\ud83d\udca9:foo@example.com\u201d for attribute \u201csrc\u201d on element \u201cimg\u201d: Bad URL: Illegal character in user or password: \u201c\ud83d\udca9\u201d is not allowed.",
     "html/elements/img/usemap-bad-value-novalid.html": "Bad value \u201c#\u201d for attribute \u201cusemap\u201d on element \u201cimg\u201d: Bad hash-name reference: A hash-name reference must have at least one character after \u201c#\u201d.",
     "html/elements/img/width-height-negative-novalid.html": "Bad value \u201c-1\u201d for attribute \u201cwidth\u201d on element \u201cimg\u201d: Bad non-negative integer: Expected a digit but saw \u201c-\u201d instead.",
     "html/elements/input/list-novalid.html": "The \u201clist\u201d attribute of the \u201cinput\u201d element must refer to a \u201cdatalist\u201d element.",
-    "html/elements/input/pattern-asterisk-novalid.html": "Bad value \u201c*\u201d for attribute \u201cpattern\u201d on element \u201cinput\u201d: Bad pattern: Dangling meta character '*' near index 0",
-    "html/elements/input/pattern-paren-novalid.html": "Bad value \u201c(\u201d for attribute \u201cpattern\u201d on element \u201cinput\u201d: Bad pattern: Unclosed group near index 1",
     "html/elements/input/type-image-formaction-empty-novalid.html": "Bad value \u201c\u201d for attribute \u201cformaction\u201d on element \u201cinput\u201d: Bad URL: Must be non-empty.",
     "html/elements/input/type-image-formaction-whitespace-only-novalid.html": "Bad value \u201c\t \n\u201d for attribute \u201cformaction\u201d on element \u201cinput\u201d: Bad URL: Must be non-empty.",
     "html/elements/input/type-image-formaction/fragment-backslash-novalid.html": "Bad value \u201c#\\\u201d for attribute \u201cformaction\u201d on element \u201cinput\u201d: Bad URL: Illegal character in fragment: \u201c\\\u201d is not allowed.",
     "html/elements/input/type-image-formaction/fragment-contains-hash-novalid.html": "Bad value \u201chttp://foo/path#f#g\u201d for attribute \u201cformaction\u201d on element \u201cinput\u201d: Bad URL: Illegal character in fragment: \u201c#\u201d is not allowed.",
     "html/elements/input/type-image-formaction/fragment-leading-space-novalid.html": "Bad value \u201chttp://f:21/b# e\u201d for attribute \u201cformaction\u201d on element \u201cinput\u201d: Bad URL: Illegal character in fragment: space is not allowed.",
     "html/elements/input/type-image-formaction/host-cr-novalid.html": "Bad value \u201chttp://example.\norg\u201d for attribute \u201cformaction\u201d on element \u201cinput\u201d: Bad URL: Tab, new line or carriage return found.",
     "html/elements/input/type-image-formaction/host-double-percent-encoded-novalid.html": "Bad value \u201chttp://\uff05\uff14\uff11.com\u201d for attribute \u201cformaction\u201d on element \u201cinput\u201d: Bad URL: Invalid host: Illegal character in domain: \u201c%\u201d is not allowed.",
     "html/elements/input/type-image-formaction/host-double-percent-encoded-percent-encoded-novalid.html": "Bad value \u201chttp://%ef%bc%85%ef%bc%94%ef%bc%91.com\u201d for attribute \u201cformaction\u201d on element \u201cinput\u201d: Bad URL: Invalid host: Illegal character in domain: \u201c%\u201d is not allowed.",
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/content-security-policy/generic/directive-name-case-insensitive.sub.html
@@ -0,0 +1,32 @@
+<!DOCTYPE html>
+<html>
+
+<head>
+    <meta http-equiv="Content-Security-Policy" content="
+      IMg-sRC 'self' 'unsafe-inline' http://{{domains[www1]}}:{{ports[http][0]}};
+      img-src 'self' 'unsafe-inline' http://{{domains[www2]}}:{{ports[http][0]}};">
+    <script src="/resources/testharness.js"></script>
+    <script src="/resources/testharnessreport.js"></script>
+</head>
+
+<body>
+  <script>
+    var t1 = async_test("Test that the www1 image is allowed to load");
+    var t2 = async_test("Test that the www2 image is not allowed to load");
+    var t_spv = async_test("Test that the www2 image throws a violation event");
+    window.addEventListener("securitypolicyviolation", t_spv.step_func_done(function(e) {
+      assert_equals(e.violatedDirective, "img-src");
+      assert_equals(e.blockedURI, "http://{{domains[www2]}}:{{ports[http][0]}}/content-security-policy/support/fail.png");
+    }));
+  </script>
+
+  <img src="http://{{domains[www1]}}:{{ports[http][0]}}/content-security-policy/support/pass.png"
+       onload="t1.done();"
+       onerror="t1.step(function() { assert_unreached('www1 image should have loaded'); t1.done(); });">
+
+  <img src="http://{{domains[www2]}}:{{ports[http][0]}}/content-security-policy/support/fail.png"
+       onerror="t2.done();"
+       onload="t2.step(function() { assert_unreached('www2 image should not have loaded'); t2.done(); });">
+</body>
+
+</html>
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/content-security-policy/inheritance/blob-url-self-navigate-inherits.sub.html
@@ -0,0 +1,23 @@
+<!DOCTYPE html>
+<html>
+
+<head>
+    <script nonce="abc" src="/resources/testharness.js"></script>
+    <script nonce="abc" src="/resources/testharnessreport.js"></script>
+</head>
+
+<!-- This tests that navigating a main window to a local scheme preserves the current CSP.
+     We need to test this in a main window with no parent/opener so we use
+     a link with target=_blank and rel=noopener. -->
+<body>
+    <script>
+      const a = document.createElement("a")
+      a.href = "support/navigate-self-to-blob.html?csp=script-src%20%27nonce-abc%27&report_id={{$id:uuid()}}";
+      a.target = "_blank"
+      a.rel = "noopener"
+      a.click()
+    </script>
+    <script async defer src='../support/checkReport.sub.js?reportField=violated-directive&reportValue=script-src%20%27nonce-abc%27&reportID={{$id}}'></script>
+</body>
+
+</html>
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/content-security-policy/inheritance/support/navigate-self-to-blob.html
@@ -0,0 +1,6 @@
+<script nonce="abc">
+  var blob_string = "<script>alert(document.domain)<\/script>";
+  var blob = new Blob([blob_string], {type : 'text/html'});
+  var url = URL.createObjectURL(blob);
+  location.href=url;
+</script>
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/content-security-policy/inheritance/support/navigate-self-to-blob.html.sub.headers
@@ -0,0 +1,4 @@
+Expires: Mon, 26 Jul 1997 05:00:00 GMT
+Cache-Control: no-store, no-cache, must-revalidate
+Pragma: no-cache
+Content-Security-Policy: {{GET[csp]}}; report-uri http://{{host}}:{{ports[http][0]}}/content-security-policy/support/report.py?op=put&reportID={{GET[report_id]}}
--- a/testing/web-platform/tests/content-security-policy/navigate-to/child-navigates-parent-allowed.html
+++ b/testing/web-platform/tests/content-security-policy/navigate-to/child-navigates-parent-allowed.html
@@ -1,19 +1,18 @@
 <!DOCTYPE html>
 
 <head>
-<meta name="timeout" content="long">
 <script src="/resources/testharness.js"></script>
 <script src="/resources/testharnessreport.js"></script>
 </head>
 
 <body>
 <script>
-  var t = async_test("Test that the child can navigate the parent because the relevant policy belongs to the navigation initiator (in this case the child)");
+  var t = async_test("Test that the child can navigate the parent because the relevant policy belongs to the navigation initiator (in this case the child, which has the policy `navigate-to 'self'`)");
   window.onmessage = t.step_func_done(function(e) {
     assert_equals(e.data.result, 'success');
   });
 </script>
 
 <iframe srcdoc="<iframe src='support/navigate_parent.sub.html?csp=navigate-to%20%27self%27'>">
 
 </body>
--- a/testing/web-platform/tests/content-security-policy/navigate-to/child-navigates-parent-blocked.sub.html
+++ b/testing/web-platform/tests/content-security-policy/navigate-to/child-navigates-parent-blocked.sub.html
@@ -1,12 +1,19 @@
 <!DOCTYPE html>
 
 <head>
 <script src="/resources/testharness.js"></script>
 <script src="/resources/testharnessreport.js"></script>
 </head>
 
 <body>
+<script>
+  var t = async_test("Test that the child can't navigate the parent because the relevant policy belongs to the navigation initiator (in this case the child which has the policy `navigate-to 'none'`)");
+  window.onmessage = t.step_func_done(function(e) {
+    assert_equals(e.data.result, 'fail');
+    assert_equals(e.data.violatedDirective, 'navigate-to');
+  });
+</script>
 <iframe srcdoc="<iframe src='support/navigate_parent.sub.html?csp=navigate-to%20%27none%27&report_id={{$id:uuid()}}'>"></iframe>
 
 <script async defer src='../support/checkReport.sub.js?reportField=violated-directive&reportValue=navigate-to%20%27none%27&reportID={{$id}}'></script>
-</body>
\ No newline at end of file
+</body>
--- a/testing/web-platform/tests/content-security-policy/navigate-to/form-action/form-action-allows-navigate-to-allows.html
+++ b/testing/web-platform/tests/content-security-policy/navigate-to/form-action/form-action-allows-navigate-to-allows.html
@@ -7,10 +7,10 @@
 
 <body>
 <script>
   var t = async_test("Test that form-action overrides navigate-to when present.");
   window.onmessage = t.step_func_done(function(e) {
     assert_equals(e.data.result, 'success');
   });
 </script>
-<iframe src="../support/form_action_navigation.sub.html?csp=navigate-to%20%27self%27%3B%20form-action%20%27self%27%3B&action=post_message_to_frame_owner.html">
+<iframe src="../support/form_action_navigation.sub.html?csp=navigate-to%20%27self%27%3B%20form-action%20%27self%27%3B&action=post_message_to_frame_owner.html&report_id=dummy">
 </body>
\ No newline at end of file
--- a/testing/web-platform/tests/content-security-policy/navigate-to/form-action/form-action-allows-navigate-to-blocks.html
+++ b/testing/web-platform/tests/content-security-policy/navigate-to/form-action/form-action-allows-navigate-to-blocks.html
@@ -7,10 +7,10 @@
 
 <body>
 <script>
   var t = async_test("Test that form-action overrides navigate-to when present.");
   window.onmessage = t.step_func_done(function(e) {
     assert_equals(e.data.result, 'success');
   });
 </script>
-<iframe src="../support/form_action_navigation.sub.html?csp=navigate-to%20%27none%27%3B%20form-action%20%27self%27%3B&action=post_message_to_frame_owner.html">
+<iframe src="../support/form_action_navigation.sub.html?csp=navigate-to%20%27none%27%3B%20form-action%20%27self%27%3B&action=post_message_to_frame_owner.html&report_id=dummy">
 </body>
\ No newline at end of file
--- a/testing/web-platform/tests/content-security-policy/navigate-to/form-action/form-action-blocks-navigate-to-allows.html
+++ b/testing/web-platform/tests/content-security-policy/navigate-to/form-action/form-action-blocks-navigate-to-allows.html
@@ -8,10 +8,10 @@
 <body>
 <script>
   var t = async_test("Test that form-action overrides navigate-to when present.");
   window.onmessage = t.step_func_done(function(e) {
     assert_equals(e.data.result, 'fail');
     assert_equals(e.data.violatedDirective, 'form-action');
   });
 </script>
-<iframe src="../support/form_action_navigation.sub.html?csp=navigate-to%20%27self%27%3B%20form-action%20%27none%27%3B&action=post_message_to_frame_owner.html">
+<iframe src="../support/form_action_navigation.sub.html?csp=navigate-to%20%27self%27%3B%20form-action%20%27none%27%3B&action=post_message_to_frame_owner.html&report_id=dummy">
 </body>
\ No newline at end of file
--- a/testing/web-platform/tests/content-security-policy/navigate-to/form-action/form-action-blocks-navigate-to-blocks.html
+++ b/testing/web-platform/tests/content-security-policy/navigate-to/form-action/form-action-blocks-navigate-to-blocks.html
@@ -8,10 +8,10 @@
 <body>
 <script>
   var t = async_test("Test that form-action overrides navigate-to when present.");
   window.onmessage = t.step_func_done(function(e) {
     assert_equals(e.data.result, 'fail');
     assert_equals(e.data.violatedDirective, 'form-action');
   });
 </script>
-<iframe src="../support/form_action_navigation.sub.html?csp=navigate-to%20%27none%27%3B%20form-action%20%27none%27%3B&action=post_message_to_frame_owner.html">
+<iframe src="../support/form_action_navigation.sub.html?csp=navigate-to%20%27none%27%3B%20form-action%20%27none%27%3B&action=post_message_to_frame_owner.html&report_id=dummy">
 </body>
\ No newline at end of file
--- a/testing/web-platform/tests/content-security-policy/navigate-to/form-blocked.sub.html
+++ b/testing/web-platform/tests/content-security-policy/navigate-to/form-blocked.sub.html
@@ -1,12 +1,19 @@
 <!DOCTYPE html>
 
 <head>
 <script src="/resources/testharness.js"></script>
 <script src="/resources/testharnessreport.js"></script>
 </head>
 
 <body>
+<script>
+  var t = async_test("Test that the child iframe navigation is not allowed");
+  window.onmessage = t.step_func_done(function(e) {
+    assert_equals(e.data.result, 'fail');
+    assert_equals(e.data.violatedDirective, 'navigate-to');
+  });
+</script>
 <iframe src="support/form_action_navigation.sub.html?csp=navigate-to%20%27none%27&report_id={{$id:uuid()}}&action=post_message_to_frame_owner.html"></iframe>
 
 <script async defer src='../support/checkReport.sub.js?reportField=violated-directive&reportValue=navigate-to%20%27none%27&reportID={{$id}}'></script>
 </body>
\ No newline at end of file
--- a/testing/web-platform/tests/content-security-policy/navigate-to/form-cross-origin-blocked.sub.html
+++ b/testing/web-platform/tests/content-security-policy/navigate-to/form-cross-origin-blocked.sub.html
@@ -1,12 +1,19 @@
 <!DOCTYPE html>
 
 <head>
 <script src="/resources/testharness.js"></script>
 <script src="/resources/testharnessreport.js"></script>
 </head>
 
 <body>
+<script>
+  var t = async_test("Test that the child iframe navigation is not allowed");
+  window.onmessage = t.step_func_done(function(e) {
+    assert_equals(e.data.result, 'fail');
+    assert_equals(e.data.violatedDirective, 'navigate-to');
+  });
+</script>
 <iframe src="support/form_action_navigation.sub.html?csp=navigate-to%20%27self%27&report_id={{$id:uuid()}}&action=http%3A%2F%2F{{domains[www1]}}:{{ports[http][0]}}%2Fcontent-security-policy%2Fnavigate-to%2Fsupport%2Fpost_message_to_frame_owner.html"></iframe>
 
 <script async defer src='../support/checkReport.sub.js?reportField=violated-directive&reportValue=navigate-to%20%27self%27&reportID={{$id}}'></script>
 </body>
\ No newline at end of file
--- a/testing/web-platform/tests/content-security-policy/navigate-to/form-redirected-blocked.sub.html
+++ b/testing/web-platform/tests/content-security-policy/navigate-to/form-redirected-blocked.sub.html
@@ -1,12 +1,20 @@
 <!DOCTYPE html>
 
 <head>
 <script src="/resources/testharness.js"></script>
 <script src="/resources/testharnessreport.js"></script>
 </head>
 
 <body>
+<script>
+  var t = async_test("Test that the child iframe navigation is not allowed");
+  window.onmessage = t.step_func_done(function(e) {
+    assert_equals(e.data.result, 'fail');
+    assert_equals(e.data.violatedDirective, 'navigate-to');
+  });
+</script>
+
 <iframe src="support/form_action_navigation.sub.html?csp=navigate-to%20%27self%27&report_id={{$id:uuid()}}&action=redirect_to_post_message_to_frame_owner.py%3Flocation%3Dhttp%3A%2F%2F{{domains[www1]}}%3A{{ports[http][0]}}%2Fcontent-security-policy%2Fnavigate-to%2Fsupport%2Fpost_message_to_frame_owner.html"></iframe>
 
 <script async defer src='../support/checkReport.sub.js?reportField=violated-directive&reportValue=navigate-to%20%27self%27&reportID={{$id}}'></script>
 </body>
\ No newline at end of file
--- a/testing/web-platform/tests/content-security-policy/navigate-to/href-location-blocked.sub.html
+++ b/testing/web-platform/tests/content-security-policy/navigate-to/href-location-blocked.sub.html
@@ -2,13 +2,19 @@
 
 <head>
 <script src="/resources/testharness.js"></script>
 <script src="/resources/testharnessreport.js"></script>
 </head>
 
 <body>
 <script>
+  var t = async_test("Test that the child iframe navigation is not allowed");
+  window.onmessage = t.step_func_done(function(e) {
+    assert_equals(e.data.result, 'fail');
+    assert_equals(e.data.violatedDirective, 'navigate-to');
+  });
+
   window.open("support/href_location_navigation.sub.html?csp=navigate-to%20%27none%27&report_id={{$id:uuid()}}&target=post_message_to_frame_owner.html", "_blank");
 </script>
 
 <script async defer src='../support/checkReport.sub.js?reportField=violated-directive&reportValue=navigate-to%20%27none%27&reportID={{$id}}'></script>
 </body>
\ No newline at end of file
--- a/testing/web-platform/tests/content-security-policy/navigate-to/href-location-cross-origin-blocked.sub.html
+++ b/testing/web-platform/tests/content-security-policy/navigate-to/href-location-cross-origin-blocked.sub.html
@@ -2,13 +2,19 @@
 
 <head>
 <script src="/resources/testharness.js"></script>
 <script src="/resources/testharnessreport.js"></script>
 </head>
 
 <body>
 <script>
+  var t = async_test("Test that the child iframe navigation is not allowed");
+  window.onmessage = t.step_func_done(function(e) {
+    assert_equals(e.data.result, 'fail');
+    assert_equals(e.data.violatedDirective, 'navigate-to');
+  });
+
   window.open("support/href_location_navigation.sub.html?csp=navigate-to%20%27self%27&report_id={{$id:uuid()}}&target=http%3A%2F%2F{{domains[www1]}}:{{ports[http][0]}}%2Fcontent-security-policy%2Fnavigate-to%2Fsupport%2Fpost_message_to_frame_owner.html", "_blank");
 </script>
 
 <script async defer src='../support/checkReport.sub.js?reportField=violated-directive&reportValue=navigate-to%20%27self%27&repo