Merge autoland to central, a=merge
author Wes Kocher <wkocher@mozilla.com>
Thu, 01 Dec 2016 15:07:11 -0800
changeset 324947 9b8bf5feb0b52aa4b03aa5fa3d4f0727b2974663
parent 324911 3853c539a1b7c803f1075d2c3ecefbdd314af1d8 (current diff)
parent 324946 aa1833690db2659c17144b0d5e5b1900fe1a1af0 (diff)
child 325034 b49684127ce464141b0a989cd621cb4794b6a85f
push id 24
push user maklebus@msu.edu
push date Tue, 20 Dec 2016 03:11:33 +0000
reviewers merge
milestone 53.0a1
--- a/browser/base/content/urlbarBindings.xml
+++ b/browser/base/content/urlbarBindings.xml
@@ -1254,94 +1254,16 @@ file, You can obtain one at http://mozil
         Cc["@mozilla.org/widget/clipboardhelper;1"]
           .getService(Ci.nsIClipboardHelper)
           .copyStringToClipboard(val, Ci.nsIClipboard.kSelectionClipboard);
       ]]></handler>
     </handlers>
 
   </binding>
 
-  <!-- Note: this binding is applied to the autocomplete popup used in web page content and extended in search.xml for the searchbar. -->
-  <binding id="browser-autocomplete-result-popup" extends="chrome://global/content/bindings/autocomplete.xml#autocomplete-result-popup">
-    <implementation>
-      <field name="AppConstants" readonly="true">
-        (Components.utils.import("resource://gre/modules/AppConstants.jsm", {})).AppConstants;
-      </field>
-
-      <method name="openAutocompletePopup">
-        <parameter name="aInput"/>
-        <parameter name="aElement"/>
-        <body>
-          <![CDATA[
-          // initially the panel is hidden
-          // to avoid impacting startup / new window performance
-          aInput.popup.hidden = false;
-
-          // this method is defined on the base binding
-          this._openAutocompletePopup(aInput, aElement);
-        ]]></body>
-      </method>
-
-      <method name="onPopupClick">
-        <parameter name="aEvent"/>
-        <body><![CDATA[
-          // Ignore all right-clicks
-          if (aEvent.button == 2)
-            return;
-
-          var controller = this.view.QueryInterface(Components.interfaces.nsIAutoCompleteController);
-
-          var searchBar = BrowserSearch.searchBar;
-          var popupForSearchBar = searchBar && searchBar.textbox == this.mInput;
-          if (popupForSearchBar) {
-            searchBar.telemetrySearchDetails = {
-              index: controller.selection.currentIndex,
-              kind: "mouse"
-            };
-          }
-
-          // Check for unmodified left-click, and use default behavior
-          if (aEvent.button == 0 && !aEvent.shiftKey && !aEvent.ctrlKey &&
-              !aEvent.altKey && !aEvent.metaKey) {
-            controller.handleEnter(true, aEvent);
-            return;
-          }
-
-          // Check for middle-click or modified clicks on the search bar
-          if (popupForSearchBar) {
-            // Handle search bar popup clicks
-            var search = controller.getValueAt(this.selectedIndex);
-
-            // close the autocomplete popup and revert the entered search term
-            this.closePopup();
-            controller.handleEscape();
-
-            // open the search results according to the clicking subtlety
-            var where = whereToOpenLink(aEvent, false, true);
-            let params = {};
-
-            // But open ctrl/cmd clicks on autocomplete items in a new background tab.
-            let modifier = this.AppConstants.platform == "macosx" ?
-                           aEvent.metaKey :
-                           aEvent.ctrlKey;
-            if (where == "tab" && (aEvent instanceof MouseEvent) &&
-                (aEvent.button == 1 || modifier))
-              params.inBackground = true;
-
-            searchBar.doSearch(search, where, null, params);
-            if (where == "tab" && params.inBackground)
-              searchBar.focus();
-            else
-              searchBar.value = search;
-          }
-        ]]></body>
-      </method>
-    </implementation>
-  </binding>
-
   <binding id="urlbar-rich-result-popup" extends="chrome://global/content/bindings/autocomplete.xml#autocomplete-rich-result-popup">
 
     <resources>
       <stylesheet src="chrome://browser/content/search/searchbarBindings.css"/>
       <stylesheet src="chrome://browser/skin/searchbar.css"/>
     </resources>
 
     <content ignorekeys="true" level="top" consumeoutsideclicks="never"
--- a/browser/components/search/content/search.xml
+++ b/browser/components/search/content/search.xml
@@ -687,18 +687,18 @@
       -->
       <method name="openPopup">
         <body><![CDATA[
           var popup = this.popup;
           if (!popup.mPopupOpen) {
             // Initially the panel used for the searchbar (PopupSearchAutoComplete
             // in browser.xul) is hidden to avoid impacting startup / new
             // window performance. The base binding's openPopup would normally
-            // call the overriden openAutocompletePopup in urlbarBindings.xml's
-            // browser-autocomplete-result-popup binding to unhide the popup,
+            // call the overridden openAutocompletePopup in
+            // browser-search-autocomplete-result-popup binding to unhide the popup,
             // but since we're overriding openPopup we need to unhide the panel
             // ourselves.
             popup.hidden = false;
 
             // Don't roll up on mouse click in the anchor for the search UI.
             if (popup.id == "PopupSearchAutoComplete") {
               popup.setAttribute("norolluponanchor", "true");
             }
@@ -899,17 +899,17 @@
           document.getBindingParent(this).openSuggestionsPanel();
         }
       ]]>
       </handler>
 
     </handlers>
   </binding>
 
-  <binding id="browser-search-autocomplete-result-popup" extends="chrome://browser/content/urlbarBindings.xml#browser-autocomplete-result-popup">
+  <binding id="browser-search-autocomplete-result-popup" extends="chrome://global/content/bindings/autocomplete.xml#autocomplete-result-popup">
     <resources>
       <stylesheet src="chrome://browser/content/search/searchbarBindings.css"/>
       <stylesheet src="chrome://browser/skin/searchbar.css"/>
     </resources>
     <content ignorekeys="true" level="top" consumeoutsideclicks="never">
       <xul:hbox anonid="searchbar-engine" xbl:inherits="showonlysettings"
                 class="search-panel-header search-panel-current-engine">
         <xul:image class="searchbar-engine-image" xbl:inherits="src"/>
@@ -922,16 +922,88 @@
         <xul:treecols anonid="treecols">
           <xul:treecol id="treecolAutoCompleteValue" class="autocomplete-treecol" flex="1" overflow="true"/>
         </xul:treecols>
         <xul:treechildren class="autocomplete-treebody"/>
       </xul:tree>
       <xul:vbox anonid="search-one-off-buttons" class="search-one-offs"/>
     </content>
     <implementation>
+      <field name="AppConstants" readonly="true">
+        (Components.utils.import("resource://gre/modules/AppConstants.jsm", {})).AppConstants;
+      </field>
+
+      <method name="openAutocompletePopup">
+        <parameter name="aInput"/>
+        <parameter name="aElement"/>
+        <body><![CDATA[
+          // initially the panel is hidden
+          // to avoid impacting startup / new window performance
+          aInput.popup.hidden = false;
+
+          // this method is defined on the base binding
+          this._openAutocompletePopup(aInput, aElement);
+        ]]></body>
+      </method>
+
+      <method name="onPopupClick">
+        <parameter name="aEvent"/>
+        <body><![CDATA[
+          // Ignore all right-clicks
+          if (aEvent.button == 2)
+            return;
+
+          var controller = this.view.QueryInterface(Components.interfaces.nsIAutoCompleteController);
+
+          var searchBar = BrowserSearch.searchBar;
+          var popupForSearchBar = searchBar && searchBar.textbox == this.mInput;
+          if (popupForSearchBar) {
+            searchBar.telemetrySearchDetails = {
+              index: controller.selection.currentIndex,
+              kind: "mouse"
+            };
+          }
+
+          // Check for unmodified left-click, and use default behavior
+          if (aEvent.button == 0 && !aEvent.shiftKey && !aEvent.ctrlKey &&
+              !aEvent.altKey && !aEvent.metaKey) {
+            controller.handleEnter(true, aEvent);
+            return;
+          }
+
+          // Check for middle-click or modified clicks on the search bar
+          if (popupForSearchBar) {
+            // Handle search bar popup clicks
+            var search = controller.getValueAt(this.selectedIndex);
+
+            // close the autocomplete popup and revert the entered search term
+            this.closePopup();
+            controller.handleEscape();
+
+            // open the search results according to the clicking subtlety
+            var where = whereToOpenLink(aEvent, false, true);
+            let params = {};
+
+            // But open ctrl/cmd clicks on autocomplete items in a new background tab.
+            let modifier = this.AppConstants.platform == "macosx" ?
+                           aEvent.metaKey :
+                           aEvent.ctrlKey;
+            if (where == "tab" && (aEvent instanceof MouseEvent) &&
+                (aEvent.button == 1 || modifier))
+              params.inBackground = true;
+
+            searchBar.doSearch(search, where, null, params);
+            if (where == "tab" && params.inBackground)
+              searchBar.focus();
+            else
+              searchBar.value = search;
+          }
+        ]]></body>
+      </method>
+
       <!-- Popup rollup is triggered by native events before the mousedown event
            reaches the DOM. The will be set to true by the popuphiding event and
            false after the mousedown event has been triggered to detect what
            caused rollup. -->
       <field name="_isHiding">false</field>
       <field name="_bundle">null</field>
       <property name="bundle" readonly="true">
         <getter>
--- a/devtools/client/inspector/layout/layout.js
+++ b/devtools/client/inspector/layout/layout.js
@@ -121,25 +121,25 @@ LayoutView.prototype = {
         }
       },
 
     });
 
     let provider = createElement(Provider, {
       store,
       id: "layoutview",
-      title: INSPECTOR_L10N.getStr("inspector.sidebar.layoutViewTitle"),
+      title: INSPECTOR_L10N.getStr("inspector.sidebar.layoutViewTitle2"),
       key: "layoutview",
     }, app);
 
     let defaultTab = Services.prefs.getCharPref("devtools.inspector.activeSidebar");
 
     this.inspector.addSidebarTab(
       "layoutview",
-      INSPECTOR_L10N.getStr("inspector.sidebar.layoutViewTitle"),
+      INSPECTOR_L10N.getStr("inspector.sidebar.layoutViewTitle2"),
       provider,
       defaultTab == "layoutview"
     );
   }),
 
   /**
    * Destruction function called when the inspector is destroyed. Removes event listeners
    * and cleans up references.
--- a/devtools/client/locales/en-US/inspector.properties
+++ b/devtools/client/locales/en-US/inspector.properties
@@ -316,20 +316,20 @@ inspector.sidebar.fontInspectorTitle=Fon
 inspector.sidebar.ruleViewTitle=Rules
 
 # LOCALIZATION NOTE (inspector.sidebar.computedViewTitle):
 # This is the title shown in a tab in the side panel of the Inspector panel
 # that corresponds to the tool displaying the list of computed CSS values
 # used in the page.
 inspector.sidebar.computedViewTitle=Computed
 
-# LOCALIZATION NOTE (inspector.sidebar.computedViewTitle):
+# LOCALIZATION NOTE (inspector.sidebar.layoutViewTitle2):
 # This is the title shown in a tab in the side panel of the Inspector panel
 # that corresponds to the tool displaying layout information defined in the page.
-inspector.sidebar.layoutViewTitle=Layout
+inspector.sidebar.layoutViewTitle2=Layout
 
 # LOCALIZATION NOTE (inspector.sidebar.animationInspectorTitle):
 # This is the title shown in a tab in the side panel of the Inspector panel
 # that corresponds to the tool displaying animations defined in the page.
 inspector.sidebar.animationInspectorTitle=Animations
 
 # LOCALIZATION NOTE (inspector.eyedropper.label): A string displayed as the tooltip of
 # a button in the inspector which toggles the Eyedropper tool
--- a/dom/base/nsContentUtils.cpp
+++ b/dom/base/nsContentUtils.cpp
@@ -3753,16 +3753,26 @@ nsContentUtils::IsPlainTextType(const ns
   return aContentType.EqualsLiteral(TEXT_PLAIN) ||
          aContentType.EqualsLiteral(TEXT_CSS) ||
          aContentType.EqualsLiteral(TEXT_CACHE_MANIFEST) ||
          aContentType.EqualsLiteral(TEXT_VTT) ||
          IsScriptType(aContentType);
 }
 
 bool
+nsContentUtils::IsUtf8OnlyPlainTextType(const nsACString& aContentType)
+{
+  // NOTE: This must be a subset of the list in IsPlainTextType().
+  return aContentType.EqualsLiteral(TEXT_CACHE_MANIFEST) ||
+         aContentType.EqualsLiteral(APPLICATION_JSON) ||
+         aContentType.EqualsLiteral(TEXT_JSON) ||
+         aContentType.EqualsLiteral(TEXT_VTT);
+}
+
+bool
 nsContentUtils::GetWrapperSafeScriptFilename(nsIDocument* aDocument,
                                              nsIURI* aURI,
                                              nsACString& aScriptURI,
                                              nsresult* aRv)
 {
   MOZ_ASSERT(aRv);
   bool scriptFileNameModified = false;
   *aRv = NS_OK;
--- a/dom/base/nsContentUtils.h
+++ b/dom/base/nsContentUtils.h
@@ -1021,26 +1021,32 @@ public:
   static bool IsChromeDoc(nsIDocument *aDocument);
 
   /**
    * Returns true if aDocument is in a docshell whose parent is the same type
    */
   static bool IsChildOfSameType(nsIDocument* aDoc);
 
   /**
-  '* Returns true if the content-type is any of the supported script types.
+   * Returns true if the content-type is any of the supported script types.
    */
   static bool IsScriptType(const nsACString& aContentType);
 
   /**
-  '* Returns true if the content-type will be rendered as plain-text.
+   * Returns true if the content-type will be rendered as plain-text.
    */
   static bool IsPlainTextType(const nsACString& aContentType);
 
   /**
+   * Returns true iff the type is rendered as plain text and doesn't support
+   * non-UTF-8 encodings.
+   */
+  static bool IsUtf8OnlyPlainTextType(const nsACString& aContentType);
+
+  /**
    * Get the script file name to use when compiling the script
    * referenced by aURI. In cases where there's no need for any extra
    * security wrapper automation the script file name that's returned
    * will be the spec in aURI, else it will be the spec in aDocument's
    * URI followed by aURI's spec, separated by " -> ". Returns true
    * if the script file name was modified, false if it's aURI's
    * spec.
    */
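
The new helper is documented as a strict subset of IsPlainTextType(): every UTF-8-only type is still rendered as plain text, but only the JSON, WebVTT and cache-manifest types get their encoding detection skipped. A minimal, self-contained sketch of that relationship (names and the abbreviated MIME lists below are illustrative, not the Gecko API):

#include <set>
#include <string>

// Stand-in for nsContentUtils::IsPlainTextType(); the script-type MIME list
// is abbreviated here.
static bool IsPlainTextTypeSketch(const std::string& aType) {
  static const std::set<std::string> kTypes = {
    "text/plain", "text/css", "text/cache-manifest", "text/vtt",
    "application/json", "text/json", "application/javascript"
  };
  return kTypes.count(aType) != 0;
}

// Stand-in for the new IsUtf8OnlyPlainTextType(). Must stay a subset of the
// list above: these types are always UTF-8, so the document loader can skip
// channel/meta charset detection for them.
static bool IsUtf8OnlyPlainTextTypeSketch(const std::string& aType) {
  static const std::set<std::string> kTypes = {
    "text/cache-manifest", "application/json", "text/json", "text/vtt"
  };
  return kTypes.count(aType) != 0;
}
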
--- a/dom/html/nsHTMLDocument.cpp
+++ b/dom/html/nsHTMLDocument.cpp
@@ -545,16 +545,19 @@ nsHTMLDocument::StartDocumentLoad(const 
   bool html = contentType.EqualsLiteral(TEXT_HTML);
   bool xhtml = !html && (contentType.EqualsLiteral(APPLICATION_XHTML_XML) || contentType.EqualsLiteral(APPLICATION_WAPXHTML_XML));
   bool plainText = !html && !xhtml && nsContentUtils::IsPlainTextType(contentType);
   if (!(html || xhtml || plainText || viewSource)) {
     MOZ_ASSERT(false, "Channel with bad content type.");
     return NS_ERROR_INVALID_ARG;
   }
 
+  bool forceUtf8 = plainText &&
+    nsContentUtils::IsUtf8OnlyPlainTextType(contentType);
+
   bool loadAsHtml5 = true;
 
   if (!viewSource && xhtml) {
       // We're parsing XHTML as XML, remember that.
       mType = eXHTML;
       mCompatMode = eCompatibility_FullStandards;
       loadAsHtml5 = false;
   }
@@ -664,17 +667,22 @@ nsHTMLDocument::StartDocumentLoad(const 
     executor = static_cast<nsHtml5TreeOpExecutor*> (mParser->GetContentSink());
     if (mReferrerPolicySet) {
       // CSP may have set the referrer policy, so a speculative parser should
       // start with the new referrer policy.
       executor->SetSpeculationReferrerPolicy(static_cast<ReferrerPolicy>(mReferrerPolicy));
     }
   }
 
-  if (!IsHTMLDocument() || !docShell) { // no docshell for text/html XHR
+  if (forceUtf8) {
+    charsetSource = kCharsetFromUtf8OnlyMime;
+    charset.AssignLiteral("UTF-8");
+    parserCharsetSource = charsetSource;
+    parserCharset = charset;
+  } else if (!IsHTMLDocument() || !docShell) { // no docshell for text/html XHR
     charsetSource = IsHTMLDocument() ? kCharsetFromFallback
                                      : kCharsetFromDocTypeDefault;
     charset.AssignLiteral("UTF-8");
     TryChannelCharset(aChannel, charsetSource, charset, executor);
     parserCharsetSource = charsetSource;
     parserCharset = charset;
   } else {
     NS_ASSERTION(docShell, "Unexpected null value");
@@ -3613,17 +3621,17 @@ nsHTMLDocument::DocAddSizeOfExcludingThi
 
 bool
 nsHTMLDocument::WillIgnoreCharsetOverride()
 {
   if (mType != eHTML) {
     MOZ_ASSERT(mType == eXHTML);
     return true;
   }
-  if (mCharacterSetSource == kCharsetFromByteOrderMark) {
+  if (mCharacterSetSource >= kCharsetFromByteOrderMark) {
     return true;
   }
   if (!EncodingUtils::IsAsciiCompatible(mCharacterSet)) {
     return true;
   }
   nsCOMPtr<nsIWyciwygChannel> wyciwyg = do_QueryInterface(mChannel);
   if (wyciwyg) {
     return true;
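
The WillIgnoreCharsetOverride() change relies on the charset-source constants forming an ordered priority scale, with the new kCharsetFromUtf8OnlyMime assumed to rank at least as high as kCharsetFromByteOrderMark, so a manual encoding override can never undo the forced UTF-8. A schematic sketch of that ordering (the enum values below are illustrative, not the real nsCharsetSource.h):

// Illustrative ordering only; the real values live in nsCharsetSource.h.
enum CharsetSourceSketch {
  kCharsetFromFallbackSketch = 0,
  kCharsetFromDocTypeDefaultSketch,
  kCharsetFromChannelSketch,
  kCharsetFromByteOrderMarkSketch,  // from here up, the user override is ignored
  kCharsetFromUtf8OnlyMimeSketch    // forced UTF-8 for JSON / WebVTT / cache manifests
};

static bool WillIgnoreCharsetOverrideSketch(int aCharsetSource) {
  // ">=" instead of "==": both the BOM case and the new UTF-8-only-MIME case
  // (and anything ranked above them) win over a manual charset override.
  return aCharsetSource >= kCharsetFromByteOrderMarkSketch;
}
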
new file mode 100644
--- /dev/null
+++ b/dom/html/reftests/741776-1-ref.html
@@ -0,0 +1,1 @@
+<meta charset=utf-8><pre>ää
new file mode 100644
--- /dev/null
+++ b/dom/html/reftests/741776-1.vtt
@@ -0,0 +1,1 @@
+ää
--- a/dom/html/reftests/reftest.list
+++ b/dom/html/reftests/reftest.list
@@ -22,16 +22,17 @@ include toblob-todataurl/reftest.list
 == 573322-no-quirks.html 573322-no-quirks-ref.html
 == 596455-1a.html 596455-ref-1.html
 == 596455-1b.html 596455-ref-1.html
 == 596455-2a.html 596455-ref-2.html
 == 596455-2b.html 596455-ref-2.html
 == 610935.html 610935-ref.html
 == 649134-1.html 649134-ref.html
 skip-if(Android) == 649134-2.html 649134-2-ref.html
+== 741776-1.vtt 741776-1-ref.html
 
 == bug448564-1_malformed.html bug448564-1_well-formed.html
 == bug448564-1_malformed.html bug448564-1_ideal.html
 
 == bug448564-4a.html          bug448564-4b.html
 == bug502168-1_malformed.html bug502168-1_well-formed.html
 
 == responsive-image-load-shortcircuit.html responsive-image-load-shortcircuit-ref.html
--- a/dom/media/AccurateSeekTask.cpp
+++ b/dom/media/AccurateSeekTask.cpp
@@ -35,23 +35,16 @@ AccurateSeekTask::AccurateSeekTask(const
                                    int64_t aCurrentMediaTime)
   : SeekTask(aDecoderID, aThread, aReader, aTarget)
   , mCurrentTimeBeforeSeek(media::TimeUnit::FromMicroseconds(aCurrentMediaTime))
   , mAudioRate(aInfo.mAudio.mRate)
   , mDoneAudioSeeking(!aInfo.HasAudio() || aTarget.IsVideoOnly())
   , mDoneVideoSeeking(!aInfo.HasVideo())
 {
   AssertOwnerThread();
-
-  // Bound the seek time to be inside the media range.
-  NS_ASSERTION(aEnd.ToMicroseconds() != -1, "Should know end time by now");
-  mTarget.SetTime(std::max(media::TimeUnit(), std::min(mTarget.GetTime(), aEnd)));
-
-  // Configure MediaDecoderReaderWrapper.
-  SetCallbacks();
 }
 
 AccurateSeekTask::~AccurateSeekTask()
 {
   AssertOwnerThread();
   MOZ_ASSERT(mIsDiscarded);
 }
 
@@ -60,17 +53,16 @@ AccurateSeekTask::Discard()
 {
   AssertOwnerThread();
 
   // Disconnect MDSM.
   RejectIfExist(NS_ERROR_DOM_MEDIA_CANCELED, __func__);
 
   // Disconnect MediaDecoderReaderWrapper.
   mSeekRequest.DisconnectIfExists();
-  CancelCallbacks();
 
   mIsDiscarded = true;
 }
 
 bool
 AccurateSeekTask::NeedToResetMDSM() const
 {
   AssertOwnerThread();
@@ -107,16 +99,169 @@ AccurateSeekTask::CalculateNewCurrentTim
     const int64_t videoGap = std::abs(videoStart - seekTime);
     return audioGap <= videoGap ? audioStart : videoStart;
   }
 
   MOZ_ASSERT(false, "AccurateSeekTask doesn't handle other seek types.");
   return 0;
 }
 
+void
+AccurateSeekTask::HandleAudioDecoded(MediaData* aAudio)
+{
+  AssertOwnerThread();
+  MOZ_ASSERT(!mSeekTaskPromise.IsEmpty(), "Seek shouldn't be finished");
+
+  RefPtr<MediaData> audio(aAudio);
+  MOZ_ASSERT(audio);
+
+  // The MDSM::mDecodedAudioEndTime will be updated once the whole SeekTask is
+  // resolved.
+
+  SAMPLE_LOG("OnAudioDecoded [%lld,%lld]", audio->mTime, audio->GetEndTime());
+
+  // Video-only seek doesn't reset audio decoder. There might be pending audio
+  // requests when AccurateSeekTask::Seek() begins. We will just store the data
+  // without checking |mDiscontinuity| or calling DropAudioUpToSeekTarget().
+  if (mTarget.IsVideoOnly()) {
+    mSeekedAudioData = audio.forget();
+    return;
+  }
+
+  AdjustFastSeekIfNeeded(audio);
+
+  if (mTarget.IsFast()) {
+    // Non-precise seek; we can stop the seek at the first sample.
+    mSeekedAudioData = audio;
+    mDoneAudioSeeking = true;
+  } else {
+    nsresult rv = DropAudioUpToSeekTarget(audio);
+    if (NS_FAILED(rv)) {
+      RejectIfExist(rv, __func__);
+      return;
+    }
+  }
+
+  if (!mDoneAudioSeeking) {
+    RequestAudioData();
+    return;
+  }
+  MaybeFinishSeek();
+}
+
+void
+AccurateSeekTask::HandleVideoDecoded(MediaData* aVideo, TimeStamp aDecodeStart)
+{
+  AssertOwnerThread();
+  MOZ_ASSERT(!mSeekTaskPromise.IsEmpty(), "Seek shouldn't be finished");
+
+  RefPtr<MediaData> video(aVideo);
+  MOZ_ASSERT(video);
+
+  // The MDSM::mDecodedVideoEndTime will be updated once the whole SeekTask is
+  // resolved.
+
+  SAMPLE_LOG("OnVideoDecoded [%lld,%lld]", video->mTime, video->GetEndTime());
+
+  AdjustFastSeekIfNeeded(video);
+
+  if (mTarget.IsFast()) {
+    // Non-precise seek. We can stop the seek at the first sample.
+    mSeekedVideoData = video;
+    mDoneVideoSeeking = true;
+  } else {
+    nsresult rv = DropVideoUpToSeekTarget(video.get());
+    if (NS_FAILED(rv)) {
+      RejectIfExist(rv, __func__);
+      return;
+    }
+  }
+
+  if (!mDoneVideoSeeking) {
+    RequestVideoData();
+    return;
+  }
+  MaybeFinishSeek();
+}
+
+void
+AccurateSeekTask::HandleNotDecoded(MediaData::Type aType, const MediaResult& aError)
+{
+  AssertOwnerThread();
+  MOZ_ASSERT(!mSeekTaskPromise.IsEmpty(), "Seek shouldn't be finished");
+
+  SAMPLE_LOG("OnNotDecoded type=%d reason=%u", aType, aError.Code());
+
+  // Ignore pending requests from video-only seek.
+  if (aType == MediaData::AUDIO_DATA && mTarget.IsVideoOnly()) {
+    return;
+  }
+
+  // If the decoder is waiting for data, we tell it to call us back when the
+  // data arrives.
+  if (aError == NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA) {
+    mReader->WaitForData(aType);
+    return;
+  }
+
+  if (aError == NS_ERROR_DOM_MEDIA_CANCELED) {
+    if (aType == MediaData::AUDIO_DATA) {
+      RequestAudioData();
+    } else {
+      RequestVideoData();
+    }
+    return;
+  }
+
+  if (aError == NS_ERROR_DOM_MEDIA_END_OF_STREAM) {
+    if (aType == MediaData::AUDIO_DATA) {
+      mIsAudioQueueFinished = true;
+      mDoneAudioSeeking = true;
+    } else {
+      mIsVideoQueueFinished = true;
+      mDoneVideoSeeking = true;
+      if (mFirstVideoFrameAfterSeek) {
+        // Hit the end of stream. Move mFirstVideoFrameAfterSeek into
+        // mSeekedVideoData so we have something to display after seeking.
+        mSeekedVideoData = mFirstVideoFrameAfterSeek.forget();
+      }
+    }
+    MaybeFinishSeek();
+    return;
+  }
+
+  // This is a decode error, delegate to the generic error path.
+  RejectIfExist(aError, __func__);
+}
+
+void
+AccurateSeekTask::HandleAudioWaited(MediaData::Type aType)
+{
+  AssertOwnerThread();
+
+  // Ignore pending requests from video-only seek.
+  if (mTarget.IsVideoOnly()) {
+    return;
+  }
+  RequestAudioData();
+}
+
+void
+AccurateSeekTask::HandleVideoWaited(MediaData::Type aType)
+{
+  AssertOwnerThread();
+  RequestVideoData();
+}
+
+void
+AccurateSeekTask::HandleNotWaited(const WaitForDataRejectValue& aRejection)
+{
+  AssertOwnerThread();
+}
+
 RefPtr<AccurateSeekTask::SeekTaskPromise>
 AccurateSeekTask::Seek(const media::TimeUnit& aDuration)
 {
   AssertOwnerThread();
 
   // Do the seek.
   mSeekRequest.Begin(mReader->Seek(mTarget, aDuration)
     ->Then(OwnerThread(), __func__, this,
@@ -310,195 +455,9 @@ AccurateSeekTask::AdjustFastSeekIfNeeded
     // seek and decode to the seek target. This is not conformant to the
     // spec, fastSeek should always be fast, but until we get the time to
     // change all Readers to seek to the keyframe after the currentTime
     // in this case, we'll just decode forward. Bug 1026330.
     mTarget.SetType(SeekTarget::Accurate);
   }
 }
 
-void
-AccurateSeekTask::OnAudioDecoded(MediaData* aAudioSample)
-{
-  AssertOwnerThread();
-  MOZ_ASSERT(!mSeekTaskPromise.IsEmpty(), "Seek shouldn't be finished");
-
-  RefPtr<MediaData> audio(aAudioSample);
-  MOZ_ASSERT(audio);
-
-  // The MDSM::mDecodedAudioEndTime will be updated once the whole SeekTask is
-  // resolved.
-
-  SAMPLE_LOG("OnAudioDecoded [%lld,%lld]", audio->mTime, audio->GetEndTime());
-
-  // Video-only seek doesn't reset audio decoder. There might be pending audio
-  // requests when AccurateSeekTask::Seek() begins. We will just store the data
-  // without checking |mDiscontinuity| or calling DropAudioUpToSeekTarget().
-  if (mTarget.IsVideoOnly()) {
-    mSeekedAudioData = audio.forget();
-    return;
-  }
-
-  AdjustFastSeekIfNeeded(audio);
-
-  if (mTarget.IsFast()) {
-    // Non-precise seek; we can stop the seek at the first sample.
-    mSeekedAudioData = audio;
-    mDoneAudioSeeking = true;
-  } else {
-    nsresult rv = DropAudioUpToSeekTarget(audio);
-    if (NS_FAILED(rv)) {
-      CancelCallbacks();
-      RejectIfExist(rv, __func__);
-      return;
-    }
-  }
-
-  if (!mDoneAudioSeeking) {
-    RequestAudioData();
-    return;
-  }
-  MaybeFinishSeek();
-}
-
-void
-AccurateSeekTask::OnNotDecoded(MediaData::Type aType,
-                               const MediaResult& aError)
-{
-  AssertOwnerThread();
-  MOZ_ASSERT(!mSeekTaskPromise.IsEmpty(), "Seek shouldn't be finished");
-
-  SAMPLE_LOG("OnNotDecoded type=%d reason=%u", aType, aError.Code());
-
-  // Ignore pending requests from video-only seek.
-  if (aType == MediaData::AUDIO_DATA && mTarget.IsVideoOnly()) {
-    return;
-  }
-
-  // If the decoder is waiting for data, we tell it to call us back when the
-  // data arrives.
-  if (aError == NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA) {
-    mReader->WaitForData(aType);
-    return;
-  }
-
-  if (aError == NS_ERROR_DOM_MEDIA_CANCELED) {
-    if (aType == MediaData::AUDIO_DATA) {
-      RequestAudioData();
-    } else {
-      RequestVideoData();
-    }
-    return;
-  }
-
-  if (aError == NS_ERROR_DOM_MEDIA_END_OF_STREAM) {
-    if (aType == MediaData::AUDIO_DATA) {
-      mIsAudioQueueFinished = true;
-      mDoneAudioSeeking = true;
-    } else {
-      mIsVideoQueueFinished = true;
-      mDoneVideoSeeking = true;
-      if (mFirstVideoFrameAfterSeek) {
-        // Hit the end of stream. Move mFirstVideoFrameAfterSeek into
-        // mSeekedVideoData so we have something to display after seeking.
-        mSeekedVideoData = mFirstVideoFrameAfterSeek.forget();
-      }
-    }
-    MaybeFinishSeek();
-    return;
-  }
-
-  // This is a decode error, delegate to the generic error path.
-  CancelCallbacks();
-  RejectIfExist(aError, __func__);
-}
-
-void
-AccurateSeekTask::OnVideoDecoded(MediaData* aVideoSample)
-{
-  AssertOwnerThread();
-  MOZ_ASSERT(!mSeekTaskPromise.IsEmpty(), "Seek shouldn't be finished");
-
-  RefPtr<MediaData> video(aVideoSample);
-  MOZ_ASSERT(video);
-
-  // The MDSM::mDecodedVideoEndTime will be updated once the whole SeekTask is
-  // resolved.
-
-  SAMPLE_LOG("OnVideoDecoded [%lld,%lld]", video->mTime, video->GetEndTime());
-
-  AdjustFastSeekIfNeeded(video);
-
-  if (mTarget.IsFast()) {
-    // Non-precise seek. We can stop the seek at the first sample.
-    mSeekedVideoData = video;
-    mDoneVideoSeeking = true;
-  } else {
-    nsresult rv = DropVideoUpToSeekTarget(video.get());
-    if (NS_FAILED(rv)) {
-      CancelCallbacks();
-      RejectIfExist(rv, __func__);
-      return;
-    }
-  }
-
-  if (!mDoneVideoSeeking) {
-    RequestVideoData();
-    return;
-  }
-  MaybeFinishSeek();
-}
-
-void
-AccurateSeekTask::SetCallbacks()
-{
-  AssertOwnerThread();
-
-  mAudioCallback = mReader->AudioCallback().Connect(
-    OwnerThread(), [this] (AudioCallbackData aData) {
-    if (aData.is<MediaData*>()) {
-      OnAudioDecoded(aData.as<MediaData*>());
-    } else {
-      OnNotDecoded(MediaData::AUDIO_DATA,
-        aData.as<MediaResult>());
-    }
-  });
-
-  mVideoCallback = mReader->VideoCallback().Connect(
-    OwnerThread(), [this] (VideoCallbackData aData) {
-    typedef Tuple<MediaData*, TimeStamp> Type;
-    if (aData.is<Type>()) {
-      OnVideoDecoded(Get<0>(aData.as<Type>()));
-    } else {
-      OnNotDecoded(MediaData::VIDEO_DATA,
-        aData.as<MediaResult>());
-    }
-  });
-
-  mAudioWaitCallback = mReader->AudioWaitCallback().Connect(
-    OwnerThread(), [this] (WaitCallbackData aData) {
-    // Ignore pending requests from video-only seek.
-    if (mTarget.IsVideoOnly()) {
-      return;
-    }
-    if (aData.is<MediaData::Type>()) {
-      RequestAudioData();
-    }
-  });
-
-  mVideoWaitCallback = mReader->VideoWaitCallback().Connect(
-    OwnerThread(), [this] (WaitCallbackData aData) {
-    if (aData.is<MediaData::Type>()) {
-      RequestVideoData();
-    }
-  });
-}
-
-void
-AccurateSeekTask::CancelCallbacks()
-{
-  AssertOwnerThread();
-  mAudioCallback.DisconnectIfExists();
-  mVideoCallback.DisconnectIfExists();
-  mAudioWaitCallback.DisconnectIfExists();
-  mVideoWaitCallback.DisconnectIfExists();
-}
 } // namespace mozilla
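
The HandleAudioDecoded/HandleVideoDecoded methods above keep the existing seek policy: a fast (non-precise) seek accepts the first decoded sample, while an accurate seek keeps dropping samples until one crosses the target. A compressed sketch of that decision, with a hypothetical sample struct standing in for MediaData:

#include <cstdint>

struct SampleSketch {      // stand-in for MediaData (times in microseconds)
  int64_t mTime;
  int64_t mEndTime;
};

// Returns true when the seek can stop at this sample.
static bool SeekDoneAtSample(bool aFastSeek, const SampleSketch& aSample,
                             int64_t aTargetUs) {
  if (aFastSeek) {
    return true;                        // non-precise seek: first sample wins
  }
  // Accurate seek: drop samples that end before the target; the first sample
  // spanning the target (possibly trimmed) finishes the seek.
  return aSample.mEndTime > aTargetUs;
}
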
--- a/dom/media/AccurateSeekTask.h
+++ b/dom/media/AccurateSeekTask.h
@@ -26,16 +26,28 @@ public:
   void Discard() override;
 
   RefPtr<SeekTaskPromise> Seek(const media::TimeUnit& aDuration) override;
 
   bool NeedToResetMDSM() const override;
 
   int64_t CalculateNewCurrentTime() const override;
 
+  void HandleAudioDecoded(MediaData* aAudio) override;
+
+  void HandleVideoDecoded(MediaData* aVideo, TimeStamp aDecodeStart) override;
+
+  void HandleNotDecoded(MediaData::Type aType, const MediaResult& aError) override;
+
+  void HandleAudioWaited(MediaData::Type aType) override;
+
+  void HandleVideoWaited(MediaData::Type aType) override;
+
+  void HandleNotWaited(const WaitForDataRejectValue& aRejection) override;
+
 private:
   ~AccurateSeekTask();
 
   void RequestVideoData();
 
   void RequestAudioData();
 
   nsresult DropAudioUpToSeekTarget(MediaData* aSample);
@@ -43,26 +55,16 @@ private:
   nsresult DropVideoUpToSeekTarget(MediaData* aSample);
 
   void MaybeFinishSeek();
 
   void OnSeekResolved(media::TimeUnit);
 
   void OnSeekRejected(nsresult aResult);
 
-  void OnAudioDecoded(MediaData* aAudioSample);
-
-  void OnVideoDecoded(MediaData* aVideoSample);
-
-  void OnNotDecoded(MediaData::Type, const MediaResult&);
-
-  void SetCallbacks();
-
-  void CancelCallbacks();
-
   void AdjustFastSeekIfNeeded(MediaData* aSample);
 
   /*
    * Internal state.
    */
   const media::TimeUnit mCurrentTimeBeforeSeek;
   const uint32_t mAudioRate;  // Audio sample rate.
   bool mDoneAudioSeeking;
@@ -73,18 +75,13 @@ private:
   // the seek target, we will still have a frame that we can display as the
   // last frame in the media.
   RefPtr<MediaData> mFirstVideoFrameAfterSeek;
 
   /*
    * Track the current seek promise made by the reader.
    */
   MozPromiseRequestHolder<MediaDecoderReader::SeekPromise> mSeekRequest;
-
-  MediaEventListener mAudioCallback;
-  MediaEventListener mVideoCallback;
-  MediaEventListener mAudioWaitCallback;
-  MediaEventListener mVideoWaitCallback;
 };
 
 } // namespace mozilla
 
 #endif /* ACCURATE_SEEK_TASK_H */
--- a/dom/media/MediaDecoderStateMachine.cpp
+++ b/dom/media/MediaDecoderStateMachine.cpp
@@ -191,16 +191,20 @@ public:
   virtual void Exit() {};  // Exit action.
   virtual void Step() {}   // Perform a 'cycle' of this state object.
   virtual State GetState() const = 0;
 
   // Event handlers for various events.
   virtual void HandleCDMProxyReady() {}
   virtual void HandleAudioDecoded(MediaData* aAudio) {}
   virtual void HandleVideoDecoded(MediaData* aVideo, TimeStamp aDecodeStart) {}
+  virtual void HandleNotDecoded(MediaData::Type aType, const MediaResult& aError);
+  virtual void HandleAudioWaited(MediaData::Type aType);
+  virtual void HandleVideoWaited(MediaData::Type aType);
+  virtual void HandleNotWaited(const WaitForDataRejectValue& aRejection);
   virtual void HandleEndOfStream() {}
   virtual void HandleWaitingForData() {}
   virtual void HandleAudioCaptured() {}
 
   virtual RefPtr<MediaDecoder::SeekPromise> HandleSeek(SeekTarget aTarget);
 
   virtual RefPtr<ShutdownPromise> HandleShutdown();
 
@@ -751,30 +755,26 @@ class MediaDecoderStateMachine::SeekingS
 {
 public:
   explicit SeekingState(Master* aPtr) : StateObject(aPtr) {}
 
   RefPtr<MediaDecoder::SeekPromise> Enter(SeekJob aSeekJob,
                                           EventVisibility aVisibility)
   {
     mSeekJob = Move(aSeekJob);
-    mVisibility = aVisibility;
 
     // Always switch off the blank decoder otherwise we might become visible
     // in the middle of seeking and won't have a valid video frame to show
     // when seek is done.
     if (mMaster->mVideoDecodeSuspended) {
       mMaster->mVideoDecodeSuspended = false;
       mMaster->mOnPlaybackEvent.Notify(MediaEventType::ExitVideoSuspend);
       Reader()->SetVideoBlankDecode(false);
     }
 
-    // SeekTask will register its callbacks to MediaDecoderReaderWrapper.
-    mMaster->CancelMediaDecoderReaderWrapperCallback();
-
     // Create a new SeekTask instance for the incoming seek task.
     if (mSeekJob.mTarget.IsAccurate() ||
         mSeekJob.mTarget.IsFast()) {
       mSeekTask = new AccurateSeekTask(
         mMaster->mDecoderID, OwnerThread(), Reader(), mSeekJob.mTarget,
         Info(), mMaster->Duration(), mMaster->GetMediaTime());
     } else if (mSeekJob.mTarget.IsNextFrame()) {
       mSeekTask = new NextFrameSeekTask(
@@ -785,24 +785,19 @@ public:
       MOZ_DIAGNOSTIC_ASSERT(false, "Cannot handle this seek task.");
     }
 
     // Don't stop playback for a video-only seek since audio is playing.
     if (!mSeekJob.mTarget.IsVideoOnly()) {
       mMaster->StopPlayback();
     }
 
-    // mSeekJob.mTarget.mTime might be different from
-    // mSeekTask->GetSeekTarget().mTime because the seek task might clamp the
-    // seek target to [0, duration]. We want to update the playback position to
-    // the clamped value.
-    mMaster->UpdatePlaybackPositionInternal(
-      mSeekTask->GetSeekTarget().GetTime().ToMicroseconds());
-
-    if (mVisibility == EventVisibility::Observable) {
+    mMaster->UpdatePlaybackPositionInternal(mSeekJob.mTarget.GetTime().ToMicroseconds());
+
+    if (aVisibility == EventVisibility::Observable) {
       mMaster->mOnPlaybackEvent.Notify(MediaEventType::SeekStarted);
       // We want dormant actions to be transparent to the user.
       // So we only notify the change when the seek request is from the user.
       mMaster->UpdateNextFrameStatus(MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE_SEEKING);
     }
 
     // Reset our state machine and decoding pipeline before seeking.
     if (mSeekTask->NeedToResetMDSM()) {
@@ -826,34 +821,51 @@ public:
     return mSeekJob.mPromise.Ensure(__func__);
   }
 
   void Exit() override
   {
     mSeekTaskRequest.DisconnectIfExists();
     mSeekJob.RejectIfExists(__func__);
     mSeekTask->Discard();
-
-    // Reset the MediaDecoderReaderWrapper's callbask.
-    mMaster->SetMediaDecoderReaderWrapperCallback();
   }
 
   State GetState() const override
   {
     return DECODER_STATE_SEEKING;
   }
 
   void HandleAudioDecoded(MediaData* aAudio) override
   {
-    MOZ_ASSERT(false);
+    mSeekTask->HandleAudioDecoded(aAudio);
   }
 
   void HandleVideoDecoded(MediaData* aVideo, TimeStamp aDecodeStart) override
   {
-    MOZ_ASSERT(false);
+    mSeekTask->HandleVideoDecoded(aVideo, aDecodeStart);
+  }
+
+  void HandleNotDecoded(MediaData::Type aType, const MediaResult& aError) override
+  {
+    mSeekTask->HandleNotDecoded(aType, aError);
+  }
+
+  void HandleAudioWaited(MediaData::Type aType) override
+  {
+    mSeekTask->HandleAudioWaited(aType);
+  }
+
+  void HandleVideoWaited(MediaData::Type aType) override
+  {
+    mSeekTask->HandleVideoWaited(aType);
+  }
+
+  void HandleNotWaited(const WaitForDataRejectValue& aRejection) override
+  {
+    mSeekTask->HandleNotWaited(aRejection);
   }
 
   void HandleVideoSuspendTimeout() override
   {
     // Do nothing since we want a valid video frame to show when seek is done.
   }
 
   void HandleResumeVideoDecoding() override
@@ -903,17 +915,16 @@ private:
     }
 
     mMaster->DecodeError(aValue.mError);
   }
 
   void SeekCompleted();
 
   SeekJob mSeekJob;
-  EventVisibility mVisibility = EventVisibility::Observable;
   MozPromiseRequestHolder<SeekTask::SeekTaskPromise> mSeekTaskRequest;
   RefPtr<SeekTask> mSeekTask;
 };
 
 /**
  * Purpose: stop playback until enough data is decoded to continue playback.
  *
  * Transition to:
@@ -1112,16 +1123,21 @@ public:
     MOZ_DIAGNOSTIC_ASSERT(false, "Shouldn't escape the SHUTDOWN state.");
   }
 
   State GetState() const override
   {
     return DECODER_STATE_SHUTDOWN;
   }
 
+  void HandleNotDecoded(MediaData::Type aType, const MediaResult& aError) override
+  {
+    return;
+  }
+
   RefPtr<MediaDecoder::SeekPromise> HandleSeek(SeekTarget aTarget) override
   {
     MOZ_DIAGNOSTIC_ASSERT(false, "Can't seek in shutdown state.");
     return MediaDecoder::SeekPromise::CreateAndReject(true, __func__);
   }
 
   RefPtr<ShutdownPromise> HandleShutdown() override
   {
@@ -1135,16 +1151,80 @@ public:
   }
 
   void HandleResumeVideoDecoding() override
   {
     MOZ_DIAGNOSTIC_ASSERT(false, "Already shutting down.");
   }
 };
 
+void
+MediaDecoderStateMachine::
+StateObject::HandleNotDecoded(MediaData::Type aType, const MediaResult& aError)
+{
+  bool isAudio = aType == MediaData::AUDIO_DATA;
+  MOZ_ASSERT_IF(!isAudio, aType == MediaData::VIDEO_DATA);
+
+  // If the decoder is waiting for data, we tell it to call us back when the
+  // data arrives.
+  if (aError == NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA) {
+    MOZ_ASSERT(Reader()->IsWaitForDataSupported(),
+               "Readers that send WAITING_FOR_DATA need to implement WaitForData");
+    Reader()->WaitForData(aType);
+    HandleWaitingForData();
+    return;
+  }
+
+  if (aError == NS_ERROR_DOM_MEDIA_CANCELED) {
+    if (isAudio) {
+      mMaster->EnsureAudioDecodeTaskQueued();
+    } else {
+      mMaster->EnsureVideoDecodeTaskQueued();
+    }
+    return;
+  }
+
+  // If this is a decode error, delegate to the generic error path.
+  if (aError != NS_ERROR_DOM_MEDIA_END_OF_STREAM) {
+    mMaster->DecodeError(aError);
+    return;
+  }
+
+  // This is an EOS. Finish off the queue, and then handle things based on our
+  // state.
+  if (isAudio) {
+    AudioQueue().Finish();
+  } else {
+    VideoQueue().Finish();
+  }
+
+  HandleEndOfStream();
+}
+
+void
+MediaDecoderStateMachine::
+StateObject::HandleAudioWaited(MediaData::Type aType)
+{
+  mMaster->EnsureAudioDecodeTaskQueued();
+}
+
+void
+MediaDecoderStateMachine::
+StateObject::HandleVideoWaited(MediaData::Type aType)
+{
+  mMaster->EnsureVideoDecodeTaskQueued();
+}
+
+void
+MediaDecoderStateMachine::
+StateObject::HandleNotWaited(const WaitForDataRejectValue& aRejection)
+{
+
+}
+
 RefPtr<MediaDecoder::SeekPromise>
 MediaDecoderStateMachine::
 StateObject::HandleSeek(SeekTarget aTarget)
 {
   SLOG("Changed state to SEEKING (to %lld)", aTarget.GetTime().ToMicroseconds());
   SeekJob seekJob;
   seekJob.mTarget = aTarget;
   return SetState<SeekingState>(Move(seekJob), EventVisibility::Observable);
@@ -1922,61 +2002,20 @@ MediaDecoderStateMachine::OnVideoPopped(
   DispatchVideoDecodeTaskIfNeeded();
 }
 
 void
 MediaDecoderStateMachine::OnNotDecoded(MediaData::Type aType,
                                        const MediaResult& aError)
 {
   MOZ_ASSERT(OnTaskQueue());
-  MOZ_ASSERT(mState != DECODER_STATE_SEEKING);
 
   SAMPLE_LOG("OnNotDecoded (aType=%u, aError=%u)", aType, aError.Code());
-  bool isAudio = aType == MediaData::AUDIO_DATA;
-  MOZ_ASSERT_IF(!isAudio, aType == MediaData::VIDEO_DATA);
-
-  if (IsShutdown()) {
-    // Already shutdown;
-    return;
-  }
-
-  // If the decoder is waiting for data, we tell it to call us back when the
-  // data arrives.
-  if (aError == NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA) {
-    MOZ_ASSERT(mReader->IsWaitForDataSupported(),
-               "Readers that send WAITING_FOR_DATA need to implement WaitForData");
-    mReader->WaitForData(aType);
-    mStateObj->HandleWaitingForData();
-    return;
-  }
-
-  if (aError == NS_ERROR_DOM_MEDIA_CANCELED) {
-    if (isAudio) {
-      EnsureAudioDecodeTaskQueued();
-    } else {
-      EnsureVideoDecodeTaskQueued();
-    }
-    return;
-  }
-
-  // If this is a decode error, delegate to the generic error path.
-  if (aError != NS_ERROR_DOM_MEDIA_END_OF_STREAM) {
-    DecodeError(aError);
-    return;
-  }
-
-  // This is an EOS. Finish off the queue, and then handle things based on our
-  // state.
-  if (isAudio) {
-    AudioQueue().Finish();
-  } else {
-    VideoQueue().Finish();
-  }
-
-  mStateObj->HandleEndOfStream();
+
+  mStateObj->HandleNotDecoded(aType, aError);
 }
 
 void
 MediaDecoderStateMachine::OnVideoDecoded(MediaData* aVideo,
                                          TimeStamp aDecodeStartTime)
 {
   MOZ_ASSERT(OnTaskQueue());
   MOZ_ASSERT(aVideo);
@@ -1984,16 +2023,39 @@ MediaDecoderStateMachine::OnVideoDecoded
   // Handle abnormal or negative timestamps.
   mDecodedVideoEndTime = std::max(mDecodedVideoEndTime, aVideo->GetEndTime());
 
   SAMPLE_LOG("OnVideoDecoded [%lld,%lld]", aVideo->mTime, aVideo->GetEndTime());
 
   mStateObj->HandleVideoDecoded(aVideo, aDecodeStartTime);
 }
 
+void
+MediaDecoderStateMachine::OnAudioWaited(MediaData::Type aType)
+{
+  MOZ_ASSERT(OnTaskQueue());
+  MOZ_ASSERT(aType == MediaData::AUDIO_DATA);
+  mStateObj->HandleAudioWaited(aType);
+}
+
+void
+MediaDecoderStateMachine::OnVideoWaited(MediaData::Type aType)
+{
+  MOZ_ASSERT(OnTaskQueue());
+  MOZ_ASSERT(aType == MediaData::VIDEO_DATA);
+  mStateObj->HandleVideoWaited(aType);
+}
+
+void
+MediaDecoderStateMachine::OnNotWaited(const WaitForDataRejectValue& aRejection)
+{
+  MOZ_ASSERT(OnTaskQueue());
+  mStateObj->HandleNotWaited(aRejection);
+}
+
 bool
 MediaDecoderStateMachine::IsAudioDecoding()
 {
   MOZ_ASSERT(OnTaskQueue());
   return HasAudio() && !AudioQueue().IsFinished();
 }
 
 bool
@@ -2086,24 +2148,28 @@ MediaDecoderStateMachine::SetMediaDecode
     } else {
       OnNotDecoded(MediaData::VIDEO_DATA, aData.as<MediaResult>());
     }
   });
 
   mAudioWaitCallback = mReader->AudioWaitCallback().Connect(
     mTaskQueue, [this] (WaitCallbackData aData) {
     if (aData.is<MediaData::Type>()) {
-      EnsureAudioDecodeTaskQueued();
+      OnAudioWaited(aData.as<MediaData::Type>());
+    } else {
+      OnNotWaited(aData.as<WaitForDataRejectValue>());
     }
   });
 
   mVideoWaitCallback = mReader->VideoWaitCallback().Connect(
     mTaskQueue, [this] (WaitCallbackData aData) {
     if (aData.is<MediaData::Type>()) {
-      EnsureVideoDecodeTaskQueued();
+      OnVideoWaited(aData.as<MediaData::Type>());
+    } else {
+      OnNotWaited(aData.as<WaitForDataRejectValue>());
     }
   });
 }
 
 void
 MediaDecoderStateMachine::CancelMediaDecoderReaderWrapperCallback()
 {
   MOZ_ASSERT(OnTaskQueue());
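
The net effect of this hunk is that MediaDecoderStateMachine now owns all reader callbacks and routes every decode/wait notification through the current state object; only SeekingState forwards them on to its SeekTask, while the default StateObject implementations reproduce the old inline behaviour. A stripped-down sketch of that routing (simplified, assumed types):

#include <memory>

struct SeekTaskSketch {
  void HandleVideoWaited() { /* RequestVideoData() in the real task */ }
};

struct StateObjectSketch {
  virtual ~StateObjectSketch() = default;
  // Default behaviour: what the wait callback used to do inline in MDSM.
  virtual void HandleVideoWaited() { /* EnsureVideoDecodeTaskQueued() */ }
};

struct SeekingStateSketch : StateObjectSketch {
  SeekTaskSketch mSeekTask;
  void HandleVideoWaited() override { mSeekTask.HandleVideoWaited(); }
};

struct StateMachineSketch {
  std::unique_ptr<StateObjectSketch> mStateObj =
      std::make_unique<StateObjectSketch>();
  // Reader callbacks land here exactly once and are forwarded to the state.
  void OnVideoWaited() { mStateObj->HandleVideoWaited(); }
};
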
--- a/dom/media/MediaDecoderStateMachine.h
+++ b/dom/media/MediaDecoderStateMachine.h
@@ -324,16 +324,19 @@ private:
 
   // Returns true if we're currently playing. The decoder monitor must
   // be held.
   bool IsPlaying() const;
 
   void OnAudioDecoded(MediaData* aAudio);
   void OnVideoDecoded(MediaData* aVideo, TimeStamp aDecodeStartTime);
   void OnNotDecoded(MediaData::Type aType, const MediaResult& aError);
+  void OnAudioWaited(MediaData::Type aType);
+  void OnVideoWaited(MediaData::Type aType);
+  void OnNotWaited(const WaitForDataRejectValue& aRejection);
 
   // Resets all state related to decoding and playback, emptying all buffers
   // and aborting all pending operations on the decode task queue.
   void Reset(TrackSet aTracks = TrackSet(TrackInfo::kAudioTrack,
                                          TrackInfo::kVideoTrack));
 
 protected:
   virtual ~MediaDecoderStateMachine();
--- a/dom/media/NextFrameSeekTask.cpp
+++ b/dom/media/NextFrameSeekTask.cpp
@@ -31,38 +31,32 @@ NextFrameSeekTask::NextFrameSeekTask(con
   : SeekTask(aDecoderID, aThread, aReader, aTarget)
   , mAudioQueue(aAudioQueue)
   , mVideoQueue(aVideoQueue)
   , mCurrentTime(aCurrentTime)
   , mDuration(aDuration)
 {
   AssertOwnerThread();
   MOZ_ASSERT(aInfo.HasVideo());
-
-  // Configure MediaDecoderReaderWrapper.
-  SetCallbacks();
 }
 
 NextFrameSeekTask::~NextFrameSeekTask()
 {
   AssertOwnerThread();
   MOZ_ASSERT(mIsDiscarded);
 }
 
 void
 NextFrameSeekTask::Discard()
 {
   AssertOwnerThread();
 
   // Disconnect MDSM.
   RejectIfExist(NS_ERROR_DOM_MEDIA_CANCELED, __func__);
 
-  // Disconnect MediaDecoderReader.
-  CancelCallbacks();
-
   mIsDiscarded = true;
 }
 
 bool
 NextFrameSeekTask::NeedToResetMDSM() const
 {
   AssertOwnerThread();
   return false;
@@ -73,16 +67,164 @@ NextFrameSeekTask::CalculateNewCurrentTi
 {
   AssertOwnerThread();
 
   // The HTMLMediaElement.currentTime should be updated to the seek target
   // which has been updated to the next frame's time.
   return mTarget.GetTime().ToMicroseconds();
 }
 
+void
+NextFrameSeekTask::HandleAudioDecoded(MediaData* aAudio)
+{
+  AssertOwnerThread();
+  MOZ_ASSERT(aAudio);
+  MOZ_ASSERT(!mSeekTaskPromise.IsEmpty(), "Seek shouldn't be finished");
+
+  // The MDSM::mDecodedAudioEndTime will be updated once the whole SeekTask is
+  // resolved.
+
+  SAMPLE_LOG("OnAudioDecoded [%lld,%lld]", aAudio->mTime, aAudio->GetEndTime());
+
+  // We accept any audio data here.
+  mSeekedAudioData = aAudio;
+
+  MaybeFinishSeek();
+}
+
+void
+NextFrameSeekTask::HandleVideoDecoded(MediaData* aVideo, TimeStamp aDecodeStart)
+{
+  AssertOwnerThread();
+  MOZ_ASSERT(aVideo);
+  MOZ_ASSERT(!mSeekTaskPromise.IsEmpty(), "Seek shouldn't be finished");
+
+  // The MDSM::mDecodedVideoEndTime will be updated once the whole SeekTask is
+  // resolved.
+
+  SAMPLE_LOG("OnVideoDecoded [%lld,%lld]", aVideo->mTime, aVideo->GetEndTime());
+
+  if (aVideo->mTime > mCurrentTime) {
+    mSeekedVideoData = aVideo;
+  }
+
+  if (NeedMoreVideo()) {
+    RequestVideoData();
+    return;
+  }
+
+  MaybeFinishSeek();
+}
+
+void
+NextFrameSeekTask::HandleNotDecoded(MediaData::Type aType, const MediaResult& aError)
+{
+  AssertOwnerThread();
+  switch (aType) {
+  case MediaData::AUDIO_DATA:
+  {
+    MOZ_ASSERT(!mSeekTaskPromise.IsEmpty(), "Seek shouldn't be finished");
+
+    SAMPLE_LOG("OnAudioNotDecoded (aError=%u)", aError.Code());
+
+    // We don't really handle audio decode errors here. Let MDSM trigger further
+    // audio decoding tasks if it needs to play audio, and MDSM will then receive
+    // the decoding state from MediaDecoderReader.
+
+    MaybeFinishSeek();
+    break;
+  }
+  case MediaData::VIDEO_DATA:
+  {
+    MOZ_ASSERT(!mSeekTaskPromise.IsEmpty(), "Seek shouldn't be finished");
+
+    SAMPLE_LOG("OnVideoNotDecoded (aError=%u)", aError.Code());
+
+    if (aError == NS_ERROR_DOM_MEDIA_END_OF_STREAM) {
+      mIsVideoQueueFinished = true;
+    }
+
+    // Video seek not finished.
+    if (NeedMoreVideo()) {
+      switch (aError.Code()) {
+        case NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA:
+          mReader->WaitForData(MediaData::VIDEO_DATA);
+          break;
+        case NS_ERROR_DOM_MEDIA_CANCELED:
+          RequestVideoData();
+          break;
+        case NS_ERROR_DOM_MEDIA_END_OF_STREAM:
+          MOZ_ASSERT(false, "Shouldn't want more data for ended video.");
+          break;
+        default:
+          // Reject the promise since we can't finish video seek anyway.
+          RejectIfExist(aError, __func__);
+          break;
+      }
+      return;
+    }
+
+    MaybeFinishSeek();
+    break;
+  }
+  default:
+    MOZ_ASSERT_UNREACHABLE("We cannot handle RAW_DATA or NULL_DATA here.");
+  }
+}
+
+void
+NextFrameSeekTask::HandleAudioWaited(MediaData::Type aType)
+{
+  AssertOwnerThread();
+
+  // We don't make an audio decode request here; instead, let MDSM trigger
+  // further audio decode tasks if MDSM itself needs to play audio.
+  MaybeFinishSeek();
+}
+
+void
+NextFrameSeekTask::HandleVideoWaited(MediaData::Type aType)
+{
+  AssertOwnerThread();
+
+  if (NeedMoreVideo()) {
+    RequestVideoData();
+    return;
+  }
+  MaybeFinishSeek();
+}
+
+void
+NextFrameSeekTask::HandleNotWaited(const WaitForDataRejectValue& aRejection)
+{
+  AssertOwnerThread();
+
+  switch(aRejection.mType) {
+  case MediaData::AUDIO_DATA:
+  {
+    // We don't make an audio decode request here; instead, let MDSM trigger
+    // further audio decode tasks if MDSM itself needs to play audio.
+    MaybeFinishSeek();
+    break;
+  }
+  case MediaData::VIDEO_DATA:
+  {
+    if (NeedMoreVideo()) {
+      // Reject if we can't finish video seeking.
+      RejectIfExist(NS_ERROR_DOM_MEDIA_CANCELED, __func__);
+      return;
+    }
+    MaybeFinishSeek();
+    break;
+  }
+  default:
+    MOZ_ASSERT_UNREACHABLE("We cannot handle RAW_DATA or NULL_DATA here.");
+  }
+}
+
 /*
  * Remove samples from the queue until aCompare() returns false.
  * aCompare A function object with the signature bool(int64_t) which returns
  *          true for samples that should be removed.
  */
 template <typename Function> static void
 DiscardFrames(MediaQueue<MediaData>& aQueue, const Function& aCompare)
 {
@@ -168,175 +310,16 @@ NextFrameSeekTask::MaybeFinishSeek()
       return aSampleTime < time;
     });
 
     Resolve(__func__); // Call to MDSM::SeekCompleted();
   }
 }
 
 void
-NextFrameSeekTask::OnAudioDecoded(MediaData* aAudioSample)
-{
-  AssertOwnerThread();
-  MOZ_ASSERT(aAudioSample);
-  MOZ_ASSERT(!mSeekTaskPromise.IsEmpty(), "Seek shouldn't be finished");
-
-  // The MDSM::mDecodedAudioEndTime will be updated once the whole SeekTask is
-  // resolved.
-
-  SAMPLE_LOG("OnAudioDecoded [%lld,%lld]",
-             aAudioSample->mTime,
-             aAudioSample->GetEndTime());
-
-  // We accept any audio data here.
-  mSeekedAudioData = aAudioSample;
-
-  MaybeFinishSeek();
-}
-
-void
-NextFrameSeekTask::OnAudioNotDecoded(const MediaResult& aError)
-{
-  AssertOwnerThread();
-  MOZ_ASSERT(!mSeekTaskPromise.IsEmpty(), "Seek shouldn't be finished");
-
-  SAMPLE_LOG("OnAudioNotDecoded (aError=%u)", aError.Code());
-
-  // We don't really handle audio deocde error here. Let MDSM to trigger further
-  // audio decoding tasks if it needs to play audio, and MDSM will then receive
-  // the decoding state from MediaDecoderReader.
-
-  MaybeFinishSeek();
-}
-
-void
-NextFrameSeekTask::OnVideoDecoded(MediaData* aVideoSample)
-{
-  AssertOwnerThread();
-  MOZ_ASSERT(aVideoSample);
-  MOZ_ASSERT(!mSeekTaskPromise.IsEmpty(), "Seek shouldn't be finished");
-
-  // The MDSM::mDecodedVideoEndTime will be updated once the whole SeekTask is
-  // resolved.
-
-  SAMPLE_LOG("OnVideoDecoded [%lld,%lld]",
-             aVideoSample->mTime,
-             aVideoSample->GetEndTime());
-
-  if (aVideoSample->mTime > mCurrentTime) {
-    mSeekedVideoData = aVideoSample;
-  }
-
-  if (NeedMoreVideo()) {
-    RequestVideoData();
-    return;
-  }
-
-  MaybeFinishSeek();
-}
-
-void
-NextFrameSeekTask::OnVideoNotDecoded(const MediaResult& aError)
-{
-  AssertOwnerThread();
-  MOZ_ASSERT(!mSeekTaskPromise.IsEmpty(), "Seek shouldn't be finished");
-
-  SAMPLE_LOG("OnVideoNotDecoded (aError=%u)", aError.Code());
-
-  if (aError == NS_ERROR_DOM_MEDIA_END_OF_STREAM) {
-    mIsVideoQueueFinished = true;
-  }
-
-  // Video seek not finished.
-  if (NeedMoreVideo()) {
-    switch (aError.Code()) {
-      case NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA:
-        mReader->WaitForData(MediaData::VIDEO_DATA);
-        break;
-      case NS_ERROR_DOM_MEDIA_CANCELED:
-        RequestVideoData();
-        break;
-      case NS_ERROR_DOM_MEDIA_END_OF_STREAM:
-        MOZ_ASSERT(false, "Shouldn't want more data for ended video.");
-        break;
-      default:
-        // We might lose the audio sample after canceling the callbacks.
-        // However it doesn't really matter because MDSM is gonna shut down
-        // when seek fails.
-        CancelCallbacks();
-        // Reject the promise since we can't finish video seek anyway.
-        RejectIfExist(aError, __func__);
-        break;
-    }
-    return;
-  }
-
-  MaybeFinishSeek();
-}
-
-void
-NextFrameSeekTask::SetCallbacks()
-{
-  AssertOwnerThread();
-
-  // Register dummy callbcak for audio decoding since we don't need to handle
-  // the decoded audio samples.
-  mAudioCallback = mReader->AudioCallback().Connect(
-    OwnerThread(), [this] (AudioCallbackData aData) {
-    if (aData.is<MediaData*>()) {
-      OnAudioDecoded(aData.as<MediaData*>());
-    } else {
-      OnAudioNotDecoded(aData.as<MediaResult>());
-    }
-  });
-
-  mVideoCallback = mReader->VideoCallback().Connect(
-    OwnerThread(), [this] (VideoCallbackData aData) {
-    typedef Tuple<MediaData*, TimeStamp> Type;
-    if (aData.is<Type>()) {
-      OnVideoDecoded(Get<0>(aData.as<Type>()));
-    } else {
-      OnVideoNotDecoded(aData.as<MediaResult>());
-    }
-  });
-
-  mAudioWaitCallback = mReader->AudioWaitCallback().Connect(
-    OwnerThread(), [this] (WaitCallbackData aData) {
-    // We don't make an audio decode request here, instead, let MDSM to
-    // trigger further audio decode tasks if MDSM itself needs to play audio.
-    MaybeFinishSeek();
-  });
-
-  mVideoWaitCallback = mReader->VideoWaitCallback().Connect(
-    OwnerThread(), [this] (WaitCallbackData aData) {
-    if (NeedMoreVideo()) {
-      if (aData.is<MediaData::Type>()) {
-        RequestVideoData();
-      } else {
-        // Reject if we can't finish video seeking.
-        CancelCallbacks();
-        RejectIfExist(NS_ERROR_DOM_MEDIA_CANCELED, __func__);
-      }
-      return;
-    }
-    MaybeFinishSeek();
-  });
-}
-
-void
-NextFrameSeekTask::CancelCallbacks()
-{
-  AssertOwnerThread();
-  mAudioCallback.DisconnectIfExists();
-  mVideoCallback.DisconnectIfExists();
-  mAudioWaitCallback.DisconnectIfExists();
-  mVideoWaitCallback.DisconnectIfExists();
-}
-
-void
 NextFrameSeekTask::UpdateSeekTargetTime()
 {
   AssertOwnerThread();
 
   RefPtr<MediaData> data = mVideoQueue.PeekFront();
   if (data) {
     mTarget.SetTime(TimeUnit::FromMicroseconds(data->mTime));
   } else if (mSeekedVideoData) {
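
NextFrameSeekTask only cares about the first video frame strictly after the current time; audio is accepted as-is and audio errors are deliberately left to MDSM. A small sketch of that video gating (hypothetical fields, not the Gecko queue types):

#include <cstdint>

struct FrameSketch { int64_t mTime; };  // stand-in for a decoded video MediaData

// Returns true once a frame strictly after the current position has arrived
// (or the stream ended); until then the task keeps requesting video data.
static bool NextFrameSeekDone(const FrameSketch* aSeekedFrame,
                              int64_t aCurrentTimeUs,
                              bool aVideoQueueFinished) {
  if (aVideoQueueFinished) {
    return true;                                 // EOS: nothing more to wait for
  }
  return aSeekedFrame && aSeekedFrame->mTime > aCurrentTimeUs;
}
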
--- a/dom/media/NextFrameSeekTask.h
+++ b/dom/media/NextFrameSeekTask.h
@@ -37,61 +37,56 @@ public:
   void Discard() override;
 
   RefPtr<SeekTaskPromise> Seek(const media::TimeUnit& aDuration) override;
 
   bool NeedToResetMDSM() const override;
 
   int64_t CalculateNewCurrentTime() const override;
 
+  void HandleAudioDecoded(MediaData* aAudio) override;
+
+  void HandleVideoDecoded(MediaData* aVideo, TimeStamp aDecodeStart) override;
+
+  void HandleNotDecoded(MediaData::Type aType, const MediaResult& aError) override;
+
+  void HandleAudioWaited(MediaData::Type aType) override;
+
+  void HandleVideoWaited(MediaData::Type aType) override;
+
+  void HandleNotWaited(const WaitForDataRejectValue& aRejection) override;
+
 private:
   ~NextFrameSeekTask();
 
   void RequestVideoData();
 
   bool NeedMoreVideo() const;
 
   bool IsVideoRequestPending() const;
 
   bool IsAudioSeekComplete() const;
 
   bool IsVideoSeekComplete() const;
 
   void MaybeFinishSeek();
 
-  void OnAudioDecoded(MediaData* aAudioSample);
-
-  void OnAudioNotDecoded(const MediaResult& aError);
-
-  void OnVideoDecoded(MediaData* aVideoSample);
-
-  void OnVideoNotDecoded(const MediaResult& aError);
-
-  void SetCallbacks();
-
-  void CancelCallbacks();
-
   // Update the seek target's time before resolving this seek task, the updated
   // time will be used in the MDSM::SeekCompleted() to update the MDSM's position.
   void UpdateSeekTargetTime();
 
   /*
    * Data shared with MDSM.
    */
   MediaQueue<MediaData>& mAudioQueue;
   MediaQueue<MediaData>& mVideoQueue;
 
   /*
    * Internal state.
    */
   const int64_t mCurrentTime;
   media::TimeUnit mDuration;
-
-  MediaEventListener mAudioCallback;
-  MediaEventListener mVideoCallback;
-  MediaEventListener mAudioWaitCallback;
-  MediaEventListener mVideoWaitCallback;
 };
 
 } // namespace media
 } // namespace mozilla
 
 #endif /* NEXTFRAME_SEEK_TASK_H */
--- a/dom/media/SeekTask.cpp
+++ b/dom/media/SeekTask.cpp
@@ -66,16 +66,9 @@ SeekTask::AssertOwnerThread() const
 
 AbstractThread*
 SeekTask::OwnerThread() const
 {
   AssertOwnerThread();
   return mOwnerThread;
 }
 
-const SeekTarget&
-SeekTask::GetSeekTarget()
-{
-  AssertOwnerThread();
-  return mTarget;
-}
-
 } // namespace mozilla
--- a/dom/media/SeekTask.h
+++ b/dom/media/SeekTask.h
@@ -55,17 +55,27 @@ public:
   virtual void Discard() = 0;
 
   virtual RefPtr<SeekTaskPromise> Seek(const media::TimeUnit& aDuration) = 0;
 
   virtual bool NeedToResetMDSM() const = 0;
 
   virtual int64_t CalculateNewCurrentTime() const = 0;
 
-  const SeekTarget& GetSeekTarget();
+  virtual void HandleAudioDecoded(MediaData* aAudio) = 0;
+
+  virtual void HandleVideoDecoded(MediaData* aVideo, TimeStamp aDecodeStart) = 0;
+
+  virtual void HandleNotDecoded(MediaData::Type aType, const MediaResult& aError) = 0;
+
+  virtual void HandleAudioWaited(MediaData::Type aType) = 0;
+
+  virtual void HandleVideoWaited(MediaData::Type aType) = 0;
+
+  virtual void HandleNotWaited(const WaitForDataRejectValue& aRejection) = 0;
 
 protected:
   SeekTask(const void* aDecoderID,
            AbstractThread* aThread,
            MediaDecoderReaderWrapper* aReader,
            const SeekTarget& aTarget);
 
   virtual ~SeekTask();
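
The new pure-virtual Handle* methods move event routing out of the individual seek
tasks: rather than each task connecting its own MediaEventListeners to the reader,
the state machine forwards decode/wait notifications to whichever SeekTask is
active. A minimal sketch of such a dispatch site, assuming the MDSM keeps the
active task in a member named mSeekTask (that name and the surrounding method are
assumptions, not part of this patch):

    void
    MediaDecoderStateMachine::OnVideoDecoded(MediaData* aVideo,
                                             TimeStamp aDecodeStart)
    {
      MOZ_ASSERT(OnTaskQueue());
      if (mSeekTask) {
        // Route the sample through the new virtual interface instead of a
        // per-task MediaEventListener connection.
        mSeekTask->HandleVideoDecoded(aVideo, aDecodeStart);
      }
    }
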
--- a/dom/media/platforms/android/MediaCodecDataDecoder.cpp
+++ b/dom/media/platforms/android/MediaCodecDataDecoder.cpp
@@ -118,17 +118,17 @@ public:
                                  presentationTimeUs,
                                  gfx::IntRect(0, 0,
                                               mConfig.mDisplay.width,
                                               mConfig.mDisplay.height));
     INVOKE_CALLBACK(Output, v);
     return NS_OK;
   }
 
-  bool SupportDecoderRecycling() const override { return true; }
+  bool SupportDecoderRecycling() const override { return mIsCodecSupportAdaptivePlayback; }
 
 protected:
   layers::ImageContainer* mImageContainer;
   const VideoInfo& mConfig;
   RefPtr<AndroidSurfaceTexture> mSurfaceTexture;
   RefPtr<SamplesWaitingForKey> mSamplesWaitingForKey;
 };
 
@@ -378,21 +378,25 @@ MediaCodecDataDecoder::InitDecoder(Surfa
 
   if (!mDecoder) {
     INVOKE_CALLBACK(Error,
                     MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__));
     return NS_ERROR_FAILURE;
   }
 
   // Check if the video codec supports adaptive playback or not.
-  if (aSurface && java::HardwareCodecCapabilityUtils::CheckSupportsAdaptivePlayback(
-                    mDecoder, nsCString(TranslateMimeType(mMimeType)))) {
-      // TODO: may need to find a way to not use hard code to decide the max w/h.
-      mFormat->SetInteger(MediaFormat::KEY_MAX_WIDTH, 1920);
-      mFormat->SetInteger(MediaFormat::KEY_MAX_HEIGHT, 1080);
+  if (aSurface) {
+    mIsCodecSupportAdaptivePlayback =
+      java::HardwareCodecCapabilityUtils::CheckSupportsAdaptivePlayback(mDecoder,
+        nsCString(TranslateMimeType(mMimeType)));
+    if (mIsCodecSupportAdaptivePlayback) {
+      // TODO: find a way to avoid hard-coding the maximum width/height.
+      mFormat->SetInteger(MediaFormat::KEY_MAX_WIDTH, 1920);
+      mFormat->SetInteger(MediaFormat::KEY_MAX_HEIGHT, 1080);
+    }
   }
 
   MediaCrypto::LocalRef crypto = MediaDrmProxy::GetMediaCrypto(mDrmStubId);
   bool hascrypto = !!crypto;
   LOG("Has(%d) MediaCrypto (%s)", hascrypto, NS_ConvertUTF16toUTF8(mDrmStubId).get());
   nsresult rv;
   NS_ENSURE_SUCCESS(rv = mDecoder->Configure(mFormat, aSurface, crypto, 0), rv);
   NS_ENSURE_SUCCESS(rv = mDecoder->Start(), rv);
--- a/dom/media/platforms/android/MediaCodecDataDecoder.h
+++ b/dom/media/platforms/android/MediaCodecDataDecoder.h
@@ -123,13 +123,15 @@ protected:
 
   ModuleState mState;
 
   SampleQueue mQueue;
   // Durations are stored in microseconds.
   std::deque<media::TimeUnit> mDurations;
 
   nsString mDrmStubId;
+
+  bool mIsCodecSupportAdaptivePlayback = false;
 };
 
 } // namespace mozilla
 
 #endif
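
With SupportDecoderRecycling() now tied to mIsCodecSupportAdaptivePlayback, a
decoder only advertises that it can be reused across stream changes when the
underlying Android codec reported adaptive playback support. A hedged sketch of
how a caller might act on that flag (GetOrCreateDecoder and CreateNewDecoder are
illustrative helpers, not APIs introduced by this patch):

    // Illustrative only: reuse the existing decoder across a resolution change
    // when it supports recycling; otherwise create a fresh one.
    already_AddRefed<MediaDataDecoder>
    GetOrCreateDecoder(const VideoInfo& aNewConfig,
                       MediaDataDecoder* aCurrentDecoder)
    {
      if (aCurrentDecoder && aCurrentDecoder->SupportDecoderRecycling()) {
        return do_AddRef(aCurrentDecoder);
      }
      return CreateNewDecoder(aNewConfig); // assumed helper
    }
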
--- a/dom/media/platforms/android/RemoteDataDecoder.cpp
+++ b/dom/media/platforms/android/RemoteDataDecoder.cpp
@@ -216,17 +216,17 @@ public:
 
     mJavaDecoder = CodecProxy::Create(mFormat,
                                       mSurfaceTexture->JavaSurface(),
                                       mJavaCallbacks,
                                       mDrmStubId);
     if (mJavaDecoder == nullptr) {
       return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
     }
-
+    mIsCodecSupportAdaptivePlayback = mJavaDecoder->IsAdaptivePlaybackSupported();
     mInputDurations.Clear();
 
     return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
   }
 
   void Flush() override
   {
     mInputDurations.Clear();
@@ -240,17 +240,17 @@ public:
   }
 
   void Input(MediaRawData* aSample) override
   {
     RemoteDataDecoder::Input(aSample);
     mInputDurations.Put(aSample->mDuration);
   }
 
-  bool SupportDecoderRecycling() const override { return true; }
+  bool SupportDecoderRecycling() const override { return mIsCodecSupportAdaptivePlayback; }
 
 private:
   class DurationQueue {
   public:
 
     void Clear()
     {
       mValues.clear();
@@ -276,16 +276,17 @@ private:
   private:
     std::deque<int64_t> mValues;
   };
 
   layers::ImageContainer* mImageContainer;
   const VideoInfo& mConfig;
   RefPtr<AndroidSurfaceTexture> mSurfaceTexture;
   DurationQueue mInputDurations;
+  bool mIsCodecSupportAdaptivePlayback = false;
 };
 
 class RemoteAudioDecoder final : public RemoteDataDecoder
 {
 public:
   RemoteAudioDecoder(const AudioInfo& aConfig,
                      MediaFormat::Param aFormat,
                      MediaDataDecoderCallback* aCallback,
--- a/layout/generic/DetailsFrame.cpp
+++ b/layout/generic/DetailsFrame.cpp
@@ -23,16 +23,18 @@ NS_QUERYFRAME_HEAD(DetailsFrame)
 NS_QUERYFRAME_TAIL_INHERITING(nsBlockFrame)
 
 nsBlockFrame*
 NS_NewDetailsFrame(nsIPresShell* aPresShell, nsStyleContext* aContext)
 {
   return new (aPresShell) DetailsFrame(aContext);
 }
 
+namespace mozilla {
+
 DetailsFrame::DetailsFrame(nsStyleContext* aContext)
   : nsBlockFrame(aContext)
 {
 }
 
 DetailsFrame::~DetailsFrame()
 {
 }
@@ -124,8 +126,10 @@ DetailsFrame::CreateAnonymousContent(nsT
 void
 DetailsFrame::AppendAnonymousContentTo(nsTArray<nsIContent*>& aElements,
                                        uint32_t aFilter)
 {
   if (mDefaultSummary) {
     aElements.AppendElement(mDefaultSummary);
   }
 }
+
+} // namespace mozilla
--- a/layout/generic/DetailsFrame.h
+++ b/layout/generic/DetailsFrame.h
@@ -8,16 +8,18 @@
 #define DetailsFrame_h
 
 #include "nsBlockFrame.h"
 #include "nsIAnonymousContentCreator.h"
 
 class nsContainerFrame;
 class nsStyleContext;
 
+namespace mozilla {
+
 // DetailsFrame is generated by HTMLDetailsElement. See
 // nsCSSFrameConstructor::ConstructDetailsFrame for the structure of a
 // DetailsFrame.
 //
 class DetailsFrame final : public nsBlockFrame
                          , public nsIAnonymousContentCreator
 {
 public:
@@ -54,9 +56,11 @@ public:
 
   void AppendAnonymousContentTo(nsTArray<nsIContent*>& aElements,
                                 uint32_t aFilter) override;
 
 private:
   nsCOMPtr<nsIContent> mDefaultSummary;
 };
 
+} // namespace mozilla
+
 #endif // DetailsFrame_h
--- a/mobile/android/app/mobile.js
+++ b/mobile/android/app/mobile.js
@@ -834,19 +834,16 @@ pref("reader.color_scheme.values", "[\"d
 
 // Whether to use a vertical or horizontal toolbar.
 pref("reader.toolbar.vertical", false);
 
 // Telemetry settings.
 // Whether to use the unified telemetry behavior, requires a restart.
 pref("toolkit.telemetry.unified", false);
 
-// Unified AccessibleCarets (touch-caret and selection-carets).
-pref("layout.accessiblecaret.enabled", true);
-
 // AccessibleCaret CSS for the Android L style assets.
 pref("layout.accessiblecaret.width", "22.0");
 pref("layout.accessiblecaret.height", "22.0");
 pref("layout.accessiblecaret.margin-left", "-11.5");
 
 // Android needs to show the caret when long tapping on an empty content.
 pref("layout.accessiblecaret.caret_shown_when_long_tapping_on_empty_content", true);
 
--- a/mobile/android/base/aidl/org/mozilla/gecko/media/ICodec.aidl
+++ b/mobile/android/base/aidl/org/mozilla/gecko/media/ICodec.aidl
@@ -9,16 +9,17 @@ import android.os.Bundle;
 import android.view.Surface;
 import org.mozilla.gecko.media.FormatParam;
 import org.mozilla.gecko.media.ICodecCallbacks;
 import org.mozilla.gecko.media.Sample;
 
 interface ICodec {
     void setCallbacks(in ICodecCallbacks callbacks);
     boolean configure(in FormatParam format, inout Surface surface, int flags, in String drmStubId);
+    boolean isAdaptivePlaybackSupported();
     oneway void start();
     oneway void stop();
     oneway void flush();
     oneway void release();
 
     Sample dequeueInput(int size);
     oneway void queueInput(in Sample sample);
 
--- a/mobile/android/base/java/org/mozilla/gecko/media/AsyncCodec.java
+++ b/mobile/android/base/java/org/mozilla/gecko/media/AsyncCodec.java
@@ -20,16 +20,17 @@ public interface AsyncCodec {
         void onInputBufferAvailable(AsyncCodec codec, int index);
         void onOutputBufferAvailable(AsyncCodec codec, int index, BufferInfo info);
         void onError(AsyncCodec codec, int error);
         void onOutputFormatChanged(AsyncCodec codec, MediaFormat format);
     }
 
     public abstract void setCallbacks(Callbacks callbacks, Handler handler);
     public abstract void configure(MediaFormat format, Surface surface, MediaCrypto crypto, int flags);
+    public abstract boolean isAdaptivePlaybackSupported(String mimeType);
     public abstract void start();
     public abstract void stop();
     public abstract void flush();
     public abstract void release();
     public abstract ByteBuffer getInputBuffer(int index);
     public abstract ByteBuffer getOutputBuffer(int index);
     public abstract void queueInputBuffer(int index, int offset, int size, long presentationTimeUs, int flags);
     public abstract void queueSecureInputBuffer(int index, int offset, CryptoInfo info, long presentationTimeUs, int flags);
--- a/mobile/android/base/java/org/mozilla/gecko/media/Codec.java
+++ b/mobile/android/base/java/org/mozilla/gecko/media/Codec.java
@@ -198,16 +198,18 @@ import java.util.concurrent.ConcurrentLi
    }
 
     private volatile ICodecCallbacks mCallbacks;
     private AsyncCodec mCodec;
     private InputProcessor mInputProcessor;
     private volatile boolean mFlushing = false;
     private SamplePool mSamplePool;
     private Queue<Sample> mSentOutputs = new ConcurrentLinkedQueue<>();
+    // Value will be updated after configure() is called.
+    private volatile boolean mIsAdaptivePlaybackSupported = false;
 
     public synchronized void setCallbacks(ICodecCallbacks callbacks) throws RemoteException {
         mCallbacks = callbacks;
         callbacks.asBinder().linkToDeath(this, 0);
     }
 
     // IBinder.DeathRecipient
     @Override
@@ -249,29 +251,47 @@ import java.util.concurrent.ConcurrentLi
 
             MediaCrypto crypto = RemoteMediaDrmBridgeStub.getMediaCrypto(drmStubId);
             if (DEBUG) {
                 boolean hasCrypto = crypto != null;
                 Log.d(LOGTAG, "configure mediacodec with crypto(" + hasCrypto + ") / Id :" + drmStubId);
             }
 
             codec.setCallbacks(new Callbacks(mCallbacks), null);
+
+            // Configure video decoders with adaptive playback capability when available.
+            if (surface != null) {
+                mIsAdaptivePlaybackSupported = codec.isAdaptivePlaybackSupported(
+                                                   fmt.getString(MediaFormat.KEY_MIME));
+                if (mIsAdaptivePlaybackSupported) {
+                    if (DEBUG) Log.d(LOGTAG, "codec supports adaptive playback = " + mIsAdaptivePlaybackSupported);
+                    // TODO: find a way to avoid hard-coding the maximum width/height.
+                    fmt.setInteger(MediaFormat.KEY_MAX_WIDTH, 1920);
+                    fmt.setInteger(MediaFormat.KEY_MAX_HEIGHT, 1080);
+                }
+            }
+
             codec.configure(fmt, surface, crypto, flags);
             mCodec = codec;
             mInputProcessor = new InputProcessor();
             mSamplePool = new SamplePool(codecName);
             if (DEBUG) Log.d(LOGTAG, codec.toString() + " created");
             return true;
         } catch (Exception e) {
             if (DEBUG) Log.d(LOGTAG, "FAIL: cannot create codec -- " + codecName);
             e.printStackTrace();
             return false;
         }
     }
 
+    @Override
+    public synchronized boolean isAdaptivePlaybackSupported() {
+        return mIsAdaptivePlaybackSupported;
+    }
+
     private void releaseCodec() {
         mInputProcessor.reset();
         try {
             mCodec.release();
         } catch (Exception e) {
             reportError(Error.FATAL, e);
         }
         mCodec = null;
--- a/mobile/android/base/java/org/mozilla/gecko/media/CodecProxy.java
+++ b/mobile/android/base/java/org/mozilla/gecko/media/CodecProxy.java
@@ -126,16 +126,31 @@ public final class CodecProxy {
             return true;
         } catch (RemoteException e) {
             e.printStackTrace();
             return false;
         }
     }
 
     @WrapForJNI
+    public synchronized boolean isAdaptivePlaybackSupported()
+    {
+        if (mRemote == null) {
+            Log.e(LOGTAG, "cannot check isAdaptivePlaybackSupported with an ended codec");
+            return false;
+        }
+        try {
+            return mRemote.isAdaptivePlaybackSupported();
+        } catch (RemoteException e) {
+            e.printStackTrace();
+            return false;
+        }
+    }
+
+    @WrapForJNI
     public synchronized boolean input(ByteBuffer bytes, BufferInfo info, CryptoInfo cryptoInfo) {
         if (mRemote == null) {
             Log.e(LOGTAG, "cannot send input to an ended codec");
             return false;
         }
         try {
             Sample sample = (info.flags == MediaCodec.BUFFER_FLAG_END_OF_STREAM) ?
                     Sample.EOS : mRemote.dequeueInput(info.size).set(bytes, info, cryptoInfo);
--- a/mobile/android/base/java/org/mozilla/gecko/media/JellyBeanAsyncCodec.java
+++ b/mobile/android/base/java/org/mozilla/gecko/media/JellyBeanAsyncCodec.java
@@ -293,28 +293,24 @@ final class JellyBeanAsyncCodec implemen
         mCallbackSender = new CallbackSender(looper, callbacks);
         if (DEBUG) Log.d(LOGTAG, "setCallbacks(): sender=" + mCallbackSender);
     }
 
     @Override
     public void configure(MediaFormat format, Surface surface, MediaCrypto crypto, int flags) {
         assertCallbacks();
 
-        // Video decoder should config with adaptive playback capability.
-        if (surface != null) {
-            if (HardwareCodecCapabilityUtils.checkSupportsAdaptivePlayback(
-                    mCodec, format.getString(MediaFormat.KEY_MIME))) {
-                // TODO: may need to find a way to not use hard code to decide the max w/h.
-                format.setInteger(MediaFormat.KEY_MAX_WIDTH, 1920);
-                format.setInteger(MediaFormat.KEY_MAX_HEIGHT, 1080);
-            }
-        }
         mCodec.configure(format, surface, crypto, flags);
     }
 
+    @Override
+    public boolean isAdaptivePlaybackSupported(String mimeType) {
+        return HardwareCodecCapabilityUtils.checkSupportsAdaptivePlayback(mCodec, mimeType);
+    }
+
     private void assertCallbacks() {
         if (mCallbackSender == null) {
             throw new IllegalStateException(LOGTAG + ": callback must be supplied with setCallbacks().");
         }
     }
 
     @Override
     public void start() {
--- a/mobile/android/base/java/org/mozilla/gecko/telemetry/stores/TelemetryJSONFilePingStore.java
+++ b/mobile/android/base/java/org/mozilla/gecko/telemetry/stores/TelemetryJSONFilePingStore.java
@@ -190,17 +190,23 @@ public class TelemetryJSONFilePingStore 
             Log.w(LOGTAG, "Unexpected empty file: " + file.getName() + ". Ignoring");
             return null;
         }
 
         final FileInputStream inputStream;
         try {
             inputStream = new FileInputStream(file);
         } catch (final FileNotFoundException e) {
-            throw new IllegalStateException("Expected file to exist");
+            // A permission problem can also cause this exception, so include extra debug information.
+            final String fileInfo = String.format("existence: %b, can write: %b, size: %d.",
+                    file.exists(), file.canWrite(), file.length());
+            final String msg = String.format(
+                    "Expected file to exist but got exception in thread: %s. File info - %s",
+                    Thread.currentThread().getName(), fileInfo);
+            throw new IllegalStateException(msg);
         }
 
         final JSONObject obj;
         try {
             // Potential optimization: re-use the same buffer for reading from files.
             obj = lockAndReadFileAndCloseStream(inputStream, (int) file.length());
         } catch (final IOException | JSONException e) {
             // We couldn't read this file so let's just skip it. These potentially
--- a/mobile/android/tests/browser/chrome/test_session_zombification.html
+++ b/mobile/android/tests/browser/chrome/test_session_zombification.html
@@ -28,32 +28,41 @@ https://bugzilla.mozilla.org/show_bug.cg
   Cu.import("resource://gre/modules/PrivateBrowsingUtils.jsm");
   XPCOMUtils.defineLazyGetter(this, "MemoryObserver", function() {
     let sandbox = {};
     Services.scriptloader.loadSubScript("chrome://browser/content/MemoryObserver.js", sandbox);
     return sandbox["MemoryObserver"];
   });
 
   // The chrome window
-  let chromeWin;
+  let chromeWin = Services.wm.getMostRecentWindow("navigator:browser");
+  let BrowserApp = chromeWin.BrowserApp;
 
   // Track the tabs where the tests are happening
   let tabBlank;
   let tabTest;
 
+  function cleanupTabs() {
+    if (tabBlank) {
+      BrowserApp.closeTab(tabBlank);
+      tabBlank = null;
+    }
+
+    if (tabTest) {
+      BrowserApp.closeTab(tabTest);
+      tabTest = null;
+    }
+  }
+
   const url1 = "data:text/html;charset=utf-8,It%20was%20a%20dark%20and%20stormy%20night.";
   const url2 = "data:text/html;charset=utf-8,Suddenly%2C%20a%20tab%20was%20zombified.";
 
   add_task(function* test_sessionStoreZombify() {
-    chromeWin = Services.wm.getMostRecentWindow("navigator:browser");
-    let BrowserApp = chromeWin.BrowserApp;
-
     SimpleTest.registerCleanupFunction(function() {
-      BrowserApp.closeTab(tabBlank);
-      BrowserApp.closeTab(tabTest);
+      cleanupTabs();
     });
 
     // Add a new tab with some content
     tabTest = BrowserApp.addTab(url1 , { selected: true, parentId: BrowserApp.selectedTab.id });
     yield promiseBrowserEvent(tabTest.browser, "DOMContentLoaded");
 
     // Add a new tab with a blank page
     tabBlank = BrowserApp.addTab("about:blank", { selected: true, parentId: BrowserApp.selectedTab.id });
@@ -89,26 +98,25 @@ https://bugzilla.mozilla.org/show_bug.cg
     ok(tabTest.browser.__SS_restore, "Test tab is set for delay loading.");
 
     // Switch back to the test tab and wait for it to reload
     BrowserApp.selectTab(tabTest);
     yield promiseBrowserEvent(tabTest.browser, "DOMContentLoaded");
 
     // Check that the test tab has loaded the correct url
     is(tabTest.browser.currentURI.spec, url2, "Test tab is showing the second URL.");
+
+    cleanupTabs();
   });
 
   add_task(function* test_sessionStoreKeepAsZombie() {
-    chromeWin = Services.wm.getMostRecentWindow("navigator:browser");
-    let BrowserApp = chromeWin.BrowserApp;
     let observerService = Services.obs;
 
     SimpleTest.registerCleanupFunction(function() {
-      BrowserApp.closeTab(tabBlank);
-      BrowserApp.closeTab(tabTest);
+      cleanupTabs();
     });
 
     // Add a new tab with some content
     tabTest = BrowserApp.addTab(url1 , { selected: true, parentId: BrowserApp.selectedTab.id });
     yield promiseBrowserEvent(tabTest.browser, "DOMContentLoaded");
 
     // Add a new tab with a blank page
     tabBlank = BrowserApp.addTab("about:blank", { selected: true, parentId: BrowserApp.selectedTab.id });
@@ -164,16 +172,18 @@ https://bugzilla.mozilla.org/show_bug.cg
 
     // Fake an "application-foreground" notification
     observerService.notifyObservers(null, "application-foreground", null);
 
     // The test tab should now start reloading
     yield promiseBrowserEvent(tabTest.browser, "DOMContentLoaded");
     ok(!tabTest.browser.__SS_restore, "Test tab is no longer set for delay loading.");
     is(tabTest.browser.currentURI.spec, url1, "Test tab is showing the test URL.");
+
+    cleanupTabs();
   });
 
   </script>
 </head>
 <body>
 <a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=1044556">Mozilla Bug 1044556</a>
 <p id="display"></p>
 <div id="content" style="display: none">
--- a/modules/libpref/init/all.js
+++ b/modules/libpref/init/all.js
@@ -5160,17 +5160,19 @@ pref("browser.safebrowsing.allowOverride
 pref("browser.safebrowsing.id", "navclient-auto-ffox");
 #else
 pref("browser.safebrowsing.id", "Firefox");
 #endif
 
 // Turn off Spatial navigation by default.
 pref("snav.enabled", false);
 
-// New implementation to unify touch-caret and selection-carets.
+// Debug-only pref to force-enable the AccessibleCaret. If you want to
+// control the AccessibleCaret with a mouse, you'll also need to set
+// "layout.accessiblecaret.hide_carets_for_mouse_input" to false.
 pref("layout.accessiblecaret.enabled", false);
 
 // Enable the accessible caret on platforms/devices
 // that we detect have touch support. Note that this pref is an
 // additional way to enable the accessible carets, rather than
 // overriding the layout.accessiblecaret.enabled pref.
 pref("layout.accessiblecaret.enabled_on_touch", true);
 
--- a/parser/html/nsHtml5StreamParser.cpp
+++ b/parser/html/nsHtml5StreamParser.cpp
@@ -976,23 +976,25 @@ nsHtml5StreamParser::OnStartRequest(nsIR
     mInitialEncodingWasFromParentFrame = true;
   }
 
   if (mCharsetSource >= kCharsetFromAutoDetection) {
     mFeedChardet = false;
   }
   
   nsCOMPtr<nsIWyciwygChannel> wyciwygChannel(do_QueryInterface(mRequest));
-  if (!wyciwygChannel) {
+  if (mCharsetSource < kCharsetFromUtf8OnlyMime && !wyciwygChannel) {
     // we aren't ready to commit to an encoding yet
     // leave converter uninstantiated for now
     return NS_OK;
   }
 
-  // We are reloading a document.open()ed doc.
+  // We are reloading a document.open()ed doc or loading JSON/WebVTT/etc. into
+  // a browsing context. In the latter case, there's no need to remove the
+  // BOM manually here, because the UTF-8 decoder removes it.
   mReparseForbidden = true;
   mFeedChardet = false;
 
   // Instantiate the converter here to avoid BOM sniffing.
   mUnicodeDecoder = EncodingUtils::DecoderForEncoding(mCharset);
   return NS_OK;
 }
 
--- a/parser/nsCharsetSource.h
+++ b/parser/nsCharsetSource.h
@@ -17,10 +17,11 @@
 #define kCharsetFromMetaPrescan         8 // this one and smaller: HTML5 Tentative
 #define kCharsetFromMetaTag             9 // this one and greater: HTML5 Confident
 #define kCharsetFromIrreversibleAutoDetection 10
 #define kCharsetFromChannel            11
 #define kCharsetFromOtherComponent     12
 #define kCharsetFromParentForced       13 // propagates to child frames
 #define kCharsetFromUserForced         14 // propagates to child frames
 #define kCharsetFromByteOrderMark      15
+#define kCharsetFromUtf8OnlyMime       16 // For JSON, WebVTT and such
 
 #endif /* nsCharsetSource_h_ */
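
The charset-source constants form an ordered confidence scale, which is why the
nsHtml5StreamParser::OnStartRequest() hunk above can commit to an encoding with a
single comparison. A standalone illustration (not code from this patch):

    // Sources at or above kCharsetFromUtf8OnlyMime are final: for JSON, WebVTT
    // and other UTF-8-only types the decoder is instantiated up front and
    // reparsing is forbidden.
    bool MayStillSwitchEncoding(int32_t aCharsetSource)
    {
      return aCharsetSource < kCharsetFromUtf8OnlyMime;
    }
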
--- a/security/certverifier/CTKnownLogs.h
+++ b/security/certverifier/CTKnownLogs.h
@@ -4,91 +4,121 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 /* This file was automatically generated by getCTKnownLogs.py. */
 
 #ifndef CTKnownLogs_h
 #define CTKnownLogs_h
 
+#include "CTLog.h"
+
 #include <stddef.h>
 
-struct CTLogInfo {
-  const char* const logName;
-  const char* const logUrl;
-  const char* const logKey;
-  const size_t logKeyLength;
+struct CTLogInfo
+{
+  const char* const name;
+  const mozilla::ct::CTLogStatus status;
+  // 0 for qualified logs, disqualification time for disqualified logs
+  // (in milliseconds, measured since the epoch, ignoring leap seconds).
+  const uint64_t disqualificationTime;
+  // Index within kCTLogOperatorList.
+  const size_t operatorIndex;
+  const char* const key;
+  const size_t keyLength;
+};
+
+struct CTLogOperatorInfo
+{
+  const char* const name;
+  const mozilla::ct::CTLogOperatorId id;
 };
 
 const CTLogInfo kCTLogList[] = {
   { "Google 'Pilot' log",
-    "https://ct.googleapis.com/pilot/",
+    mozilla::ct::CTLogStatus::Included,
+    0, // no disqualification time
+    0, // operated by Google
     "\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48"
     "\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x7d\xa8\x4b\x12\x29\x80\xa3\x3d\xad"
     "\xd3\x5a\x77\xb8\xcc\xe2\x88\xb3\xa5\xfd\xf1\xd3\x0c\xcd\x18\x0c\xe8\x41"
     "\x46\xe8\x81\x01\x1b\x15\xe1\x4b\xf1\x1b\x62\xdd\x36\x0a\x08\x18\xba\xed"
     "\x0b\x35\x84\xd0\x9e\x40\x3c\x2d\x9e\x9b\x82\x65\xbd\x1f\x04\x10\x41\x4c"
     "\xa0",
     91 },
   { "Google 'Aviator' log",
-    "https://ct.googleapis.com/aviator/",
+    mozilla::ct::CTLogStatus::Included,
+    0, // no disqualification time
+    0, // operated by Google
     "\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48"
     "\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xd7\xf4\xcc\x69\xb2\xe4\x0e\x90\xa3"
     "\x8a\xea\x5a\x70\x09\x4f\xef\x13\x62\xd0\x8d\x49\x60\xff\x1b\x40\x50\x07"
     "\x0c\x6d\x71\x86\xda\x25\x49\x8d\x65\xe1\x08\x0d\x47\x34\x6b\xbd\x27\xbc"
     "\x96\x21\x3e\x34\xf5\x87\x76\x31\xb1\x7f\x1d\xc9\x85\x3b\x0d\xf7\x1f\x3f"
     "\xe9",
     91 },
   { "DigiCert Log Server",
-    "https://ct1.digicert-ct.com/log/",
+    mozilla::ct::CTLogStatus::Included,
+    0, // no disqualification time
+    1, // operated by DigiCert
     "\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48"
     "\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x02\x46\xc5\xbe\x1b\xbb\x82\x40\x16"
     "\xe8\xc1\xd2\xac\x19\x69\x13\x59\xf8\xf8\x70\x85\x46\x40\xb9\x38\xb0\x23"
     "\x82\xa8\x64\x4c\x7f\xbf\xbb\x34\x9f\x4a\x5f\x28\x8a\xcf\x19\xc4\x00\xf6"
     "\x36\x06\x93\x65\xed\x4c\xf5\xa9\x21\x62\x5a\xd8\x91\xeb\x38\x24\x40\xac"
     "\xe8",
     91 },
   { "Google 'Rocketeer' log",
-    "https://ct.googleapis.com/rocketeer/",
+    mozilla::ct::CTLogStatus::Included,
+    0, // no disqualification time
+    0, // operated by Google
     "\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48"
     "\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x20\x5b\x18\xc8\x3c\xc1\x8b\xb3\x31"
     "\x08\x00\xbf\xa0\x90\x57\x2b\xb7\x47\x8c\x6f\xb5\x68\xb0\x8e\x90\x78\xe9"
     "\xa0\x73\xea\x4f\x28\x21\x2e\x9c\xc0\xf4\x16\x1b\xaa\xf9\xd5\xd7\xa9\x80"
     "\xc3\x4e\x2f\x52\x3c\x98\x01\x25\x46\x24\x25\x28\x23\x77\x2d\x05\xc2\x40"
     "\x7a",
     91 },
   { "Certly.IO log",
-    "https://log.certly.io/",
+    mozilla::ct::CTLogStatus::Disqualified,
+    1460678400000, // Date.parse("2016-04-15T00:00:00Z")
+    2, // operated by Certly
     "\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48"
     "\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x0b\x23\xcb\x85\x62\x98\x61\x48\x04"
     "\x73\xeb\x54\x5d\xf3\xd0\x07\x8c\x2d\x19\x2d\x8c\x36\xf5\xeb\x8f\x01\x42"
     "\x0a\x7c\x98\x26\x27\xc1\xb5\xdd\x92\x93\xb0\xae\xf8\x9b\x3d\x0c\xd8\x4c"
     "\x4e\x1d\xf9\x15\xfb\x47\x68\x7b\xba\x66\xb7\x25\x9c\xd0\x4a\xc2\x66\xdb"
     "\x48",
     91 },
   { "Izenpe log",
-    "https://ct.izenpe.com/",
+    mozilla::ct::CTLogStatus::Disqualified,
+    1464566400000, // Date.parse("2016-05-30T00:00:00Z")
+    3, // operated by Izenpe
     "\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48"
     "\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x27\x64\x39\x0c\x2d\xdc\x50\x18\xf8"
     "\x21\x00\xa2\x0e\xed\x2c\xea\x3e\x75\xba\x9f\x93\x64\x09\x00\x11\xc4\x11"
     "\x17\xab\x5c\xcf\x0f\x74\xac\xb5\x97\x90\x93\x00\x5b\xb8\xeb\xf7\x27\x3d"
     "\xd9\xb2\x0a\x81\x5f\x2f\x0d\x75\x38\x94\x37\x99\x1e\xf6\x07\x76\xe0\xee"
     "\xbe",
     91 },
   { "Symantec log",
-    "https://ct.ws.symantec.com/",
+    mozilla::ct::CTLogStatus::Included,
+    0, // no disqualification time
+    4, // operated by Symantec
     "\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48"
     "\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x96\xea\xac\x1c\x46\x0c\x1b\x55\xdc"
     "\x0d\xfc\xb5\x94\x27\x46\x57\x42\x70\x3a\x69\x18\xe2\xbf\x3b\xc4\xdb\xab"
     "\xa0\xf4\xb6\x6c\xc0\x53\x3f\x4d\x42\x10\x33\xf0\x58\x97\x8f\x6b\xbe\x72"
     "\xf4\x2a\xec\x1c\x42\xaa\x03\x2f\x1a\x7e\x28\x35\x76\x99\x08\x3d\x21\x14"
     "\x86",
     91 },
   { "Venafi log",
-    "https://ctlog.api.venafi.com/",
+    mozilla::ct::CTLogStatus::Included,
+    0, // no disqualification time
+    5, // operated by Venafi
     "\x30\x82\x01\x22\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x01\x05"
     "\x00\x03\x82\x01\x0f\x00\x30\x82\x01\x0a\x02\x82\x01\x01\x00\xa2\x5a\x48"
     "\x1f\x17\x52\x95\x35\xcb\xa3\x5b\x3a\x1f\x53\x82\x76\x94\xa3\xff\x80\xf2"
     "\x1c\x37\x3c\xc0\xb1\xbd\xc1\x59\x8b\xab\x2d\x65\x93\xd7\xf3\xe0\x04\xd5"
     "\x9a\x6f\xbf\xd6\x23\x76\x36\x4f\x23\x99\xcb\x54\x28\xad\x8c\x15\x4b\x65"
     "\x59\x76\x41\x4a\x9c\xa6\xf7\xb3\x3b\x7e\xb1\xa5\x49\xa4\x17\x51\x6c\x80"
     "\xdc\x2a\x90\x50\x4b\x88\x24\xe9\xa5\x12\x32\x93\x04\x48\x90\x02\xfa\x5f"
     "\x0e\x30\x87\x8e\x55\x76\x05\xee\x2a\x4c\xce\xa3\x6a\x69\x09\x6e\x25\xad"
@@ -98,26 +128,30 @@ const CTLogInfo kCTLogList[] = {
     "\x5b\xe8\x81\xcd\xfd\x92\x68\xe7\xf3\x06\xf0\xe9\x72\x84\xee\x01\xa5\xb1"
     "\xd8\x33\xda\xce\x83\xa5\xdb\xc7\xcf\xd6\x16\x7e\x90\x75\x18\xbf\x16\xdc"
     "\x32\x3b\x6d\x8d\xab\x82\x17\x1f\x89\x20\x8d\x1d\x9a\xe6\x4d\x23\x08\xdf"
     "\x78\x6f\xc6\x05\xbf\x5f\xae\x94\x97\xdb\x5f\x64\xd4\xee\x16\x8b\xa3\x84"
     "\x6c\x71\x2b\xf1\xab\x7f\x5d\x0d\x32\xee\x04\xe2\x90\xec\x41\x9f\xfb\x39"
     "\xc1\x02\x03\x01\x00\x01",
     294 },
   { "Symantec 'Vega' log",
-    "https://vega.ws.symantec.com/",
+    mozilla::ct::CTLogStatus::Included,
+    0, // no disqualification time
+    4, // operated by Symantec
     "\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48"
     "\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xea\x95\x9e\x02\xff\xee\xf1\x33\x6d"
     "\x4b\x87\xbc\xcd\xfd\x19\x17\x62\xff\x94\xd3\xd0\x59\x07\x3f\x02\x2d\x1c"
     "\x90\xfe\xc8\x47\x30\x3b\xf1\xdd\x0d\xb8\x11\x0c\x5d\x1d\x86\xdd\xab\xd3"
     "\x2b\x46\x66\xfb\x6e\x65\xb7\x3b\xfd\x59\x68\xac\xdf\xa6\xf8\xce\xd2\x18"
     "\x4d",
     91 },
   { "CNNIC CT log",
-    "https://ctserver.cnnic.cn/",
+    mozilla::ct::CTLogStatus::Included,
+    0, // no disqualification time
+    6, // operated by CNNIC
     "\x30\x82\x01\x22\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x01\x05"
     "\x00\x03\x82\x01\x0f\x00\x30\x82\x01\x0a\x02\x82\x01\x01\x00\xbf\xb5\x08"
     "\x61\x9a\x29\x32\x04\xd3\x25\x63\xe9\xd8\x85\xe1\x86\xe0\x1f\xd6\x5e\x9a"
     "\xf7\x33\x3b\x80\x1b\xe7\xb6\x3e\x5f\x2d\xa1\x66\xf6\x95\x4a\x84\xa6\x21"
     "\x56\x79\xe8\xf7\x85\xee\x5d\xe3\x7c\x12\xc0\xe0\x89\x22\x09\x22\x3e\xba"
     "\x16\x95\x06\xbd\xa8\xb9\xb1\xa9\xb2\x7a\xd6\x61\x2e\x87\x11\xb9\x78\x40"
     "\x89\x75\xdb\x0c\xdc\x90\xe0\xa4\x79\xd6\xd5\x5e\x6e\xd1\x2a\xdb\x34\xf4"
     "\x99\x3f\x65\x89\x3b\x46\xc2\x29\x2c\x15\x07\x1c\xc9\x4b\x1a\x54\xf8\x6c"
@@ -127,28 +161,44 @@ const CTLogInfo kCTLogList[] = {
     "\xb2\xe5\x9a\x6c\x0d\xc5\x1c\xa5\x8b\xf7\x3f\x30\xaf\xb9\x01\x91\xb7\x69"
     "\x12\x12\xe5\x83\x61\xfe\x34\x00\xbe\xf6\x71\x8a\xc7\xeb\x50\x92\xe8\x59"
     "\xfe\x15\x91\xeb\x96\x97\xf8\x23\x54\x3f\x2d\x8e\x07\xdf\xee\xda\xb3\x4f"
     "\xc8\x3c\x9d\x6f\xdf\x3c\x2c\x43\x57\xa1\x47\x0c\x91\x04\xf4\x75\x4d\xda"
     "\x89\x81\xa4\x14\x06\x34\xb9\x98\xc3\xda\xf1\xfd\xed\x33\x36\xd3\x16\x2d"
     "\x35\x02\x03\x01\x00\x01",
     294 },
   { "WoSign log",
-    "https://ctlog.wosign.com/",
+    mozilla::ct::CTLogStatus::Included,
+    0, // no disqualification time
+    7, // operated by WoSign
     "\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48"
     "\xce\x3d\x03\x01\x07\x03\x42\x00\x04\xcc\x11\x88\x7b\x2d\x66\xcb\xae\x8f"
     "\x4d\x30\x66\x27\x19\x25\x22\x93\x21\x46\xb4\x2f\x01\xd3\xc6\xf9\x2b\xd5"
     "\xc8\xba\x73\x9b\x06\xa2\xf0\x8a\x02\x9c\xd0\x6b\x46\x18\x30\x85\xba\xe9"
     "\x24\x8b\x0e\xd1\x5b\x70\x28\x0c\x7e\xf1\x3a\x45\x7f\x5a\xf3\x82\x42\x60"
     "\x31",
     91 },
   { "StartCom log",
-    "https://ct.startssl.com/",
+    mozilla::ct::CTLogStatus::Included,
+    0, // no disqualification time
+    8, // operated by StartCom
     "\x30\x59\x30\x13\x06\x07\x2a\x86\x48\xce\x3d\x02\x01\x06\x08\x2a\x86\x48"
     "\xce\x3d\x03\x01\x07\x03\x42\x00\x04\x48\xf3\x59\xf3\xf6\x05\x18\xd3\xdb"
     "\xb2\xed\x46\x7e\xcf\xc8\x11\xb5\x57\xb1\xa8\xd6\x4c\xe6\x9f\xb7\x4a\x1a"
     "\x14\x86\x43\xa9\x48\xb0\xcb\x5a\x3f\x3c\x4a\xca\xdf\xc4\x82\x14\x55\x9a"
     "\xf8\xf7\x8e\x40\x55\xdc\xf4\xd2\xaf\xea\x75\x74\xfb\x4e\x7f\x60\x86\x2e"
     "\x51",
     91 }
 };
 
+const CTLogOperatorInfo kCTLogOperatorList[] = {
+  { "Google", 0 },
+  { "DigiCert", 1 },
+  { "Certly", 2 },
+  { "Izenpe", 3 },
+  { "Symantec", 4 },
+  { "Venafi", 5 },
+  { "CNNIC", 7 },
+  { "WoSign", 8 },
+  { "StartCom", 9 }
+};
+
 #endif // CTKnownLogs_h
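
For reference, this is how one entry of the regenerated table maps onto the new
CTLogInfo layout; the values are read off the list above, but the snippet itself
is editorial rather than generated output:

    // Field order: name, status, disqualificationTime, operatorIndex, key,
    // keyLength; operatorIndex points into kCTLogOperatorList.
    const CTLogInfo& certly = kCTLogList[4]; // "Certly.IO log"
    const CTLogOperatorInfo& certlyOperator =
      kCTLogOperatorList[certly.operatorIndex];
    // certlyOperator.name == "Certly", certlyOperator.id == 2
    // certly.status == mozilla::ct::CTLogStatus::Disqualified
    // certly.disqualificationTime == 1460678400000, i.e. 2016-04-15T00:00:00Z
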
new file mode 100644
--- /dev/null
+++ b/security/certverifier/CTLog.h
@@ -0,0 +1,34 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef CTLog_h
+#define CTLog_h
+
+#include <stdint.h>
+
+namespace mozilla { namespace ct {
+
+// Signed integer sufficient to store the numeric ID of CT log operators
+// as assigned at https://www.certificate-transparency.org/known-logs .
+// The assigned IDs are non-negative integers, so special values
+// (such as -1) can be used to indicate a "null" or unknown operator ID.
+typedef int16_t CTLogOperatorId;
+
+// Current status of a CT log in regard to its inclusion in the
+// Known Logs List such as https://www.certificate-transparency.org/known-logs
+enum class CTLogStatus
+{
+  // Status unknown or unavailable.
+  Unknown,
+  // Included in the list of known logs.
+  Included,
+  // Previously included, but disqualified at some point of time.
+  Disqualified,
+};
+
+} } // namespace mozilla::ct
+
+#endif // CTLog_h
--- a/security/certverifier/CTLogVerifier.cpp
+++ b/security/certverifier/CTLogVerifier.cpp
@@ -1,16 +1,18 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "CTLogVerifier.h"
 
+#include <stdint.h>
+
 #include "CTSerialization.h"
 #include "hasht.h"
 #include "mozilla/ArrayUtils.h"
 #include "mozilla/Assertions.h"
 #include "pkix/pkixnss.h"
 #include "pkixutil.h"
 
 namespace mozilla { namespace ct {
@@ -115,22 +117,43 @@ public:
   }
 
   DigitallySigned::SignatureAlgorithm mSignatureAlgorithm;
 };
 
 
 CTLogVerifier::CTLogVerifier()
   : mSignatureAlgorithm(DigitallySigned::SignatureAlgorithm::Anonymous)
+  , mOperatorId(-1)
+  , mDisqualified(false)
+  , mDisqualificationTime(UINT64_MAX)
 {
 }
 
 Result
-CTLogVerifier::Init(Input subjectPublicKeyInfo)
+CTLogVerifier::Init(Input subjectPublicKeyInfo,
+                    CTLogOperatorId operatorId,
+                    CTLogStatus logStatus,
+                    uint64_t disqualificationTime)
 {
+  switch (logStatus) {
+    case CTLogStatus::Included:
+      mDisqualified = false;
+      mDisqualificationTime = UINT64_MAX;
+      break;
+    case CTLogStatus::Disqualified:
+      mDisqualified = true;
+      mDisqualificationTime = disqualificationTime;
+      break;
+    case CTLogStatus::Unknown:
+    default:
+      MOZ_ASSERT_UNREACHABLE("Unsupported CTLogStatus");
+      return Result::FATAL_ERROR_INVALID_ARGS;
+  }
+
   SignatureParamsTrustDomain trustDomain;
   Result rv = CheckSubjectPublicKeyInfo(subjectPublicKeyInfo, trustDomain,
                                         EndEntityOrCA::MustBeEndEntity);
   if (rv != Success) {
     return rv;
   }
   mSignatureAlgorithm = trustDomain.mSignatureAlgorithm;
 
@@ -143,16 +166,17 @@ CTLogVerifier::Init(Input subjectPublicK
     return Result::FATAL_ERROR_NO_MEMORY;
   }
   rv = DigestBufNSS(subjectPublicKeyInfo, DigestAlgorithm::sha256,
                     mKeyId.begin(), mKeyId.length());
   if (rv != Success) {
     return rv;
   }
 
+  mOperatorId = operatorId;
   return Success;
 }
 
 Result
 CTLogVerifier::Verify(const LogEntry& entry,
                       const SignedCertificateTimestamp& sct)
 {
   if (mKeyId.empty() || sct.logId != mKeyId) {
--- a/security/certverifier/CTLogVerifier.h
+++ b/security/certverifier/CTLogVerifier.h
@@ -2,16 +2,17 @@
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef CTLogVerifier_h
 #define CTLogVerifier_h
 
+#include "CTLog.h"
 #include "pkix/Input.h"
 #include "pkix/pkix.h"
 #include "SignedCertificateTimestamp.h"
 #include "SignedTreeHead.h"
 
 namespace mozilla { namespace ct {
 
 // Verifies Signed Certificate Timestamps (SCTs) provided by a specific log
@@ -21,26 +22,38 @@ namespace mozilla { namespace ct {
 // The verification functions return Success if the provided SCT has passed
 // verification, ERROR_BAD_SIGNATURE if failed verification, or other result
 // on error.
 class CTLogVerifier
 {
 public:
   CTLogVerifier();
 
-  // Initializes the verifier with log-specific information.
+  // Initializes the verifier with log-specific information. Only the public
+  // key is used for verification; the other parameters are informational.
   // |subjectPublicKeyInfo| is a DER-encoded SubjectPublicKeyInfo.
+  // |operatorId| The numeric ID of the log operator as assigned at
+  // https://www.certificate-transparency.org/known-logs .
+  // |logStatus| Either "Included" or "Disqualified".
+  // |disqualificationTime| Disqualification timestamp (for disqualified logs).
   // An error is returned if |subjectPublicKeyInfo| refers to an unsupported
   // public key.
-  pkix::Result Init(pkix::Input subjectPublicKeyInfo);
+  pkix::Result Init(pkix::Input subjectPublicKeyInfo,
+                    CTLogOperatorId operatorId,
+                    CTLogStatus logStatus,
+                    uint64_t disqualificationTime);
 
   // Returns the log's key ID, which is a SHA256 hash of its public key.
   // See RFC 6962, Section 3.2.
   const Buffer& keyId() const { return mKeyId; }
 
+  CTLogOperatorId operatorId() const { return mOperatorId; }
+  bool isDisqualified() const { return mDisqualified; }
+  uint64_t disqualificationTime() const { return mDisqualificationTime; }
+
   // Verifies that |sct| contains a valid signature for |entry|.
   // |sct| must be signed by the verifier's log.
   pkix::Result Verify(const LogEntry& entry,
                       const SignedCertificateTimestamp& sct);
 
   // Verifies the signature in |sth|.
   // |sth| must be signed by the verifier's log.
   pkix::Result VerifySignedTreeHead(const SignedTreeHead& sth);
@@ -56,13 +69,16 @@ private:
   // Returns Success if passed verification, ERROR_BAD_SIGNATURE if failed
   // verification, or other result on error.
   pkix::Result VerifySignature(pkix::Input data, pkix::Input signature);
   pkix::Result VerifySignature(const Buffer& data, const Buffer& signature);
 
   Buffer mSubjectPublicKeyInfo;
   Buffer mKeyId;
   DigitallySigned::SignatureAlgorithm mSignatureAlgorithm;
+  CTLogOperatorId mOperatorId;
+  bool mDisqualified;
+  uint64_t mDisqualificationTime;
 };
 
 } } // namespace mozilla::ct
 
 #endif // CTLogVerifier_h
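
A hedged usage sketch of the extended Init() signature, mirroring what
CertVerifier::LoadKnownCTLogs() does below for each table entry (the literal
values here are illustrative, taken from the Certly entry above):

    using namespace mozilla::ct;

    CTLogVerifier verifier;
    pkix::Input publicKey;
    // ... initialize publicKey with the log's DER SubjectPublicKeyInfo ...
    pkix::Result rv = verifier.Init(publicKey,
                                    /* operatorId */ 2,
                                    CTLogStatus::Disqualified,
                                    /* disqualificationTime */ 1460678400000);
    if (rv == pkix::Success) {
      MOZ_ASSERT(verifier.isDisqualified());
      MOZ_ASSERT(verifier.operatorId() == 2);
    }
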
--- a/security/certverifier/CTSerialization.cpp
+++ b/security/certverifier/CTSerialization.cpp
@@ -506,20 +506,16 @@ DecodeSignedCertificateTimestamp(Reader&
     return rv;
   }
   rv = InputToBuffer(extensions, result.extensions);
   if (rv != Success) {
     return rv;
   }
   result.timestamp = timestamp;
 
-  result.origin = SignedCertificateTimestamp::Origin::Unknown;
-  result.verificationStatus =
-    SignedCertificateTimestamp::VerificationStatus::None;
-
   output = Move(result);
   return Success;
 }
 
 Result
 EncodeSCTList(const Vector<pkix::Input>& scts, Buffer& output)
 {
   // Find out the total size of the SCT list to be written so we can
--- a/security/certverifier/CTVerifyResult.cpp
+++ b/security/certverifier/CTVerifyResult.cpp
@@ -1,18 +1,28 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "CTVerifyResult.h"
 
+#include <stdint.h>
+
 namespace mozilla { namespace ct {
 
+VerifiedSCT::VerifiedSCT()
+  : status(Status::None)
+  , origin(Origin::Unknown)
+  , logOperatorId(-1)
+  , logDisqualificationTime(UINT64_MAX)
+{
+}
+
 void
 CTVerifyResult::Reset()
 {
-  scts.clear();
+  verifiedScts.clear();
   decodingErrors = 0;
 }
 
 } } // namespace mozilla::ct
--- a/security/certverifier/CTVerifyResult.h
+++ b/security/certverifier/CTVerifyResult.h
@@ -2,30 +2,72 @@
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef CTVerifyResult_h
 #define CTVerifyResult_h
 
+#include "CTLog.h"
 #include "mozilla/Vector.h"
 #include "SignedCertificateTimestamp.h"
 
 namespace mozilla { namespace ct {
 
-typedef Vector<SignedCertificateTimestamp> SCTList;
+// Holds a verified Signed Certificate Timestamp along with the verification
+// status (e.g. valid/invalid) and additional information related to the
+// verification.
+struct VerifiedSCT
+{
+  VerifiedSCT();
+
+  // The original SCT.
+  SignedCertificateTimestamp sct;
+
+  enum class Status
+  {
+    None,
+    // The SCT is from a known log, and the signature is valid.
+    Valid,
+    // The SCT is from a known disqualified log, and the signature is valid.
+    // For the disqualification time of the log see |logDisqualificationTime|.
+    ValidFromDisqualifiedLog,
+    // The SCT is from an unknown log and cannot be verified.
+    UnknownLog,
+    // The SCT is from a known log, but the signature is invalid.
+    InvalidSignature,
+    // The SCT signature is valid, but the timestamp is in the future.
+    // Such SCTs are considered invalid (see RFC 6962, Section 5.2).
+    InvalidTimestamp,
+  };
+
+  enum class Origin
+  {
+    Unknown,
+    Embedded,
+    TLSExtension,
+    OCSPResponse,
+  };
+
+  Status status;
+  Origin origin;
+  CTLogOperatorId logOperatorId;
+  uint64_t logDisqualificationTime;
+};
+
+typedef Vector<VerifiedSCT> VerifiedSCTList;
 
 // Holds Signed Certificate Timestamps verification results.
 class CTVerifyResult
 {
 public:
-  // SCTs that were processed during the verification. For each SCT,
-  // the verification result is stored in its |verificationStatus| field.
-  SCTList scts;
+  // SCTs that were processed during the verification along with their
+  // verification results.
+  VerifiedSCTList verifiedScts;
 
   // The verifier makes the best effort to extract the available SCTs
   // from the binary sources provided to it.
   // If some SCT cannot be extracted due to encoding errors, the verifier
   // proceeds to the next available one. In other words, decoding errors are
   // effectively ignored.
   // Note that a serialized SCT may fail to decode for a "legitimate" reason,
   // e.g. if the SCT is from a future version of the Certificate Transparency
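
Because callers now receive a VerifiedSCT per processed SCT instead of annotated
SignedCertificateTimestamps, inspecting the result becomes a loop over
verifiedScts. A minimal, hypothetical consumer (CertVerifier.cpp below does the
same thing when logging):

    size_t
    CountValidScts(const mozilla::ct::CTVerifyResult& aResult)
    {
      using mozilla::ct::VerifiedSCT;
      size_t valid = 0;
      for (const VerifiedSCT& verifiedSct : aResult.verifiedScts) {
        if (verifiedSct.status == VerifiedSCT::Status::Valid) {
          valid++;
        }
      }
      return valid;
    }
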
--- a/security/certverifier/CertVerifier.cpp
+++ b/security/certverifier/CertVerifier.cpp
@@ -4,16 +4,17 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "CertVerifier.h"
 
 #include <stdint.h>
 
 #include "CTKnownLogs.h"
+#include "CTLogVerifier.h"
 #include "ExtendedValidation.h"
 #include "MultiLogCTVerifier.h"
 #include "NSSCertDBTrustDomain.h"
 #include "NSSErrorsService.h"
 #include "cert.h"
 #include "mozilla/Assertions.h"
 #include "mozilla/Casting.h"
 #include "nsNSSComponent.h"
@@ -154,26 +155,37 @@ BuildCertChainForOneKeyUsage(NSSCertDBTr
 
 void
 CertVerifier::LoadKnownCTLogs()
 {
   mCTVerifier = MakeUnique<MultiLogCTVerifier>();
   for (const CTLogInfo& log : kCTLogList) {
     Input publicKey;
     Result rv = publicKey.Init(
-      BitwiseCast<const uint8_t*, const char*>(log.logKey), log.logKeyLength);
+      BitwiseCast<const uint8_t*, const char*>(log.key), log.keyLength);
     if (rv != Success) {
       MOZ_ASSERT_UNREACHABLE("Failed reading a log key for a known CT Log");
       continue;
     }
-    rv = mCTVerifier->AddLog(publicKey);
+
+    CTLogVerifier logVerifier;
+    const CTLogOperatorInfo& logOperator =
+      kCTLogOperatorList[log.operatorIndex];
+    rv = logVerifier.Init(publicKey, logOperator.id, log.status,
+                          log.disqualificationTime);
     if (rv != Success) {
       MOZ_ASSERT_UNREACHABLE("Failed initializing a known CT Log");
       continue;
     }
+
+    rv = mCTVerifier->AddLog(Move(logVerifier));
+    if (rv != Success) {
+      MOZ_ASSERT_UNREACHABLE("Failed activating a known CT Log");
+      continue;
+    }
   }
 }
 
 Result
 CertVerifier::VerifySignedCertificateTimestamps(
   NSSCertDBTrustDomain& trustDomain, const UniqueCERTCertList& builtChain,
   Input sctsFromTLS, Time time,
   /*optional out*/ CertificateTransparencyInfo* ctInfo)
@@ -253,45 +265,49 @@ CertVerifier::VerifySignedCertificateTim
                            result);
   if (rv != Success) {
     MOZ_LOG(gCertVerifierLog, LogLevel::Debug,
             ("SCT verification failed with fatal error %i\n", rv));
     return rv;
   }
 
   if (MOZ_LOG_TEST(gCertVerifierLog, LogLevel::Debug)) {
-    size_t verifiedCount = 0;
+    size_t validCount = 0;
     size_t unknownLogCount = 0;
+    size_t disqualifiedLogCount = 0;
     size_t invalidSignatureCount = 0;
     size_t invalidTimestampCount = 0;
-    for (const SignedCertificateTimestamp& sct : result.scts) {
-      switch (sct.verificationStatus) {
-        case SignedCertificateTimestamp::VerificationStatus::OK:
-          verifiedCount++;
+    for (const VerifiedSCT& verifiedSct : result.verifiedScts) {
+      switch (verifiedSct.status) {
+        case VerifiedSCT::Status::Valid:
+          validCount++;
           break;
-        case SignedCertificateTimestamp::VerificationStatus::UnknownLog:
+        case VerifiedSCT::Status::ValidFromDisqualifiedLog:
+          disqualifiedLogCount++;
+          break;
+        case VerifiedSCT::Status::UnknownLog:
           unknownLogCount++;
           break;
-        case SignedCertificateTimestamp::VerificationStatus::InvalidSignature:
+        case VerifiedSCT::Status::InvalidSignature:
           invalidSignatureCount++;
           break;
-        case SignedCertificateTimestamp::VerificationStatus::InvalidTimestamp:
+        case VerifiedSCT::Status::InvalidTimestamp:
           invalidTimestampCount++;
           break;
-        case SignedCertificateTimestamp::VerificationStatus::None:
+        case VerifiedSCT::Status::None:
         default:
-          MOZ_ASSERT_UNREACHABLE("Unexpected SCT verificationStatus");
+          MOZ_ASSERT_UNREACHABLE("Unexpected SCT verification status");
       }
     }
     MOZ_LOG(gCertVerifierLog, LogLevel::Debug,
             ("SCT verification result: "
-             "verified=%zu unknownLog=%zu "
+             "valid=%zu unknownLog=%zu disqualifiedLog=%zu "
              "invalidSignature=%zu invalidTimestamp=%zu "
              "decodingErrors=%zu\n",
-             verifiedCount, unknownLogCount,
+             validCount, unknownLogCount, disqualifiedLogCount,
              invalidSignatureCount, invalidTimestampCount,
              result.decodingErrors));
   }
 
   if (ctInfo) {
     ctInfo->processedSCTs = true;
     ctInfo->verifyResult = Move(result);
   }
--- a/security/certverifier/MultiLogCTVerifier.cpp
+++ b/security/certverifier/MultiLogCTVerifier.cpp
@@ -10,37 +10,32 @@
 #include "CTSerialization.h"
 #include "mozilla/Assertions.h"
 #include "mozilla/Move.h"
 
 namespace mozilla { namespace ct {
 
 using namespace mozilla::pkix;
 
-// Note: this moves |sct| to the target list in |result|, invalidating |sct|.
+// Note: this moves |verifiedSct| to the target list in |result|.
 static Result
 StoreVerifiedSct(CTVerifyResult& result,
-                 SignedCertificateTimestamp&& sct,
-                 SignedCertificateTimestamp::VerificationStatus status)
+                 VerifiedSCT&& verifiedSct,
+                 VerifiedSCT::Status status)
 {
-  sct.verificationStatus = status;
-  if (!result.scts.append(Move(sct))) {
+  verifiedSct.status = status;
+  if (!result.verifiedScts.append(Move(verifiedSct))) {
     return Result::FATAL_ERROR_NO_MEMORY;
   }
   return Success;
 }
 
 Result
-MultiLogCTVerifier::AddLog(Input publicKey)
+MultiLogCTVerifier::AddLog(CTLogVerifier&& log)
 {
-  CTLogVerifier log;
-  Result rv = log.Init(publicKey);
-  if (rv != Success) {
-    return rv;
-  }
   if (!mLogs.append(Move(log))) {
     return Result::FATAL_ERROR_NO_MEMORY;
   }
   return Success;
 }
 
 Result
 MultiLogCTVerifier::Verify(Input cert,
@@ -60,55 +55,52 @@ MultiLogCTVerifier::Verify(Input cert,
   if (issuerSubjectPublicKeyInfo.GetLength() > 0 &&
       sctListFromCert.GetLength() > 0) {
     LogEntry precertEntry;
     rv = GetPrecertLogEntry(cert, issuerSubjectPublicKeyInfo, precertEntry);
     if (rv != Success) {
       return rv;
     }
     rv = VerifySCTs(sctListFromCert, precertEntry,
-                    SignedCertificateTimestamp::Origin::Embedded, time,
-                    result);
+                    VerifiedSCT::Origin::Embedded, time, result);
     if (rv != Success) {
       return rv;
     }
   }
 
   LogEntry x509Entry;
   rv = GetX509LogEntry(cert, x509Entry);
   if (rv != Success) {
     return rv;
   }
 
   // Verify SCTs from a stapled OCSP response
   if (sctListFromOCSPResponse.GetLength() > 0) {
     rv = VerifySCTs(sctListFromOCSPResponse, x509Entry,
-                    SignedCertificateTimestamp::Origin::OCSPResponse, time,
-                    result);
+                    VerifiedSCT::Origin::OCSPResponse, time, result);
     if (rv != Success) {
       return rv;
     }
   }
 
   // Verify SCTs from a TLS extension
   if (sctListFromTLSExtension.GetLength() > 0) {
     rv = VerifySCTs(sctListFromTLSExtension, x509Entry,
-                    SignedCertificateTimestamp::Origin::TLSExtension, time,
-                    result);
+                    VerifiedSCT::Origin::TLSExtension, time, result);
     if (rv != Success) {
       return rv;
     }
   }
   return Success;
 }
 
 Result
 MultiLogCTVerifier::VerifySCTs(Input encodedSctList,
                                const LogEntry& expectedEntry,
-                               SignedCertificateTimestamp::Origin origin,
+                               VerifiedSCT::Origin origin,
                                Time time,
                                CTVerifyResult& result)
 {
   Reader listReader;
   Result rv = DecodeSCTList(encodedSctList, listReader);
   if (rv != Success) {
     result.decodingErrors++;
     return Success;
@@ -124,70 +116,86 @@ MultiLogCTVerifier::VerifySCTs(Input enc
 
     Reader encodedSctReader(encodedSct);
     SignedCertificateTimestamp sct;
     rv = DecodeSignedCertificateTimestamp(encodedSctReader, sct);
     if (rv != Success) {
       result.decodingErrors++;
       continue;
     }
-    sct.origin = origin;
 
-    rv = VerifySingleSCT(Move(sct), expectedEntry, time, result);
+    rv = VerifySingleSCT(Move(sct), expectedEntry, origin, time, result);
     if (rv != Success) {
       return rv;
     }
   }
   return Success;
 }
 
 Result
 MultiLogCTVerifier::VerifySingleSCT(SignedCertificateTimestamp&& sct,
                                     const LogEntry& expectedEntry,
+                                    VerifiedSCT::Origin origin,
                                     Time time,
                                     CTVerifyResult& result)
 {
+  VerifiedSCT verifiedSct;
+  verifiedSct.origin = origin;
+  verifiedSct.sct = Move(sct);
+
   CTLogVerifier* matchingLog = nullptr;
   for (auto& log : mLogs) {
-    if (log.keyId() == sct.logId) {
+    if (log.keyId() == verifiedSct.sct.logId) {
       matchingLog = &log;
       break;
     }
   }
 
   if (!matchingLog) {
     // SCT does not match any known log.
-    return StoreVerifiedSct(result, Move(sct),
-      SignedCertificateTimestamp::VerificationStatus::UnknownLog);
+    return StoreVerifiedSct(result, Move(verifiedSct),
+                            VerifiedSCT::Status::UnknownLog);
   }
 
-  if (!matchingLog->SignatureParametersMatch(sct.signature)) {
+  verifiedSct.logOperatorId = matchingLog->operatorId();
+
+  if (!matchingLog->SignatureParametersMatch(verifiedSct.sct.signature)) {
     // SCT signature parameters do not match the log's.
-    return StoreVerifiedSct(result, Move(sct),
-      SignedCertificateTimestamp::VerificationStatus::InvalidSignature);
+    return StoreVerifiedSct(result, Move(verifiedSct),
+                            VerifiedSCT::Status::InvalidSignature);
   }
 
-  Result rv = matchingLog->Verify(expectedEntry, sct);
+  Result rv = matchingLog->Verify(expectedEntry, verifiedSct.sct);
   if (rv != Success) {
     if (rv == Result::ERROR_BAD_SIGNATURE) {
-      return StoreVerifiedSct(result, Move(sct),
-        SignedCertificateTimestamp::VerificationStatus::InvalidSignature);
+      return StoreVerifiedSct(result, Move(verifiedSct),
+                              VerifiedSCT::Status::InvalidSignature);
     }
     return rv;
   }
 
-  // |sct.timestamp| is measured in milliseconds since the epoch,
+  // Make sure the timestamp is legitimate (not in the future).
+  // SCT's |timestamp| is measured in milliseconds since the epoch,
   // ignoring leap seconds. When converting it to a second-level precision
   // pkix::Time, we need to round it either up or down. In our case, rounding up
-  // is more "secure", although practically it does not matter.
-  Time sctTime = TimeFromEpochInSeconds((sct.timestamp + 999u) / 1000u);
-
-  // SCT verified ok, just make sure the timestamp is legitimate.
+  // (towards the future) is more "secure", although practically
+  // it does not matter.
+  Time sctTime =
+    TimeFromEpochInSeconds((verifiedSct.sct.timestamp + 999u) / 1000u);
   if (sctTime > time) {
-    return StoreVerifiedSct(result, Move(sct),
-      SignedCertificateTimestamp::VerificationStatus::InvalidTimestamp);
+    return StoreVerifiedSct(result, Move(verifiedSct),
+                            VerifiedSCT::Status::InvalidTimestamp);
   }
 
-  return StoreVerifiedSct(result, Move(sct),
-    SignedCertificateTimestamp::VerificationStatus::OK);
+  // SCT verified ok, see if the log is qualified. Since SCTs from
+  // disqualified logs are treated as valid under certain circumstances (see
+  // the CT Policy), the log qualification check must be the last one we do.
+  if (matchingLog->isDisqualified()) {
+    verifiedSct.logDisqualificationTime = matchingLog->disqualificationTime();
+    return StoreVerifiedSct(result, Move(verifiedSct),
+                            VerifiedSCT::Status::ValidFromDisqualifiedLog);
+  }
+
+  return StoreVerifiedSct(result, Move(verifiedSct),
+                          VerifiedSCT::Status::Valid);
 }
 
 } } // namespace mozilla::ct
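
A worked example of the millisecond-to-second rounding in VerifySingleSCT() above
(the timestamp is illustrative): rounding up means an SCT stamped even one
millisecond past the verification time still compares as being in the future.

    uint64_t timestampMs = 1480636800001;                    // 2016-12-02T00:00:00.001Z
    uint64_t sctTimeSeconds = (timestampMs + 999u) / 1000u;  // == 1480636801
    // sctTime is then built with TimeFromEpochInSeconds(sctTimeSeconds) and
    // rejected whenever sctTime > time.
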
--- a/security/certverifier/MultiLogCTVerifier.h
+++ b/security/certverifier/MultiLogCTVerifier.h
@@ -18,17 +18,17 @@
 namespace mozilla { namespace ct {
 
 // A Certificate Transparency verifier that can verify Signed Certificate
 // Timestamps from multiple logs.
 class MultiLogCTVerifier
 {
 public:
   // Adds a new log to the list of known logs to verify against.
-  pkix::Result AddLog(pkix::Input publicKey);
+  pkix::Result AddLog(CTLogVerifier&& log);
 
   // Verifies SCTs embedded in the certificate itself, SCTs embedded in a
   // stapled OCSP response, and SCTs obtained via the
   // signed_certificate_timestamp TLS extension on the given |cert|.
   //
   // A certificate is permitted but not required to use multiple sources for
   // SCTs. It is expected that most certificates will use only one source
   // (embedding, TLS extension or OCSP stapling).
@@ -61,24 +61,25 @@ public:
                       CTVerifyResult& result);
 
 private:
   // Verifies a list of SCTs from |encodedSctList| over |expectedEntry|,
   // placing the verification results in |result|. The SCTs in the list
   // come from |origin| (as will be reflected in the origin field of each SCT).
   pkix::Result VerifySCTs(pkix::Input encodedSctList,
                           const LogEntry& expectedEntry,
-                          SignedCertificateTimestamp::Origin origin,
+                          VerifiedSCT::Origin origin,
                           pkix::Time time,
                           CTVerifyResult& result);
 
   // Verifies a single, parsed SCT against all known logs.
   // Note: moves |sct| to the target list in |result|, invalidating |sct|.
   pkix::Result VerifySingleSCT(SignedCertificateTimestamp&& sct,
                                const ct::LogEntry& expectedEntry,
+                               VerifiedSCT::Origin origin,
                                pkix::Time time,
                                CTVerifyResult& result);
 
   // The list of known logs.
   Vector<CTLogVerifier> mLogs;
 };
 
 } } // namespace mozilla::ct
--- a/security/certverifier/SignedCertificateTimestamp.cpp
+++ b/security/certverifier/SignedCertificateTimestamp.cpp
@@ -33,13 +33,14 @@ namespace mozilla {
 bool
 operator==(const ct::Buffer& a, const ct::Buffer& b)
 {
   return (a.empty() && b.empty()) ||
     (a.length() == b.length() && memcmp(a.begin(), b.begin(), a.length()) == 0);
 }
 
 bool
-operator!=(const ct::Buffer& a, const ct::Buffer& b) {
+operator!=(const ct::Buffer& a, const ct::Buffer& b)
+{
   return !(a == b);
 }
 
 } // namespace mozilla
--- a/security/certverifier/SignedCertificateTimestamp.h
+++ b/security/certverifier/SignedCertificateTimestamp.h
@@ -80,45 +80,18 @@ struct SignedCertificateTimestamp
 
   Version version;
   Buffer logId;
   // "timestamp" is the current time in milliseconds, measured since the epoch,
   // ignoring leap seconds. See RFC 6962, Section 3.2.
   uint64_t timestamp;
   Buffer extensions;
   DigitallySigned signature;
-
-  // Supplementary fields, not defined in CT RFC. Set during the various
-  // stages of processing the received SCTs.
-
-  enum class Origin {
-    Unknown,
-    Embedded,
-    TLSExtension,
-    OCSPResponse
-  };
-
-  enum class VerificationStatus {
-    None,
-    // The SCT is from a known log, and the signature is valid.
-    OK,
-    // The SCT is from an unknown log and can not be verified.
-    UnknownLog,
-    // The SCT is from a known log, but the signature is invalid.
-    InvalidSignature,
-    // The SCT signature is valid, but the timestamp is in the future.
-    // Such SCT are considered invalid (see RFC 6962, Section 5.2).
-    InvalidTimestamp
-  };
-
-  Origin origin;
-  VerificationStatus verificationStatus;
 };
 
-
 inline pkix::Result BufferToInput(const Buffer& buffer, pkix::Input& input)
 {
   return input.Init(buffer.begin(), buffer.length());
 }
 
 inline pkix::Result InputToBuffer(pkix::Input input, Buffer& buffer)
 {
   buffer.clear();
--- a/security/certverifier/moz.build
+++ b/security/certverifier/moz.build
@@ -2,16 +2,17 @@
 # vim: set filetype=python:
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 EXPORTS += [
     'BRNameMatchingPolicy.h',
     'CertVerifier.h',
+    'CTLog.h',
     'CTVerifyResult.h',
     'OCSPCache.h',
     'SignedCertificateTimestamp.h',
     'SignedTreeHead.h',
 ]
 
 UNIFIED_SOURCES += [
     'BRNameMatchingPolicy.cpp',
--- a/security/certverifier/tests/gtest/CTLogVerifierTest.cpp
+++ b/security/certverifier/tests/gtest/CTLogVerifierTest.cpp
@@ -17,17 +17,20 @@ using namespace pkix;
 class CTLogVerifierTest : public ::testing::Test
 {
 public:
   void SetUp() override
   {
     // Does nothing if NSS is already initialized.
     MOZ_RELEASE_ASSERT(NSS_NoDB_Init(nullptr) == SECSuccess);
 
-    ASSERT_EQ(Success, mLog.Init(InputForBuffer(GetTestPublicKey())));
+    ASSERT_EQ(Success, mLog.Init(InputForBuffer(GetTestPublicKey()),
+                                 -1 /*operator id*/,
+                                 CTLogStatus::Included,
+                                 0 /*disqualification time*/));
     ASSERT_EQ(GetTestPublicKeyId(), mLog.keyId());
   }
 
 protected:
   CTLogVerifier mLog;
 };
 
 TEST_F(CTLogVerifierTest, VerifiesCertSCT)
@@ -118,12 +121,15 @@ TEST_F(CTLogVerifierTest, DoesNotVerifyI
 
 // Test that excess data after the public key is rejected.
 TEST_F(CTLogVerifierTest, ExcessDataInPublicKey)
 {
   Buffer key = GetTestPublicKey();
   MOZ_RELEASE_ASSERT(key.append("extra", 5));
 
   CTLogVerifier log;
-  EXPECT_NE(Success, log.Init(InputForBuffer(key)));
+  EXPECT_NE(Success, log.Init(InputForBuffer(key),
+                              -1 /*operator id*/,
+                              CTLogStatus::Included,
+                              0 /*disqualification time*/));
 }
 
 } }  // namespace mozilla::ct
--- a/security/certverifier/tests/gtest/CTObjectsExtractorTest.cpp
+++ b/security/certverifier/tests/gtest/CTObjectsExtractorTest.cpp
@@ -24,17 +24,20 @@ public:
     MOZ_RELEASE_ASSERT(NSS_NoDB_Init(nullptr) == SECSuccess);
 
     mTestCert = GetDEREncodedX509Cert();
     mEmbeddedCert = GetDEREncodedTestEmbeddedCert();
     mCaCert = GetDEREncodedCACert();
     mCaCertSPKI = ExtractCertSPKI(mCaCert);
 
     Buffer logPublicKey = GetTestPublicKey();
-    ASSERT_EQ(Success, mLog.Init(InputForBuffer(logPublicKey)));
+    ASSERT_EQ(Success, mLog.Init(InputForBuffer(logPublicKey),
+                                 -1 /*operator id*/,
+                                 CTLogStatus::Included,
+                                 0 /*disqualification time*/));
   }
 
 protected:
   Buffer mTestCert;
   Buffer mEmbeddedCert;
   Buffer mCaCert;
   Buffer mCaCertSPKI;
   CTLogVerifier mLog;
--- a/security/certverifier/tests/gtest/MultiLogCTVerifierTest.cpp
+++ b/security/certverifier/tests/gtest/MultiLogCTVerifierTest.cpp
@@ -21,44 +21,50 @@ namespace mozilla { namespace ct {
 
 using namespace mozilla::pkix;
 
 class MultiLogCTVerifierTest : public ::testing::Test
 {
 public:
   MultiLogCTVerifierTest()
     : mNow(Time::uninitialized)
+    , mLogOperatorID(123)
   {}
 
   void SetUp() override
   {
     // Does nothing if NSS is already initialized.
     MOZ_RELEASE_ASSERT(NSS_NoDB_Init(nullptr) == SECSuccess);
 
-    ASSERT_EQ(Success, mVerifier.AddLog(InputForBuffer(GetTestPublicKey())));
+    CTLogVerifier log;
+    ASSERT_EQ(Success, log.Init(InputForBuffer(GetTestPublicKey()),
+                                mLogOperatorID,
+                                CTLogStatus::Included,
+                                0 /*disqualification time*/));
+    ASSERT_EQ(Success, mVerifier.AddLog(Move(log)));
 
     mTestCert = GetDEREncodedX509Cert();
     mEmbeddedCert = GetDEREncodedTestEmbeddedCert();
     mCaCert = GetDEREncodedCACert();
     mCaCertSPKI = ExtractCertSPKI(mCaCert);
     mIntermediateCert = GetDEREncodedIntermediateCert();
     mIntermediateCertSPKI = ExtractCertSPKI(mIntermediateCert);
 
     // Set the current time making sure all test timestamps are in the past.
     mNow = TimeFromEpochInSeconds(1451606400u); // Date.parse("2016-01-01")/1000
   }
 
-  void CheckForSingleVerifiedSCTInResult(const CTVerifyResult& result,
-    SignedCertificateTimestamp::Origin origin)
+  void CheckForSingleValidSCTInResult(const CTVerifyResult& result,
+                                      VerifiedSCT::Origin origin)
   {
     EXPECT_EQ(0U, result.decodingErrors);
-    ASSERT_EQ(1U, result.scts.length());
-    EXPECT_EQ(SignedCertificateTimestamp::VerificationStatus::OK,
-              result.scts[0].verificationStatus);
-    EXPECT_EQ(origin, result.scts[0].origin);
+    ASSERT_EQ(1U, result.verifiedScts.length());
+    EXPECT_EQ(VerifiedSCT::Status::Valid, result.verifiedScts[0].status);
+    EXPECT_EQ(origin, result.verifiedScts[0].origin);
+    EXPECT_EQ(mLogOperatorID, result.verifiedScts[0].logOperatorId);
   }
 
   // Writes an SCTList containing a single |sct| into |output|.
   void EncodeSCTListForTesting(Input sct, Buffer& output)
   {
     Vector<Input> list;
     ASSERT_TRUE(list.append(Move(sct)));
     ASSERT_EQ(Success, EncodeSCTList(list, output));
@@ -80,29 +86,29 @@ public:
     ExtractEmbeddedSCTList(cert, sctList);
     ASSERT_FALSE(sctList.empty());
 
     CTVerifyResult result;
     ASSERT_EQ(Success,
               mVerifier.Verify(InputForBuffer(cert), InputForBuffer(issuerSPKI),
                                InputForBuffer(sctList), Input(), Input(),
                                mNow, result));
-    CheckForSingleVerifiedSCTInResult(result,
-      SignedCertificateTimestamp::Origin::Embedded);
+    CheckForSingleValidSCTInResult(result, VerifiedSCT::Origin::Embedded);
   }
 
 protected:
   MultiLogCTVerifier mVerifier;
   Buffer mTestCert;
   Buffer mEmbeddedCert;
   Buffer mCaCert;
   Buffer mCaCertSPKI;
   Buffer mIntermediateCert;
   Buffer mIntermediateCertSPKI;
   Time mNow;
+  CTLogOperatorId mLogOperatorID;
 };
 
 // Test that an embedded SCT can be extracted and the extracted SCT contains
 // the expected data. This tests the ExtractEmbeddedSCTList function from
 // CTTestUtils.h that other tests here rely upon.
 TEST_F(MultiLogCTVerifierTest, ExtractEmbeddedSCT)
 {
   SignedCertificateTimestamp sct;
@@ -161,73 +167,94 @@ TEST_F(MultiLogCTVerifierTest, VerifiesS
   EncodeSCTListForTesting(InputForBuffer(sct), sctList);
 
   CTVerifyResult result;
   ASSERT_EQ(Success,
             mVerifier.Verify(InputForBuffer(mTestCert), Input(),
                              Input(), InputForBuffer(sctList), Input(),
                              mNow, result));
 
-  CheckForSingleVerifiedSCTInResult(result,
-    SignedCertificateTimestamp::Origin::OCSPResponse);
+  CheckForSingleValidSCTInResult(result, VerifiedSCT::Origin::OCSPResponse);
 }
 
 TEST_F(MultiLogCTVerifierTest, VerifiesSCTFromTLS)
 {
   Buffer sct(GetTestSignedCertificateTimestamp());
   Buffer sctList;
   EncodeSCTListForTesting(InputForBuffer(sct), sctList);
 
   CTVerifyResult result;
   ASSERT_EQ(Success,
             mVerifier.Verify(InputForBuffer(mTestCert), Input(),
                              Input(), Input(), InputForBuffer(sctList),
                              mNow, result));
 
-  CheckForSingleVerifiedSCTInResult(result,
-    SignedCertificateTimestamp::Origin::TLSExtension);
+  CheckForSingleValidSCTInResult(result, VerifiedSCT::Origin::TLSExtension);
 }
 
 TEST_F(MultiLogCTVerifierTest, VerifiesSCTFromMultipleSources)
 {
   Buffer sct(GetTestSignedCertificateTimestamp());
   Buffer sctList;
   EncodeSCTListForTesting(InputForBuffer(sct), sctList);
 
   CTVerifyResult result;
   ASSERT_EQ(Success,
             mVerifier.Verify(InputForBuffer(mTestCert), Input(), Input(),
                              InputForBuffer(sctList), InputForBuffer(sctList),
                              mNow, result));
 
   // The result should contain verified SCTs from TLS and OCSP origins.
-  EnumSet<SignedCertificateTimestamp::Origin> origins;
-  for (const SignedCertificateTimestamp& sct : result.scts) {
-    EXPECT_EQ(SignedCertificateTimestamp::VerificationStatus::OK,
-              sct.verificationStatus);
-    origins += sct.origin;
+  EnumSet<VerifiedSCT::Origin> origins;
+  for (const VerifiedSCT& verifiedSct : result.verifiedScts) {
+    EXPECT_EQ(VerifiedSCT::Status::Valid, verifiedSct.status);
+    origins += verifiedSct.origin;
   }
-  EXPECT_FALSE(
-    origins.contains(SignedCertificateTimestamp::Origin::Embedded));
-  EXPECT_TRUE(
-    origins.contains(SignedCertificateTimestamp::Origin::OCSPResponse));
-  EXPECT_TRUE(
-    origins.contains(SignedCertificateTimestamp::Origin::TLSExtension));
+  EXPECT_FALSE(origins.contains(VerifiedSCT::Origin::Embedded));
+  EXPECT_TRUE(origins.contains(VerifiedSCT::Origin::OCSPResponse));
+  EXPECT_TRUE(origins.contains(VerifiedSCT::Origin::TLSExtension));
 }
 
 TEST_F(MultiLogCTVerifierTest, IdentifiesSCTFromUnknownLog)
 {
   Buffer sctList;
   GetSCTListWithInvalidLogID(sctList);
 
   CTVerifyResult result;
   ASSERT_EQ(Success,
             mVerifier.Verify(InputForBuffer(mTestCert), Input(),
                              Input(), Input(), InputForBuffer(sctList),
                              mNow, result));
 
   EXPECT_EQ(0U, result.decodingErrors);
-  ASSERT_EQ(1U, result.scts.length());
-  EXPECT_EQ(SignedCertificateTimestamp::VerificationStatus::UnknownLog,
-            result.scts[0].verificationStatus);
+  ASSERT_EQ(1U, result.verifiedScts.length());
+  EXPECT_EQ(VerifiedSCT::Status::UnknownLog, result.verifiedScts[0].status);
+}
+
+TEST_F(MultiLogCTVerifierTest, IdentifiesSCTFromDisqualifiedLog)
+{
+  MultiLogCTVerifier verifier;
+  CTLogVerifier log;
+  const uint64_t disqualificationTime = 12345u;
+  ASSERT_EQ(Success, log.Init(InputForBuffer(GetTestPublicKey()),
+    mLogOperatorID, CTLogStatus::Disqualified, disqualificationTime));
+  ASSERT_EQ(Success, verifier.AddLog(Move(log)));
+
+  Buffer sct(GetTestSignedCertificateTimestamp());
+  Buffer sctList;
+  EncodeSCTListForTesting(InputForBuffer(sct), sctList);
+
+  CTVerifyResult result;
+  ASSERT_EQ(Success,
+            verifier.Verify(InputForBuffer(mTestCert), Input(),
+                            Input(), Input(), InputForBuffer(sctList),
+                            mNow, result));
+
+  EXPECT_EQ(0U, result.decodingErrors);
+  ASSERT_EQ(1U, result.verifiedScts.length());
+  EXPECT_EQ(VerifiedSCT::Status::ValidFromDisqualifiedLog,
+            result.verifiedScts[0].status);
+  EXPECT_EQ(disqualificationTime,
+            result.verifiedScts[0].logDisqualificationTime);
+  EXPECT_EQ(mLogOperatorID, result.verifiedScts[0].logOperatorId);
 }
 
 } } // namespace mozilla::ct
--- a/security/manager/ssl/SSLServerCertVerification.cpp
+++ b/security/manager/ssl/SSLServerCertVerification.cpp
@@ -1222,52 +1222,55 @@ GatherSuccessfulValidationTelemetry(cons
 {
   GatherBaselineRequirementsTelemetry(certList);
   GatherEKUTelemetry(certList);
   GatherRootCATelemetry(certList);
   GatherEndEntityTelemetry(certList);
 }
 
 void
-GatherTelemetryForSingleSCT(const ct::SignedCertificateTimestamp& sct)
+GatherTelemetryForSingleSCT(const ct::VerifiedSCT& verifiedSct)
 {
   // See SSL_SCTS_ORIGIN in Histograms.json.
   uint32_t origin = 0;
-  switch (sct.origin) {
-    case ct::SignedCertificateTimestamp::Origin::Embedded:
+  switch (verifiedSct.origin) {
+    case ct::VerifiedSCT::Origin::Embedded:
       origin = 1;
       break;
-    case ct::SignedCertificateTimestamp::Origin::TLSExtension:
+    case ct::VerifiedSCT::Origin::TLSExtension:
       origin = 2;
       break;
-    case ct::SignedCertificateTimestamp::Origin::OCSPResponse:
+    case ct::VerifiedSCT::Origin::OCSPResponse:
       origin = 3;
       break;
     default:
-      MOZ_ASSERT_UNREACHABLE("Unexpected SCT::Origin type");
+      MOZ_ASSERT_UNREACHABLE("Unexpected VerifiedSCT::Origin type");
   }
   Telemetry::Accumulate(Telemetry::SSL_SCTS_ORIGIN, origin);
 
   // See SSL_SCTS_VERIFICATION_STATUS in Histograms.json.
   uint32_t verificationStatus = 0;
-  switch (sct.verificationStatus) {
-    case ct::SignedCertificateTimestamp::VerificationStatus::OK:
+  switch (verifiedSct.status) {
+    case ct::VerifiedSCT::Status::Valid:
       verificationStatus = 1;
       break;
-    case ct::SignedCertificateTimestamp::VerificationStatus::UnknownLog:
+    case ct::VerifiedSCT::Status::UnknownLog:
       verificationStatus = 2;
       break;
-    case ct::SignedCertificateTimestamp::VerificationStatus::InvalidSignature:
+    case ct::VerifiedSCT::Status::InvalidSignature:
       verificationStatus = 3;
       break;
-    case ct::SignedCertificateTimestamp::VerificationStatus::InvalidTimestamp:
+    case ct::VerifiedSCT::Status::InvalidTimestamp:
       verificationStatus = 4;
       break;
+    case ct::VerifiedSCT::Status::ValidFromDisqualifiedLog:
+      verificationStatus = 5;
+      break;
     default:
-      MOZ_ASSERT_UNREACHABLE("Unexpected SCT::VerificationStatus type");
+      MOZ_ASSERT_UNREACHABLE("Unexpected VerifiedSCT::Status type");
   }
   Telemetry::Accumulate(Telemetry::SSL_SCTS_VERIFICATION_STATUS,
                         verificationStatus);
 }
 
 void
 GatherCertificateTransparencyTelemetry(const UniqueCERTCertList& certList,
                                        const CertificateTransparencyInfo& info)
@@ -1278,28 +1281,29 @@ GatherCertificateTransparencyTelemetry(c
   }
 
   if (!info.processedSCTs) {
     // We didn't receive any SCT data for this connection.
     Telemetry::Accumulate(Telemetry::SSL_SCTS_PER_CONNECTION, 0);
     return;
   }
 
-  for (const ct::SignedCertificateTimestamp& sct : info.verifyResult.scts) {
+  for (const ct::VerifiedSCT& sct : info.verifyResult.verifiedScts) {
     GatherTelemetryForSingleSCT(sct);
   }
 
   // Decoding errors are reported to the 0th bucket
   // of the SSL_SCTS_VERIFICATION_STATUS enumerated probe.
   for (size_t i = 0; i < info.verifyResult.decodingErrors; ++i) {
     Telemetry::Accumulate(Telemetry::SSL_SCTS_VERIFICATION_STATUS, 0);
   }
 
   // Handle the histogram of SCTs counts.
-  uint32_t sctsCount = static_cast<uint32_t>(info.verifyResult.scts.length());
+  uint32_t sctsCount =
+    static_cast<uint32_t>(info.verifyResult.verifiedScts.length());
   // Note that sctsCount can be 0 in case we've received SCT binary data,
   // but it failed to parse (e.g. due to unsupported CT protocol version).
   Telemetry::Accumulate(Telemetry::SSL_SCTS_PER_CONNECTION, sctsCount);
 }
 
 // Note: Takes ownership of |peerCertChain| if SECSuccess is not returned.
 SECStatus
 AuthCertificate(CertVerifier& certVerifier,
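
For reference, the histogram buckets assigned above can be summarized as plain
data; this is only a sketch of the mapping (bucket 0 of
SSL_SCTS_VERIFICATION_STATUS is reserved for SCT decoding errors, as the
decoding-errors loop in GatherCertificateTransparencyTelemetry shows):

    # SSL_SCTS_ORIGIN buckets used in GatherTelemetryForSingleSCT.
    ORIGIN_BUCKETS = {
        "Embedded": 1,
        "TLSExtension": 2,
        "OCSPResponse": 3,
    }

    # SSL_SCTS_VERIFICATION_STATUS buckets; 0 is reserved for SCT decoding
    # errors, which are reported separately.
    VERIFICATION_STATUS_BUCKETS = {
        "Valid": 1,
        "UnknownLog": 2,
        "InvalidSignature": 3,
        "InvalidTimestamp": 4,
        "ValidFromDisqualifiedLog": 5,
    }
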
--- a/security/manager/ssl/nsSSLStatus.cpp
+++ b/security/manager/ssl/nsSSLStatus.cpp
@@ -1,21 +1,21 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-
  *
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
+#include "CTVerifyResult.h"
 #include "mozilla/Casting.h"
 #include "nsSSLStatus.h"
 #include "nsIClassInfoImpl.h"
 #include "nsIObjectOutputStream.h"
 #include "nsIObjectInputStream.h"
 #include "nsNSSCertificate.h"
-#include "SignedCertificateTimestamp.h"
 #include "ssl.h"
 
 NS_IMETHODIMP
 nsSSLStatus::GetServerCert(nsIX509Cert** aServerCert)
 {
   NS_ENSURE_ARG_POINTER(aServerCert);
 
   nsCOMPtr<nsIX509Cert> cert = mServerCert;
@@ -325,53 +325,54 @@ nsSSLStatus::SetServerCert(nsNSSCertific
   mIsEV = (aEVStatus == EVStatus::EV);
   mHasIsEVStatus = true;
 }
 
 void
 nsSSLStatus::SetCertificateTransparencyInfo(
   const mozilla::psm::CertificateTransparencyInfo& info)
 {
-  using mozilla::ct::SignedCertificateTimestamp;
+  using mozilla::ct::VerifiedSCT;
 
   if (!info.enabled) {
     // CT disabled.
     mCertificateTransparencyStatus =
       nsISSLStatus::CERTIFICATE_TRANSPARENCY_NOT_APPLICABLE;
     return;
   }
 
   if (!info.processedSCTs) {
     // No SCTs processed on the connection.
     mCertificateTransparencyStatus =
       nsISSLStatus::CERTIFICATE_TRANSPARENCY_NONE;
     return;
   }
 
-  bool hasOKSCTs = false;
+  bool hasValidSCTs = false;
   bool hasUnknownLogSCTs = false;
   bool hasInvalidSCTs = false;
-  for (const SignedCertificateTimestamp& sct : info.verifyResult.scts) {
-    switch (sct.verificationStatus) {
-      case SignedCertificateTimestamp::VerificationStatus::OK:
-        hasOKSCTs = true;
+  for (const VerifiedSCT& verifiedSct : info.verifyResult.verifiedScts) {
+    switch (verifiedSct.status) {
+      case VerifiedSCT::Status::Valid:
+        hasValidSCTs = true;
         break;
-      case SignedCertificateTimestamp::VerificationStatus::UnknownLog:
+      case VerifiedSCT::Status::UnknownLog:
+      case VerifiedSCT::Status::ValidFromDisqualifiedLog:
         hasUnknownLogSCTs = true;
         break;
-      case SignedCertificateTimestamp::VerificationStatus::InvalidSignature:
-      case SignedCertificateTimestamp::VerificationStatus::InvalidTimestamp:
+      case VerifiedSCT::Status::InvalidSignature:
+      case VerifiedSCT::Status::InvalidTimestamp:
         hasInvalidSCTs = true;
         break;
       default:
-        MOZ_ASSERT_UNREACHABLE("Unexpected SCT::VerificationStatus type");
+        MOZ_ASSERT_UNREACHABLE("Unexpected VerifiedSCT::Status type");
     }
   }
 
-  if (hasOKSCTs) {
+  if (hasValidSCTs) {
     mCertificateTransparencyStatus =
       nsISSLStatus::CERTIFICATE_TRANSPARENCY_OK;
   } else if (hasUnknownLogSCTs) {
     mCertificateTransparencyStatus =
       nsISSLStatus::CERTIFICATE_TRANSPARENCY_UNKNOWN_LOG;
   } else if (hasInvalidSCTs) {
     mCertificateTransparencyStatus =
       nsISSLStatus::CERTIFICATE_TRANSPARENCY_INVALID;
--- a/security/manager/tools/getCTKnownLogs.py
+++ b/security/manager/tools/getCTKnownLogs.py
@@ -1,23 +1,29 @@
 #!/usr/bin/env python
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 """
 Parses a JSON file listing the known Certificate Transparency logs
-(as downloaded from https://www.certificate-transparency.org/known-logs)
-and generates a C++ header file to be included in Firefox.
+(log_list.json) and generates a C++ header file to be included in Firefox.
+
+The current log_list.json file available under security/manager/tools
+was originally downloaded from
+https://www.certificate-transparency.org/known-logs
+and edited to include the disqualification time for the disqualified logs using
+https://cs.chromium.org/chromium/src/net/cert/ct_known_logs_static-inc.h
 """
 
 from __future__ import print_function
 from string import Template
 import argparse
 import base64
+import datetime
 import json
 import os.path
 import sys
 import textwrap
 import urllib2
 
 
 OUTPUT_TEMPLATE = """\
@@ -27,72 +33,149 @@ OUTPUT_TEMPLATE = """\
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 /* This file was automatically generated by $prog. */
 
 #ifndef $include_guard
 #define $include_guard
 
+#include "CTLog.h"
+
 #include <stddef.h>
 
-struct CTLogInfo {
-  const char* const logName;
-  const char* const logUrl;
-  const char* const logKey;
-  const size_t logKeyLength;
+struct CTLogInfo
+{
+  const char* const name;
+  const mozilla::ct::CTLogStatus status;
+  // 0 for qualified logs, disqualification time for disqualified logs
+  // (in milliseconds, measured since the epoch, ignoring leap seconds).
+  const uint64_t disqualificationTime;
+  // Index within kCTLogOperatorList.
+  const size_t operatorIndex;
+  const char* const key;
+  const size_t keyLength;
+};
+
+struct CTLogOperatorInfo
+{
+  const char* const name;
+  const mozilla::ct::CTLogOperatorId id;
 };
 
 const CTLogInfo kCTLogList[] = {
 $logs
 };
 
+const CTLogOperatorInfo kCTLogOperatorList[] = {
+$operators
+};
+
 #endif // $include_guard
 """
 
 
+def get_disqualification_time(time_str):
+    """
+    Convert a time string such as "2017-01-01T00:00:00Z" to an integer
+    representing milliseconds since the epoch.
+    Timezones in the string are not supported and will result in an exception.
+    """
+    t = datetime.datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%SZ")
+    epoch = datetime.datetime.utcfromtimestamp(0)
+    seconds_since_epoch = (t - epoch).total_seconds()
+    return int(seconds_since_epoch * 1000)
+
+
 def get_hex_lines(blob, width):
     """ Convert a binary string to a multiline text of C escape sequences. """
     text = "".join(["\\x{:02x}".format(ord(c)) for c in blob])
     # When escaped, a single byte takes 4 chars (e.g. "\x00").
     # Make sure we don't break an escaped byte between the lines.
     return textwrap.wrap(text, width - width % 4)
 
 
+def get_operator_and_index(json_data, operator_id):
+    """ Return operator's entry from the JSON along with its array index. """
+    matches = [(operator, index) for (index, operator) in enumerate(
+        json_data["operators"]) if operator["id"] == operator_id]
+    assert len(matches) != 0, "No operators with id {0} defined.".format(
+        operator_id)
+    assert len(matches) == 1, "Found multiple operators with id {0}.".format(
+        operator_id)
+    return matches[0]
+
+
 def get_log_info_structs(json_data):
     """ Return array of CTLogInfo initializers for the known logs. """
     tmpl = Template(textwrap.dedent("""\
           { $description,
-            $url,
+            $status,
+            $disqualification_time, // $disqualification_time_comment
+            $operator_index, // $operator_comment
         $indented_log_key,
             $log_key_len }"""))
     initializers = []
     for log in json_data["logs"]:
         log_key = base64.decodestring(log["key"])
+        # "operated_by" is a list, we assume here it always contains one item.
+        operated_by = log["operated_by"]
+        assert len(operated_by) == 1, "operated_by must contain one item."
+        operator, operator_index = get_operator_and_index(json_data,
+                                                          operated_by[0])
+        if "disqualification_time" in log:
+            status = "mozilla::ct::CTLogStatus::Disqualified"
+            disqualification_time = get_disqualification_time(
+                log["disqualification_time"])
+            disqualification_time_comment = 'Date.parse("{0}")'.format(
+                log["disqualification_time"])
+        else:
+            status = "mozilla::ct::CTLogStatus::Included"
+            disqualification_time = 0
+            disqualification_time_comment = "no disqualification time"
         initializers.append(tmpl.substitute(
             # Use json.dumps for C-escaping strings.
             # Not perfect but close enough.
             description=json.dumps(log["description"]),
-            url=json.dumps("https://{0}/".format(log["url"])),
+            operator_index=operator_index,
+            operator_comment="operated by {0}".
+            # The comment must not contain "/".
+            format(operator["name"]).replace("/", "|"),
+            status=status,
+            disqualification_time=disqualification_time,
+            disqualification_time_comment=disqualification_time_comment,
             # Maximum line width is 80.
             indented_log_key="\n".
             join(['    "{0}"'.format(l) for l in get_hex_lines(log_key, 74)]),
             log_key_len=len(log_key)))
     return initializers
 
 
+def get_log_operator_structs(json_data):
+    """ Return array of CTLogOperatorInfo initializers. """
+    tmpl = Template("  { $name, $id }")
+    initializers = []
+    for operator in json_data["operators"]:
+        initializers.append(tmpl.substitute(
+            name=json.dumps(operator["name"]),
+            id=operator["id"]))
+    return initializers
+
+
 def generate_cpp_header_file(json_data, out_file):
     """ Generate the C++ header file for the known logs. """
     filename = os.path.basename(out_file.name)
     include_guard = filename.replace(".", "_").replace("/", "_")
     log_info_initializers = get_log_info_structs(json_data)
+    operator_info_initializers = get_log_operator_structs(json_data)
     out_file.write(Template(OUTPUT_TEMPLATE).substitute(
         prog=os.path.basename(sys.argv[0]),
         include_guard=include_guard,
-        logs=",\n".join(log_info_initializers)))
+        logs=",\n".join(log_info_initializers),
+        operators=",\n".join(operator_info_initializers)))
 
 
 def run(args):
     """
     Load the input JSON file and generate the C++ header according to the
     command line arguments.
     """
     if args.file:
@@ -121,26 +204,23 @@ def parse_arguments_and_run():
     """ Parse the command line arguments and run the program. """
     arg_parser = argparse.ArgumentParser(
         description="Parses a JSON file listing the known "
         "Certificate Transparency logs and generates "
         "a C++ header file to be included in Firefox.",
         epilog="Example: python %s --url" % os.path.basename(sys.argv[0]))
 
     source_group = arg_parser.add_mutually_exclusive_group(required=True)
-    source_group.add_argument("--file",
-                              help="Read the known CT logs JSON file from the "
-                              "specified location on the filesystem.")
-    source_group.add_argument("--url", nargs="?",
-                              const="https://www.certificate-transparency.org/"
-                              "known-logs/log_list.json",
+    source_group.add_argument("--file", nargs="?",
+                              const="log_list.json",
+                              help="Read the known CT logs JSON data from the "
+                              "specified local file (%(const)s by default).")
+    source_group.add_argument("--url",
                               help="Download the known CT logs JSON file "
-                              "from the specified URL. "
-                              "If no URL is given, download the file "
-                              "from %(const)s.")
+                              "from the specified URL.")
 
     arg_parser.add_argument("--out",
                             default="../../certverifier/CTKnownLogs.h",
                             help="Path and filename of the header file "
                             "to be generated. Defaults to %(default)s")
 
     run(arg_parser.parse_args())
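
As a sanity check of get_disqualification_time above, here is a standalone
sketch that re-implements the same arithmetic (so it runs without importing
the script); the expected value was computed by hand from the Certly entry in
log_list.json below:

    import datetime

    def ms_since_epoch(time_str):
        # Same computation as get_disqualification_time: parse a UTC "...Z"
        # timestamp and convert it to whole milliseconds since the epoch.
        t = datetime.datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%SZ")
        epoch = datetime.datetime.utcfromtimestamp(0)
        return int((t - epoch).total_seconds() * 1000)

    assert ms_since_epoch("2016-04-15T00:00:00Z") == 1460678400000
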
 
new file mode 100644
--- /dev/null
+++ b/security/manager/tools/log_list.json
@@ -0,0 +1,128 @@
+{
+  "operators": [
+  {
+    "name": "Google",
+    "id": 0
+  },
+  {
+    "name": "DigiCert",
+    "id": 1
+  },
+  {
+    "name": "Certly",
+    "id": 2
+  },
+  {
+    "name": "Izenpe",
+    "id": 3
+  },
+  {
+    "name": "Symantec",
+    "id": 4
+  },
+  {
+    "name": "Venafi",
+    "id": 5
+  },
+  {
+    "name": "CNNIC",
+    "id": 7
+  },
+  {
+    "name": "WoSign",
+    "id": 8
+  },
+  {
+    "name": "StartCom",
+    "id": 9
+  }
+  ],
+  "logs": [
+    {
+      "description": "Google 'Pilot' log",
+      "key": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEfahLEimAoz2t01p3uMziiLOl/fHTDM0YDOhBRuiBARsV4UvxG2LdNgoIGLrtCzWE0J5APC2em4JlvR8EEEFMoA==",
+      "url": "ct.googleapis.com/pilot",
+      "maximum_merge_delay": 86400,
+      "operated_by": [0]
+    },
+    {
+      "description": "Google 'Aviator' log",
+      "key": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE1/TMabLkDpCjiupacAlP7xNi0I1JYP8bQFAHDG1xhtolSY1l4QgNRzRrvSe8liE+NPWHdjGxfx3JhTsN9x8/6Q==",
+      "url": "ct.googleapis.com/aviator",
+      "maximum_merge_delay": 86400,
+      "operated_by": [0]
+    },
+    {
+      "description": "DigiCert Log Server",
+      "key": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEAkbFvhu7gkAW6MHSrBlpE1n4+HCFRkC5OLAjgqhkTH+/uzSfSl8ois8ZxAD2NgaTZe1M9akhYlrYkes4JECs6A==",
+      "url": "ct1.digicert-ct.com/log",
+      "maximum_merge_delay": 86400,
+      "operated_by": [1]
+    },
+    {
+      "description": "Google 'Rocketeer' log",
+      "key": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEIFsYyDzBi7MxCAC/oJBXK7dHjG+1aLCOkHjpoHPqTyghLpzA9BYbqvnV16mAw04vUjyYASVGJCUoI3ctBcJAeg==",
+      "url": "ct.googleapis.com/rocketeer",
+      "maximum_merge_delay": 86400,
+      "operated_by": [0]
+    },
+    {
+      "description": "Certly.IO log",
+      "key": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAECyPLhWKYYUgEc+tUXfPQB4wtGS2MNvXrjwFCCnyYJifBtd2Sk7Cu+Js9DNhMTh35FftHaHu6ZrclnNBKwmbbSA==",
+      "url": "log.certly.io",
+      "maximum_merge_delay": 86400,
+      "operated_by": [2],
+      "disqualification_time": "2016-04-15T00:00:00Z"
+    },
+    {
+      "description": "Izenpe log",
+      "key": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEJ2Q5DC3cUBj4IQCiDu0s6j51up+TZAkAEcQRF6tczw90rLWXkJMAW7jr9yc92bIKgV8vDXU4lDeZHvYHduDuvg==",
+      "url": "ct.izenpe.com",
+      "maximum_merge_delay": 86400,
+      "operated_by": [3],
+      "disqualification_time": "2016-05-30T00:00:00Z"
+    },
+    {
+      "description": "Symantec log",
+      "key": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEluqsHEYMG1XcDfy1lCdGV0JwOmkY4r87xNuroPS2bMBTP01CEDPwWJePa75y9CrsHEKqAy8afig1dpkIPSEUhg==",
+      "url": "ct.ws.symantec.com",
+      "maximum_merge_delay": 86400,
+      "operated_by": [4]
+    },
+    {
+      "description": "Venafi log",
+      "key": "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAolpIHxdSlTXLo1s6H1OCdpSj/4DyHDc8wLG9wVmLqy1lk9fz4ATVmm+/1iN2Nk8jmctUKK2MFUtlWXZBSpym97M7frGlSaQXUWyA3CqQUEuIJOmlEjKTBEiQAvpfDjCHjlV2Be4qTM6jamkJbiWtgnYPhJL6ONaGTiSPm7Byy57iaz/hbckldSOIoRhYBiMzeNoA0DiRZ9KmfSeXZ1rB8y8X5urSW+iBzf2SaOfzBvDpcoTuAaWx2DPazoOl28fP1hZ+kHUYvxbcMjttjauCFx+JII0dmuZNIwjfeG/GBb9frpSX219k1O4Wi6OEbHEr8at/XQ0y7gTikOxBn/s5wQIDAQAB",
+      "url": "ctlog.api.venafi.com",
+      "maximum_merge_delay": 86400,
+      "operated_by": [5]
+    },
+    {
+      "description": "Symantec 'Vega' log",
+      "key": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE6pWeAv/u8TNtS4e8zf0ZF2L/lNPQWQc/Ai0ckP7IRzA78d0NuBEMXR2G3avTK0Zm+25ltzv9WWis36b4ztIYTQ==",
+      "url": "vega.ws.symantec.com",
+      "maximum_merge_delay": 86400,
+      "operated_by": [4]
+    },
+    {
+      "description": "CNNIC CT log",
+      "key": "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv7UIYZopMgTTJWPp2IXhhuAf1l6a9zM7gBvntj5fLaFm9pVKhKYhVnno94XuXeN8EsDgiSIJIj66FpUGvai5samyetZhLocRuXhAiXXbDNyQ4KR51tVebtEq2zT0mT9liTtGwiksFQccyUsaVPhsHq9gJ2IKZdWauVA2Fm5x9h8B9xKn/L/2IaMpkIYtd967TNTP/dLPgixN1PLCLaypvurDGSVDsuWabA3FHKWL9z8wr7kBkbdpEhLlg2H+NAC+9nGKx+tQkuhZ/hWR65aX+CNUPy2OB9/u2rNPyDydb988LENXoUcMkQT0dU3aiYGkFAY0uZjD2vH97TM20xYtNQIDAQAB",
+      "url": "ctserver.cnnic.cn",
+      "maximum_merge_delay": 86400,
+      "operated_by": [7]
+    },
+    {
+      "description": "WoSign log",
+      "key": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEzBGIey1my66PTTBmJxklIpMhRrQvAdPG+SvVyLpzmwai8IoCnNBrRhgwhbrpJIsO0VtwKAx+8TpFf1rzgkJgMQ==",
+      "url": "ctlog.wosign.com",
+      "maximum_merge_delay": 86400,
+      "operated_by": [8]
+    },
+    {
+      "description": "StartCom log",
+      "key": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAESPNZ8/YFGNPbsu1Gfs/IEbVXsajWTOaft0oaFIZDqUiwy1o/PErK38SCFFWa+PeOQFXc9NKv6nV0+05/YIYuUQ==",
+      "url": "ct.startssl.com",
+      "maximum_merge_delay": 86400,
+      "operated_by": [9]
+    }
+  ]
+}
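
A short, hypothetical snippet (file name and key names taken from the JSON
above) showing the per-log fields getCTKnownLogs.py consumes when a log is
disqualified:

    from __future__ import print_function
    import json

    # List the disqualified logs together with their operator names,
    # mirroring the operator lookup done by get_operator_and_index().
    with open("log_list.json") as f:
        data = json.load(f)

    operators_by_id = {op["id"]: op["name"] for op in data["operators"]}
    for log in data["logs"]:
        if "disqualification_time" in log:
            print(log["description"],
                  operators_by_id[log["operated_by"][0]],
                  log["disqualification_time"])
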
--- a/services/common/blocklist-clients.js
+++ b/services/common/blocklist-clients.js
@@ -33,16 +33,21 @@ const PREF_BLOCKLIST_ADDONS_CHECKED_SECO
 const PREF_BLOCKLIST_PLUGINS_COLLECTION      = "services.blocklist.plugins.collection";
 const PREF_BLOCKLIST_PLUGINS_CHECKED_SECONDS = "services.blocklist.plugins.checked";
 const PREF_BLOCKLIST_GFX_COLLECTION          = "services.blocklist.gfx.collection";
 const PREF_BLOCKLIST_GFX_CHECKED_SECONDS     = "services.blocklist.gfx.checked";
 const PREF_BLOCKLIST_ENFORCE_SIGNING         = "services.blocklist.signing.enforced";
 
 const INVALID_SIGNATURE = "Invalid content/signature";
 
+// FIXME: this was the default path in earlier versions of
+// FirefoxAdapter, so for backwards compatibility we maintain this
+// filename, even though it isn't descriptive of who is using it.
+this.KINTO_STORAGE_PATH    = "kinto.sqlite";
+
 this.FILENAME_ADDONS_JSON  = "blocklist-addons.json";
 this.FILENAME_GFX_JSON     = "blocklist-gfx.json";
 this.FILENAME_PLUGINS_JSON = "blocklist-plugins.json";
 
 function mergeChanges(collection, localRecords, changes) {
   const records = {};
   // Local records by id.
   localRecords.forEach((record) => records[record.id] = collection.cleanLocalFields(record));
@@ -72,24 +77,25 @@ function fetchRemoteCollection(collectio
            .listRecords({sort: "id"});
 }
 
 /**
  * Helper to instantiate a Kinto client based on preferences for remote server
  * URL and bucket name. It uses the `FirefoxAdapter` which relies on SQLite to
  * persist the local DB.
  */
-function kintoClient() {
+function kintoClient(connection) {
   let base = Services.prefs.getCharPref(PREF_SETTINGS_SERVER);
   let bucket = Services.prefs.getCharPref(PREF_BLOCKLIST_BUCKET);
 
   let config = {
     remote: base,
     bucket: bucket,
     adapter: FirefoxAdapter,
+    adapterOptions: {sqliteHandle: connection},
   };
 
   return new Kinto(config);
 }
 
 
 class BlocklistClient {
 
@@ -140,34 +146,35 @@ class BlocklistClient {
    * Synchronize from Kinto server, if necessary.
    *
    * @param {int}  lastModified the lastModified date (on the server) for
                                 the remote collection.
    * @param {Date} serverTime   the current date return by the server.
    * @return {Promise}          which rejects on sync or process failure.
    */
   maybeSync(lastModified, serverTime) {
-    let db = kintoClient();
     let opts = {};
     let enforceCollectionSigning =
       Services.prefs.getBoolPref(PREF_BLOCKLIST_ENFORCE_SIGNING);
 
     // if there is a signerName and collection signing is enforced, add a
     // hook for incoming changes that validates the signature
     if (this.signerName && enforceCollectionSigning) {
       opts.hooks = {
         "incoming-changes": [this.validateCollectionSignature.bind(this)]
       }
     }
 
-    let collection = db.collection(this.collectionName, opts);
 
     return Task.spawn((function* syncCollection() {
+      let connection;
       try {
-        yield collection.db.open();
+        connection = yield FirefoxAdapter.openConnection({path: KINTO_STORAGE_PATH});
+        let db = kintoClient(connection);
+        let collection = db.collection(this.collectionName, opts);
 
         let collectionLastModified = yield collection.db.getLastModified();
         // If the data is up to date, there's no need to sync. We still need
         // to record the fact that a check happened.
         if (lastModified <= collectionLastModified) {
           this.updateLastCheck(serverTime);
           return;
         }
@@ -200,17 +207,17 @@ class BlocklistClient {
         // Read local collection of records.
         let list = yield collection.list();
 
         yield this.processCallback(list.data);
 
         // Track last update.
         this.updateLastCheck(serverTime);
       } finally {
-        collection.db.close();
+        yield connection.close();
       }
     }).bind(this));
   }
 
   /**
    * Save last time server was checked in users prefs.
    *
    * @param {Date} serverTime   the current date return by server.
--- a/services/common/kinto-storage-adapter.js
+++ b/services/common/kinto-storage-adapter.js
@@ -184,30 +184,34 @@ const createStatements = [
 const currentSchemaVersion = 1;
 
 /**
  * Firefox adapter.
  *
  * Uses Sqlite as a backing store.
  *
  * Options:
- *  - path: the filename/path for the Sqlite database. If absent, use SQLITE_PATH.
+ *  - sqliteHandle: a handle to the Sqlite database this adapter will
+ *    use as its backing store. To open such a handle, use the
+ *    static openConnection() method.
  */
 class FirefoxAdapter extends Kinto.adapters.BaseAdapter {
   constructor(collection, options = {}) {
     super();
     const { sqliteHandle = null } = options;
     this.collection = collection;
     this._connection = sqliteHandle;
     this._options = options;
   }
 
-  // We need to be capable of calling this from "outside" the adapter
-  // so that someone can initialize a connection and pass it to us in
-  // adapterOptions.
+  /**
+   * Initialize a Sqlite connection to be suitable for use with Kinto.
+   *
+   * This will be called automatically by openConnection().
+   */
   static _init(connection) {
     return Task.spawn(function* () {
       yield connection.executeTransaction(function* doSetup() {
         const schema = yield connection.getSchemaVersion();
 
         if (schema == 0) {
 
           for (let statementName of createStatements) {
@@ -219,52 +223,40 @@ class FirefoxAdapter extends Kinto.adapt
           throw new Error("Unknown database schema: " + schema);
         }
       });
       return connection;
     });
   }
 
   _executeStatement(statement, params) {
-    if (!this._connection) {
-      throw new Error("The storage adapter is not open");
-    }
     return this._connection.executeCached(statement, params);
   }
 
-  open() {
-    const self = this;
-    return Task.spawn(function* () {
-      if (!self._connection) {
-        const path = self._options.path || SQLITE_PATH;
-        const opts = { path, sharedMemoryCache: false };
-        self._connection = yield Sqlite.openConnection(opts).then(FirefoxAdapter._init);
-      }
-    });
-  }
-
-  close() {
-    if (this._connection) {
-      const promise = this._connection.close();
-      this._connection = null;
-      return promise;
-    }
-    return Promise.resolve();
+  /**
+   * Open and initialize a Sqlite connection to a database that Kinto
+   * can use. When you are done with this connection, close it by
+   * calling close().
+   *
+   * Options:
+   *   - path: The path for the Sqlite database
+   *
+   * @returns SqliteConnection
+   */
+  static async openConnection(options) {
+    const opts = Object.assign({}, { sharedMemoryCache: false }, options);
+    return await Sqlite.openConnection(opts).then(this._init);
   }
 
   clear() {
     const params = { collection_name: this.collection };
     return this._executeStatement(statements.clearData, params);
   }
 
   execute(callback, options = { preload: [] }) {
-    if (!this._connection) {
-      throw new Error("The storage adapter is not open");
-    }
-
     let result;
     const conn = this._connection;
     const collection = this.collection;
 
     return conn.executeTransaction(function* doExecuteTransaction() {
       // Preload specified records from DB, within transaction.
       const parameters = [
         collection,
--- a/services/common/tests/unit/test_blocklist_certificates.js
+++ b/services/common/tests/unit/test_blocklist_certificates.js
@@ -9,31 +9,27 @@ const { FirefoxAdapter } = Cu.import("re
 const BinaryInputStream = CC("@mozilla.org/binaryinputstream;1",
   "nsIBinaryInputStream", "setInputStream");
 
 let server;
 
 // set up what we need to make storage adapters
 const kintoFilename = "kinto.sqlite";
 
-let kintoClient;
-
-function do_get_kinto_collection(collectionName) {
-  if (!kintoClient) {
-    let config = {
-      // Set the remote to be some server that will cause test failure when
-      // hit since we should never hit the server directly, only via maybeSync()
-      remote: "https://firefox.settings.services.mozilla.com/v1/",
-      // Set up the adapter and bucket as normal
-      adapter: FirefoxAdapter,
-      bucket: "blocklists"
-    };
-    kintoClient = new Kinto(config);
-  }
-  return kintoClient.collection(collectionName);
+function do_get_kinto_collection(collectionName, sqliteHandle) {
+  let config = {
+    // Set the remote to be some server that will cause test failure when
+    // hit since we should never hit the server directly, only via maybeSync()
+    remote: "https://firefox.settings.services.mozilla.com/v1/",
+    // Set up the adapter and bucket as normal
+    adapter: FirefoxAdapter,
+    adapterOptions: {sqliteHandle},
+    bucket: "blocklists"
+  };
+  return new Kinto(config).collection(collectionName);
 }
 
 // Some simple tests to demonstrate that the logic inside maybeSync works
 // correctly and that simple kinto operations are working as expected. There
 // are more tests for core Kinto.js (and its storage adapter) in the
 // xpcshell tests under /services/common
 add_task(function* test_something(){
   const configPath = "/v1/";
@@ -67,32 +63,32 @@ add_task(function* test_something(){
   server.registerPathHandler(configPath, handleResponse);
   server.registerPathHandler(recordsPath, handleResponse);
 
   // Test an empty db populates
   let result = yield OneCRLBlocklistClient.maybeSync(2000, Date.now());
 
   // Open the collection, verify it's been populated:
   // Our test data has a single record; it should be in the local collection
-  let collection = do_get_kinto_collection("certificates");
-  yield collection.db.open();
+  let sqliteHandle = yield FirefoxAdapter.openConnection({path: kintoFilename});
+  let collection = do_get_kinto_collection("certificates", sqliteHandle);
   let list = yield collection.list();
   do_check_eq(list.data.length, 1);
-  yield collection.db.close();
+  yield sqliteHandle.close();
 
   // Test the db is updated when we call again with a later lastModified value
   result = yield OneCRLBlocklistClient.maybeSync(4000, Date.now());
 
   // Open the collection, verify it's been updated:
   // Our test data now has two records; both should be in the local collection
-  collection = do_get_kinto_collection("certificates");
-  yield collection.db.open();
+  sqliteHandle = yield FirefoxAdapter.openConnection({path: kintoFilename});
+  collection = do_get_kinto_collection("certificates", sqliteHandle);
   list = yield collection.list();
   do_check_eq(list.data.length, 3);
-  yield collection.db.close();
+  yield sqliteHandle.close();
 
   // Try to maybeSync with the current lastModified value - no connection
   // should be attempted.
   // Clear the kinto base pref so any connections will cause a test failure
   Services.prefs.clearUserPref("services.settings.server");
   yield OneCRLBlocklistClient.maybeSync(4000, Date.now());
 
   // Try again with a lastModified value at some point in the past
--- a/services/common/tests/unit/test_blocklist_clients.js
+++ b/services/common/tests/unit/test_blocklist_clients.js
@@ -9,59 +9,58 @@ const { FileUtils } = Cu.import("resourc
 const { OS } = Cu.import("resource://gre/modules/osfile.jsm");
 
 const { Kinto } = Cu.import("resource://services-common/kinto-offline-client.js");
 const { FirefoxAdapter } = Cu.import("resource://services-common/kinto-storage-adapter.js");
 const BlocklistClients = Cu.import("resource://services-common/blocklist-clients.js");
 
 const BinaryInputStream = CC("@mozilla.org/binaryinputstream;1",
   "nsIBinaryInputStream", "setInputStream");
+const kintoFilename = "kinto.sqlite";
 
 const gBlocklistClients = [
   {client: BlocklistClients.AddonBlocklistClient, filename: BlocklistClients.FILENAME_ADDONS_JSON, testData: ["i808","i720", "i539"]},
   {client: BlocklistClients.PluginBlocklistClient, filename: BlocklistClients.FILENAME_PLUGINS_JSON, testData: ["p1044","p32","p28"]},
   {client: BlocklistClients.GfxBlocklistClient, filename: BlocklistClients.FILENAME_GFX_JSON, testData: ["g204","g200","g36"]},
 ];
 
 
 let server;
-let kintoClient;
 
-function kintoCollection(collectionName) {
-  if (!kintoClient) {
-    const config = {
-      // Set the remote to be some server that will cause test failure when
-      // hit since we should never hit the server directly, only via maybeSync()
-      remote: "https://firefox.settings.services.mozilla.com/v1/",
-      adapter: FirefoxAdapter,
-      bucket: "blocklists"
-    };
-    kintoClient = new Kinto(config);
-  }
-  return kintoClient.collection(collectionName);
+function kintoCollection(collectionName, sqliteHandle) {
+  const config = {
+    // Set the remote to be some server that will cause test failure when
+    // hit since we should never hit the server directly, only via maybeSync()
+    remote: "https://firefox.settings.services.mozilla.com/v1/",
+    adapter: FirefoxAdapter,
+    adapterOptions: {sqliteHandle},
+    bucket: "blocklists"
+  };
+  return new Kinto(config).collection(collectionName);
 }
 
 function* readJSON(filepath) {
   const binaryData = yield OS.File.read(filepath);
   const textData = (new TextDecoder()).decode(binaryData);
   return Promise.resolve(JSON.parse(textData));
 }
 
 function* clear_state() {
   for (let {client} of gBlocklistClients) {
     // Remove last server times.
     Services.prefs.clearUserPref(client.lastCheckTimePref);
 
     // Clear local DB.
-    const collection = kintoCollection(client.collectionName);
+    let sqliteHandle;
     try {
-      yield collection.db.open();
+      sqliteHandle = yield FirefoxAdapter.openConnection({path: kintoFilename});
+      const collection = kintoCollection(client.collectionName, sqliteHandle);
       yield collection.clear();
     } finally {
-      yield collection.db.close();
+      yield sqliteHandle.close();
     }
   }
 
   // Remove profile data.
   for (let {filename} of gBlocklistClients) {
     const blocklist = FileUtils.getFile(KEY_PROFILEDIR, [filename]);
     if (blocklist.exists()) {
       blocklist.remove(true);
@@ -121,21 +120,21 @@ function run_test() {
 
 add_task(function* test_records_obtained_from_server_are_stored_in_db(){
   for (let {client} of gBlocklistClients) {
     // Test an empty db populates
     let result = yield client.maybeSync(2000, Date.now());
 
     // Open the collection, verify it's been populated:
     // Our test data has a single record; it should be in the local collection
-    let collection = kintoCollection(client.collectionName);
-    yield collection.db.open();
+    const sqliteHandle = yield FirefoxAdapter.openConnection({path: kintoFilename});
+    let collection = kintoCollection(client.collectionName, sqliteHandle);
     let list = yield collection.list();
     equal(list.data.length, 1);
-    yield collection.db.close();
+    yield sqliteHandle.close();
   }
 });
 add_task(clear_state);
 
 add_task(function* test_list_is_written_to_file_in_profile(){
   for (let {client, filename, testData} of gBlocklistClients) {
     const profFile = FileUtils.getFile(KEY_PROFILEDIR, [filename]);
     strictEqual(profFile.exists(), false);
--- a/services/common/tests/unit/test_blocklist_signatures.js
+++ b/services/common/tests/unit/test_blocklist_signatures.js
@@ -11,16 +11,17 @@ const { OneCRLBlocklistClient } = Cu.imp
 let server;
 
 const PREF_BLOCKLIST_BUCKET            = "services.blocklist.bucket";
 const PREF_BLOCKLIST_ENFORCE_SIGNING   = "services.blocklist.signing.enforced";
 const PREF_BLOCKLIST_ONECRL_COLLECTION = "services.blocklist.onecrl.collection";
 const PREF_SETTINGS_SERVER             = "services.settings.server";
 const PREF_SIGNATURE_ROOT              = "security.content.signature.root_hash";
 
+const kintoFilename = "kinto.sqlite";
 
 const CERT_DIR = "test_blocklist_signatures/";
 const CHAIN_FILES =
     ["collection_signing_ee.pem",
      "collection_signing_int.pem",
      "collection_signing_root.pem"];
 
 function getFileData(file) {
@@ -56,33 +57,33 @@ function getCertChain() {
 
 function* checkRecordCount(count) {
   // open the collection manually
   const base = Services.prefs.getCharPref(PREF_SETTINGS_SERVER);
   const bucket = Services.prefs.getCharPref(PREF_BLOCKLIST_BUCKET);
   const collectionName =
       Services.prefs.getCharPref(PREF_BLOCKLIST_ONECRL_COLLECTION);
 
+  const sqliteHandle = yield FirefoxAdapter.openConnection({path: kintoFilename});
   const config = {
     remote: base,
     bucket: bucket,
     adapter: FirefoxAdapter,
+    adapterOptions: {sqliteHandle},
   };
 
   const db = new Kinto(config);
   const collection = db.collection(collectionName);
 
-  yield collection.db.open();
-
   // Check we have the expected number of records
   let records = yield collection.list();
   do_check_eq(count, records.data.length);
 
   // Close the collection so the test can exit cleanly
-  yield collection.db.close();
+  yield sqliteHandle.close();
 }
 
 // Check to ensure maybeSync is called with correct values when a changes
 // document contains information on when a collection was last modified
 add_task(function* test_check_signatures(){
   const port = server.identity.primaryPort;
 
   // a response to give the client when the cert chain is expected
--- a/services/common/tests/unit/test_kinto.js
+++ b/services/common/tests/unit/test_kinto.js
@@ -8,45 +8,47 @@ Cu.import("resource://testing-common/htt
 const BinaryInputStream = Components.Constructor("@mozilla.org/binaryinputstream;1",
   "nsIBinaryInputStream", "setInputStream");
 
 var server;
 
 // set up what we need to make storage adapters
 const kintoFilename = "kinto.sqlite";
 
-let kintoClient;
+function do_get_kinto_sqliteHandle() {
+  return FirefoxAdapter.openConnection({path: kintoFilename});
+}
 
-function do_get_kinto_collection() {
-  if (!kintoClient) {
-    let config = {
-      remote:`http://localhost:${server.identity.primaryPort}/v1/`,
-      headers: {Authorization: "Basic " + btoa("user:pass")},
-      adapter: FirefoxAdapter
-    };
-    kintoClient = new Kinto(config);
-  }
-  return kintoClient.collection("test_collection");
+function do_get_kinto_collection(sqliteHandle, collection="test_collection") {
+  let config = {
+    remote:`http://localhost:${server.identity.primaryPort}/v1/`,
+    headers: {Authorization: "Basic " + btoa("user:pass")},
+    adapter: FirefoxAdapter,
+    adapterOptions: {sqliteHandle},
+  };
+  return new Kinto(config).collection(collection);
 }
 
 function* clear_collection() {
-  const collection = do_get_kinto_collection();
+  let sqliteHandle;
   try {
-    yield collection.db.open();
+    sqliteHandle = yield do_get_kinto_sqliteHandle();
+    const collection = do_get_kinto_collection(sqliteHandle);
     yield collection.clear();
   } finally {
-    yield collection.db.close();
+    yield sqliteHandle.close();
   }
 }
 
 // test some operations on a local collection
 add_task(function* test_kinto_add_get() {
-  const collection = do_get_kinto_collection();
+  let sqliteHandle;
   try {
-    yield collection.db.open();
+    sqliteHandle = yield do_get_kinto_sqliteHandle();
+    const collection = do_get_kinto_collection(sqliteHandle);
 
     let newRecord = { foo: "bar" };
     // check a record is created
     let createResult = yield collection.create(newRecord);
     do_check_eq(createResult.data.foo, newRecord.foo);
     // check getting the record gets the same info
     let getResult = yield collection.get(createResult.data.id);
     deepEqual(createResult.data, getResult.data);
@@ -59,108 +61,109 @@ add_task(function* test_kinto_add_get() 
     // try a few creates without waiting for the first few to resolve
     let promises = [];
     promises.push(collection.create(newRecord));
     promises.push(collection.create(newRecord));
     promises.push(collection.create(newRecord));
     yield collection.create(newRecord);
     yield Promise.all(promises);
   } finally {
-    yield collection.db.close();
+    yield sqliteHandle.close();
   }
 });
 
 add_task(clear_collection);
 
 // test some operations on multiple connections
 add_task(function* test_kinto_add_get() {
-  const collection1 = do_get_kinto_collection();
-  const collection2 = kintoClient.collection("test_collection_2");
-
+  let sqliteHandle;
   try {
-    yield collection1.db.open();
-    yield collection2.db.open();
+    sqliteHandle = yield do_get_kinto_sqliteHandle();
+    const collection1 = do_get_kinto_collection(sqliteHandle);
+    const collection2 = do_get_kinto_collection(sqliteHandle, "test_collection_2");
 
     let newRecord = { foo: "bar" };
 
     // perform several write operations alternately without waiting for promises
     // to resolve
     let promises = [];
     for (let i = 0; i < 10; i++) {
       promises.push(collection1.create(newRecord));
       promises.push(collection2.create(newRecord));
     }
 
     // ensure subsequent operations still work
     yield Promise.all([collection1.create(newRecord),
                        collection2.create(newRecord)]);
     yield Promise.all(promises);
   } finally {
-    yield collection1.db.close();
-    yield collection2.db.close();
+    yield sqliteHandle.close();
   }
 });
 
 add_task(clear_collection);
 
 add_task(function* test_kinto_update() {
-  const collection = do_get_kinto_collection();
+  let sqliteHandle;
   try {
-    yield collection.db.open();
+    sqliteHandle = yield do_get_kinto_sqliteHandle();
+    const collection = do_get_kinto_collection(sqliteHandle);
     const newRecord = { foo: "bar" };
     // check a record is created
     let createResult = yield collection.create(newRecord);
     do_check_eq(createResult.data.foo, newRecord.foo);
     do_check_eq(createResult.data._status, "created");
     // check we can update this OK
     let copiedRecord = Object.assign(createResult.data, {});
     deepEqual(createResult.data, copiedRecord);
     copiedRecord.foo = "wibble";
     let updateResult = yield collection.update(copiedRecord);
     // check the field was updated
     do_check_eq(updateResult.data.foo, copiedRecord.foo);
     // check the status is still "created", since we haven't synced
     // the record
     do_check_eq(updateResult.data._status, "created");
   } finally {
-    yield collection.db.close();
+    yield sqliteHandle.close();
   }
 });
 
 add_task(clear_collection);
 
 add_task(function* test_kinto_clear() {
-  const collection = do_get_kinto_collection();
+  let sqliteHandle;
   try {
-    yield collection.db.open();
+    sqliteHandle = yield do_get_kinto_sqliteHandle();
+    const collection = do_get_kinto_collection(sqliteHandle);
 
     // create an expected number of records
     const expected = 10;
     const newRecord = { foo: "bar" };
     for (let i = 0; i < expected; i++) {
       yield collection.create(newRecord);
     }
     // check the collection contains the correct number
     let list = yield collection.list();
     do_check_eq(list.data.length, expected);
     // clear the collection and check again - should be 0
     yield collection.clear();
     list = yield collection.list();
     do_check_eq(list.data.length, 0);
   } finally {
-    yield collection.db.close();
+    yield sqliteHandle.close();
   }
 });
 
 add_task(clear_collection);
 
 add_task(function* test_kinto_delete(){
-  const collection = do_get_kinto_collection();
+  let sqliteHandle;
   try {
-    yield collection.db.open();
+    sqliteHandle = yield do_get_kinto_sqliteHandle();
+    const collection = do_get_kinto_collection(sqliteHandle);
     const newRecord = { foo: "bar" };
     // check a record is created
     let createResult = yield collection.create(newRecord);
     do_check_eq(createResult.data.foo, newRecord.foo);
     // check getting the record gets the same info
     let getResult = yield collection.get(createResult.data.id);
     deepEqual(createResult.data, getResult.data);
     // delete that record
@@ -168,24 +171,25 @@ add_task(function* test_kinto_delete(){
     // check the ID is set on the result
     do_check_eq(getResult.data.id, deleteResult.data.id);
     // and check that get no longer returns the record
     try {
       getResult = yield collection.get(createResult.data.id);
       do_throw("there should not be a result");
     } catch (e) { }
   } finally {
-    yield collection.db.close();
+    yield sqliteHandle.close();
   }
 });
 
 add_task(function* test_kinto_list(){
-  const collection = do_get_kinto_collection();
+  let sqliteHandle;
   try {
-    yield collection.db.open();
+    sqliteHandle = yield do_get_kinto_sqliteHandle();
+    const collection = do_get_kinto_collection(sqliteHandle);
     const expected = 10;
     const created = [];
     for (let i = 0; i < expected; i++) {
       let newRecord = { foo: "test " + i };
       let createResult = yield collection.create(newRecord);
       created.push(createResult.data);
     }
     // check the collection contains the correct number
@@ -199,80 +203,84 @@ add_task(function* test_kinto_list(){
         if (createdRecord.id == retrievedRecord.id) {
           deepEqual(createdRecord, retrievedRecord);
           found = true;
         }
       }
       do_check_true(found);
     }
   } finally {
-    yield collection.db.close();
+    yield sqliteHandle.close();
   }
 });
 
 add_task(clear_collection);
 
 add_task(function* test_loadDump_ignores_already_imported_records(){
-  const collection = do_get_kinto_collection();
+  let sqliteHandle;
   try {
-    yield collection.db.open();
+    sqliteHandle = yield do_get_kinto_sqliteHandle();
+    const collection = do_get_kinto_collection(sqliteHandle);
     const record = {id: "41b71c13-17e9-4ee3-9268-6a41abf9730f", title: "foo", last_modified: 1457896541};
     yield collection.loadDump([record]);
     let impactedRecords = yield collection.loadDump([record]);
     do_check_eq(impactedRecords.length, 0);
   } finally {
-    yield collection.db.close();
+    yield sqliteHandle.close();
   }
 });
 
 add_task(clear_collection);
 
 add_task(function* test_loadDump_should_overwrite_old_records(){
-  const collection = do_get_kinto_collection();
+  let sqliteHandle;
   try {
-    yield collection.db.open();
+    sqliteHandle = yield do_get_kinto_sqliteHandle();
+    const collection = do_get_kinto_collection(sqliteHandle);
     const record = {id: "41b71c13-17e9-4ee3-9268-6a41abf9730f", title: "foo", last_modified: 1457896541};
     yield collection.loadDump([record]);
     const updated = Object.assign({}, record, {last_modified: 1457896543});
     let impactedRecords = yield collection.loadDump([updated]);
     do_check_eq(impactedRecords.length, 1);
   } finally {
-    yield collection.db.close();
+    yield sqliteHandle.close();
   }
 });
 
 add_task(clear_collection);
 
 add_task(function* test_loadDump_should_not_overwrite_unsynced_records(){
-  const collection = do_get_kinto_collection();
+  let sqliteHandle;
   try {
-    yield collection.db.open();
+    sqliteHandle = yield do_get_kinto_sqliteHandle();
+    const collection = do_get_kinto_collection(sqliteHandle);
     const recordId = "41b71c13-17e9-4ee3-9268-6a41abf9730f";
     yield collection.create({id: recordId, title: "foo"}, {useRecordId: true});
     const record = {id: recordId, title: "bar", last_modified: 1457896541};
     let impactedRecords = yield collection.loadDump([record]);
     do_check_eq(impactedRecords.length, 0);
   } finally {
-    yield collection.db.close();
+    yield sqliteHandle.close();
   }
 });
 
 add_task(clear_collection);
 
 add_task(function* test_loadDump_should_not_overwrite_records_without_last_modified(){
-  const collection = do_get_kinto_collection();
+  let sqliteHandle;
   try {
-    yield collection.db.open();
+    sqliteHandle = yield do_get_kinto_sqliteHandle();
+    const collection = do_get_kinto_collection(sqliteHandle);
     const recordId = "41b71c13-17e9-4ee3-9268-6a41abf9730f";
     yield collection.create({id: recordId, title: "foo"}, {synced: true});
     const record = {id: recordId, title: "bar", last_modified: 1457896541};
     let impactedRecords = yield collection.loadDump([record]);
     do_check_eq(impactedRecords.length, 0);
   } finally {
-    yield collection.db.close();
+    yield sqliteHandle.close();
   }
 });
 
 add_task(clear_collection);
 
 // Now do some sanity checks against a server - we're not looking to test
 // core kinto.js functionality here (there is excellent test coverage in
 // kinto.js), more making sure things are basically working as expected.
@@ -300,21 +308,22 @@ add_task(function* test_kinto_sync(){
     } catch (e) {
       dump(`${e}\n`);
     }
   }
   server.registerPathHandler(configPath, handleResponse);
   server.registerPathHandler(recordsPath, handleResponse);
 
   // create an empty collection, sync to populate
-  const collection = do_get_kinto_collection();
+  let sqliteHandle;
   try {
     let result;
+    sqliteHandle = yield do_get_kinto_sqliteHandle();
+    const collection = do_get_kinto_collection(sqliteHandle);
 
-    yield collection.db.open();
     result = yield collection.sync();
     do_check_true(result.ok);
 
     // our test data has a single record; it should be in the local collection
     let list = yield collection.list();
     do_check_eq(list.data.length, 1);
 
     // now sync again; we should now have 2 records
@@ -326,17 +335,17 @@ add_task(function* test_kinto_sync(){
     // sync again; the second records should have been modified
     const before = list.data[0].title;
     result = yield collection.sync();
     do_check_true(result.ok);
     list = yield collection.list();
     const after = list.data[0].title;
     do_check_neq(before, after);
   } finally {
-    yield collection.db.close();
+    yield sqliteHandle.close();
   }
 });
 
 function run_test() {
   // Set up an HTTP Server
   server = new HttpServer();
   server.start(-1);
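
For readers skimming these hunks: the rewritten tests above all share one pattern, sketched below on the assumption that kinto-offline-client.js exports Kinto and kinto-storage-adapter.js exports FirefoxAdapter (as the modules.json hunk later in this patch indicates). Collection names and record contents are illustrative, not taken from the tests.

    // Sketch only: one SQLite handle is opened up front, shared by every
    // collection, and it is the handle (not each collection) that gets closed.
    Cu.import("resource://services-common/kinto-offline-client.js");   // Kinto
    Cu.import("resource://services-common/kinto-storage-adapter.js");  // FirefoxAdapter

    add_task(function* example_shared_sqlite_handle() {
      let sqliteHandle = yield FirefoxAdapter.openConnection({path: "kinto.sqlite"});
      try {
        const config = {adapter: FirefoxAdapter, adapterOptions: {sqliteHandle}};
        const c1 = new Kinto(config).collection("test_collection");
        const c2 = new Kinto(config).collection("test_collection_2");
        yield c1.create({foo: "bar"});
        yield c2.create({foo: "baz"});
      } finally {
        yield sqliteHandle.close();
      }
    });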
 
--- a/services/common/tests/unit/test_storage_adapter.js
+++ b/services/common/tests/unit/test_storage_adapter.js
@@ -2,233 +2,232 @@
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 Cu.import("resource://services-common/kinto-offline-client.js");
 Cu.import("resource://services-common/kinto-storage-adapter.js");
 
 // set up what we need to make storage adapters
 const kintoFilename = "kinto.sqlite";
 
-let gFirefoxAdapter = null;
+function do_get_kinto_connection() {
+  return FirefoxAdapter.openConnection({path: kintoFilename});
+}
 
-function do_get_kinto_adapter() {
-  if (gFirefoxAdapter == null) {
-    gFirefoxAdapter = new FirefoxAdapter("test");
-  }
-  return gFirefoxAdapter;
+function do_get_kinto_adapter(sqliteHandle) {
+  return new FirefoxAdapter("test", {sqliteHandle});
 }
 
 function do_get_kinto_db() {
   let profile = do_get_profile();
   let kintoDB = profile.clone();
   kintoDB.append(kintoFilename);
   return kintoDB;
 }
 
 function cleanup_kinto() {
   add_test(function cleanup_kinto_files(){
     let kintoDB = do_get_kinto_db();
     // clean up the db
     kintoDB.remove(false);
-    // force re-creation of the adapter
-    gFirefoxAdapter = null;
     run_next_test();
   });
 }
 
 function test_collection_operations() {
   add_task(function* test_kinto_clear() {
-    let adapter = do_get_kinto_adapter();
-    yield adapter.open();
+    let sqliteHandle = yield do_get_kinto_connection();
+    let adapter = do_get_kinto_adapter(sqliteHandle);
     yield adapter.clear();
-    yield adapter.close();
+    yield sqliteHandle.close();
   });
 
   // test creating new records... and getting them again
   add_task(function* test_kinto_create_new_get_existing() {
-    let adapter = do_get_kinto_adapter();
-    yield adapter.open();
+    let sqliteHandle = yield do_get_kinto_connection();
+    let adapter = do_get_kinto_adapter(sqliteHandle);
     let record = {id:"test-id", foo:"bar"};
     yield adapter.execute((transaction) => transaction.create(record));
     let newRecord = yield adapter.get("test-id");
     // ensure the record is the same as when it was added
     deepEqual(record, newRecord);
-    yield adapter.close();
+    yield sqliteHandle.close();
   });
 
   // test removing records
   add_task(function* test_kinto_can_remove_some_records() {
-    let adapter = do_get_kinto_adapter();
-    yield adapter.open();
+    let sqliteHandle = yield do_get_kinto_connection();
+    let adapter = do_get_kinto_adapter(sqliteHandle);
     // create a second record
     let record = {id:"test-id-2", foo:"baz"};
     yield adapter.execute((transaction) => transaction.create(record));
     let newRecord = yield adapter.get("test-id-2");
     deepEqual(record, newRecord);
     // delete the record
     yield adapter.execute((transaction) => transaction.delete(record.id));
     newRecord = yield adapter.get(record.id);
     // ... and ensure it's no longer there
     do_check_eq(newRecord, undefined);
     // ensure the other record still exists
     newRecord = yield adapter.get("test-id");
     do_check_neq(newRecord, undefined);
-    yield adapter.close();
+    yield sqliteHandle.close();
   });
 
   // test getting records that don't exist
   add_task(function* test_kinto_get_non_existant() {
-    let adapter = do_get_kinto_adapter();
+    let sqliteHandle = yield do_get_kinto_connection();
+    let adapter = do_get_kinto_adapter(sqliteHandle);
     yield adapter.open();
     // Kinto expects adapters to either:
     let newRecord = yield adapter.get("missing-test-id");
     // resolve with an undefined record
     do_check_eq(newRecord, undefined);
-    yield adapter.close();
+    yield sqliteHandle.close();
   });
 
   // test updating records... and getting them again
   add_task(function* test_kinto_update_get_existing() {
-    let adapter = do_get_kinto_adapter();
-    yield adapter.open();
+    let sqliteHandle = yield do_get_kinto_connection();
+    let adapter = do_get_kinto_adapter(sqliteHandle);
     let originalRecord = {id:"test-id", foo:"bar"};
     let updatedRecord = {id:"test-id", foo:"baz"};
     yield adapter.clear();
     yield adapter.execute((transaction) => transaction.create(originalRecord));
     yield adapter.execute((transaction) => transaction.update(updatedRecord));
     // ensure the record exists
     let newRecord = yield adapter.get("test-id");
     // ensure the record is the same as when it was added
     deepEqual(updatedRecord, newRecord);
-    yield adapter.close();
+    yield sqliteHandle.close();
   });
 
   // test listing records
   add_task(function* test_kinto_list() {
-    let adapter = do_get_kinto_adapter();
-    yield adapter.open();
+    let sqliteHandle = yield do_get_kinto_connection();
+    let adapter = do_get_kinto_adapter(sqliteHandle);
     let originalRecord = {id:"test-id-1", foo:"bar"};
     let records = yield adapter.list();
     do_check_eq(records.length, 1);
     yield adapter.execute((transaction) => transaction.create(originalRecord));
     records = yield adapter.list();
     do_check_eq(records.length, 2);
-    yield adapter.close();
+    yield sqliteHandle.close();
   });
 
   // test aborting transaction
   add_task(function* test_kinto_aborting_transaction() {
-    let adapter = do_get_kinto_adapter();
-    yield adapter.open();
+    let sqliteHandle = yield do_get_kinto_connection();
+    let adapter = do_get_kinto_adapter(sqliteHandle);
     yield adapter.clear();
     let record = {id: 1, foo: "bar"};
     let error = null;
     try {
       yield adapter.execute((transaction) => {
         transaction.create(record);
         throw new Error("unexpected");
       });
     } catch (e) {
       error = e;
     }
     do_check_neq(error, null);
     records = yield adapter.list();
     do_check_eq(records.length, 0);
-    yield adapter.close();
+    yield sqliteHandle.close();
   });
 
   // test save and get last modified
   add_task(function* test_kinto_last_modified() {
     const initialValue = 0;
     const intendedValue = 12345678;
 
-    let adapter = do_get_kinto_adapter();
-    yield adapter.open();
+    let sqliteHandle = yield do_get_kinto_connection();
+    let adapter = do_get_kinto_adapter(sqliteHandle);
     let lastModified = yield adapter.getLastModified();
     do_check_eq(lastModified, initialValue);
     let result = yield adapter.saveLastModified(intendedValue);
     do_check_eq(result, intendedValue);
     lastModified = yield adapter.getLastModified();
     do_check_eq(lastModified, intendedValue);
 
     // test saveLastModified parses values correctly
     result = yield adapter.saveLastModified(" " + intendedValue + " blah");
     // should resolve with the parsed int
     do_check_eq(result, intendedValue);
     // and should have saved correctly
     lastModified = yield adapter.getLastModified();
     do_check_eq(lastModified, intendedValue);
-    yield adapter.close();
+    yield sqliteHandle.close();
   });
 
   // test loadDump(records)
   add_task(function* test_kinto_import_records() {
-    let adapter = do_get_kinto_adapter();
-    yield adapter.open();
+    let sqliteHandle = yield do_get_kinto_connection();
+    let adapter = do_get_kinto_adapter(sqliteHandle);
     let record1 = {id: 1, foo: "bar"};
     let record2 = {id: 2, foo: "baz"};
     let impactedRecords = yield adapter.loadDump([
       record1, record2
     ]);
     do_check_eq(impactedRecords.length, 2);
     let newRecord1 = yield adapter.get("1");
     // ensure the record is the same as when it was added
     deepEqual(record1, newRecord1);
     let newRecord2 = yield adapter.get("2");
     // ensure the record is the same as when it was added
     deepEqual(record2, newRecord2);
-    yield adapter.close();
+    yield sqliteHandle.close();
   });
 
   add_task(function* test_kinto_import_records_should_override_existing() {
-    let adapter = do_get_kinto_adapter();
-    yield adapter.open();
+    let sqliteHandle = yield do_get_kinto_connection();
+    let adapter = do_get_kinto_adapter(sqliteHandle);
     yield adapter.clear();
     records = yield adapter.list();
     do_check_eq(records.length, 0);
     let impactedRecords = yield adapter.loadDump([
       {id: 1, foo: "bar"},
       {id: 2, foo: "baz"},
     ]);
     do_check_eq(impactedRecords.length, 2);
     yield adapter.loadDump([
       {id: 1, foo: "baz"},
       {id: 3, foo: "bab"},
     ]);
     records = yield adapter.list();
     do_check_eq(records.length, 3);
     let newRecord1 = yield adapter.get("1");
     deepEqual(newRecord1.foo, "baz");
-    yield adapter.close();
+    yield sqliteHandle.close();
   });
 
   add_task(function* test_import_updates_lastModified() {
-    let adapter = do_get_kinto_adapter();
+    let sqliteHandle = yield do_get_kinto_connection();
+    let adapter = do_get_kinto_adapter(sqliteHandle);
     yield adapter.open();
     yield adapter.loadDump([
       {id: 1, foo: "bar", last_modified: 1457896541},
       {id: 2, foo: "baz", last_modified: 1458796542},
     ]);
     let lastModified = yield adapter.getLastModified();
     do_check_eq(lastModified, 1458796542);
-    yield adapter.close();
+    yield sqliteHandle.close();
   });
 
   add_task(function* test_import_preserves_older_lastModified() {
-    let adapter = do_get_kinto_adapter();
-    yield adapter.open();
+    let sqliteHandle = yield do_get_kinto_connection();
+    let adapter = do_get_kinto_adapter(sqliteHandle);
     yield adapter.saveLastModified(1458796543);
 
     yield adapter.loadDump([
       {id: 1, foo: "bar", last_modified: 1457896541},
       {id: 2, foo: "baz", last_modified: 1458796542},
     ]);
     let lastModified = yield adapter.getLastModified();
     do_check_eq(lastModified, 1458796543);
-    yield adapter.close();
+    yield sqliteHandle.close();
   });
 }
 
 // test kinto db setup and operations in various scenarios
 // test from scratch - no current existing database
 add_test(function test_db_creation() {
   add_test(function test_create_from_scratch() {
     // ensure the file does not exist in the profile
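
The storage-adapter tests above drive FirefoxAdapter directly rather than through a Kinto collection; a minimal sketch of that usage, again with the record contents made up:

    add_task(function* example_direct_adapter_use() {
      let sqliteHandle = yield FirefoxAdapter.openConnection({path: kintoFilename});
      let adapter = new FirefoxAdapter("test", {sqliteHandle});
      // Writes go through execute() transactions; reads through get()/list().
      yield adapter.execute((transaction) => transaction.create({id: "example-id", foo: "bar"}));
      let record = yield adapter.get("example-id");
      deepEqual(record, {id: "example-id", foo: "bar"});
      yield sqliteHandle.close();
    });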
--- a/taskcluster/ci/build/android.yml
+++ b/taskcluster/ci/build/android.yml
@@ -15,16 +15,17 @@ android-api-15/debug:
     run:
         using: mozharness
         actions: [get-secrets build multi-l10n update]
         config:
             - builds/releng_base_android_64_builds.py
             - disable_signing.py
             - platform_supports_post_upload_to_latest.py
         script: "mozharness/scripts/fx_desktop_build.py"
+        secrets: true
         custom-build-variant-cfg: api-15-debug
         tooltool-downloads: internal
 
 android-x86/opt:
     description: "Android 4.2 x86 Opt"
     index:
         product: mobile
         job-name: android-x86-opt
@@ -39,16 +40,17 @@ android-x86/opt:
     run:
         using: mozharness
         actions: [get-secrets build multi-l10n update]
         config:
             - builds/releng_base_android_64_builds.py
             - disable_signing.py
             - platform_supports_post_upload_to_latest.py
         script: "mozharness/scripts/fx_desktop_build.py"
+        secrets: true
         custom-build-variant-cfg: x86
         tooltool-downloads: internal
 
 android-api-15/opt:
     description: "Android 4.0 API15+ Opt"
     index:
         product: mobile
         job-name: android-api-15-opt
@@ -63,16 +65,17 @@ android-api-15/opt:
     run:
         using: mozharness
         actions: [get-secrets build multi-l10n update]
         config:
             - builds/releng_base_android_64_builds.py
             - disable_signing.py
             - platform_supports_post_upload_to_latest.py
         script: "mozharness/scripts/fx_desktop_build.py"
+        secrets: true
         custom-build-variant-cfg: api-15
         tooltool-downloads: internal
 
 android-api-15-nightly/opt:
     description: "Android 4.0 API15+ Nightly"
     attributes:
         nightly: true
     index:
@@ -90,16 +93,17 @@ android-api-15-nightly/opt:
         using: mozharness
         actions: [get-secrets build multi-l10n update]
         config:
             - builds/releng_base_android_64_builds.py
             - disable_signing.py
             - platform_supports_post_upload_to_latest.py
             - taskcluster_nightly.py
         script: "mozharness/scripts/fx_desktop_build.py"
+        secrets: true
         custom-build-variant-cfg: api-15
         tooltool-downloads: internal
     run-on-projects: []
 
 android-api-15-gradle/opt:
     description: "Android 4.0 API15+ (Gradle) Opt"
     index:
         product: mobile
@@ -128,10 +132,11 @@ android-api-15-gradle/opt:
     run:
         using: mozharness
         actions: [get-secrets build multi-l10n update]
         config:
             - builds/releng_base_android_64_builds.py
             - disable_signing.py
             - platform_supports_post_upload_to_latest.py
         script: "mozharness/scripts/fx_desktop_build.py"
+        secrets: true
         custom-build-variant-cfg: api-15-gradle
         tooltool-downloads: internal
--- a/testing/marionette/client/marionette_driver/marionette.py
+++ b/testing/marionette/client/marionette_driver/marionette.py
@@ -568,16 +568,18 @@ class Marionette(object):
 
         """
         self.host = host
         self.port = self.local_port = int(port)
         self.bin = bin
         self.instance = None
         self.session = None
         self.session_id = None
+        self.process_id = None
+        self.profile = None
         self.window = None
         self.chrome_window = None
         self.baseurl = baseurl
         self._test_name = None
         self.socket_timeout = socket_timeout
         self.crashed = 0
 
         startup_timeout = startup_timeout or self.DEFAULT_STARTUP_TIMEOUT
@@ -1214,21 +1216,21 @@ class Marionette(object):
             self.instance.restart(clean=clean)
             self.raise_for_port()
 
         self.start_session(session_id=session_id)
 
         # Restore the context as used before the restart
         self.set_context(context)
 
-        if in_app and self.session.get("processId"):
+        if in_app and self.process_id:
             # In some cases Firefox restarts itself by spawning into a new process group.
             # As long as mozprocess cannot track that behavior (bug 1284864) we assist by
             # informing about the new process id.
-            self.instance.runner.process_handler.check_for_detached(self.session["processId"])
+            self.instance.runner.process_handler.check_for_detached(self.process_id)
 
     def absolute_url(self, relative_url):
         '''
         Returns an absolute url for files served from Marionette's www directory.
 
         :param relative_url: The url of a static file, relative to Marionette's www directory.
         '''
         return "{0}{1}".format(self.baseurl, relative_url)
@@ -1266,16 +1268,19 @@ class Marionette(object):
         self.wait_for_port(timeout=timeout)
         self.protocol, _ = self.client.connect()
 
         body = {"capabilities": desired_capabilities, "sessionId": session_id}
         resp = self._send_message("newSession", body)
 
         self.session_id = resp["sessionId"]
         self.session = resp["value"] if self.protocol == 1 else resp["capabilities"]
+        # The fallback to the legacy "processId" capability can be removed in Firefox 55
+        self.process_id = self.session.get("moz:processID", self.session.get("processId"))
+        self.profile = self.session.get("moz:profile")
 
         return self.session
 
     @property
     def test_name(self):
         return self._test_name
 
     @test_name.setter
@@ -1295,16 +1300,18 @@ class Marionette(object):
         """
         try:
             if send_request:
                 self._send_message("deleteSession")
         finally:
             if reset_session_id:
                 self.session_id = None
             self.session = None
+            self.process_id = None
+            self.profile = None
             self.window = None
             self.client.close()
 
     @property
     def session_capabilities(self):
         """A JSON dictionary representing the capabilities of the
         current session.
 
--- a/testing/marionette/driver.js
+++ b/testing/marionette/driver.js
@@ -155,17 +155,18 @@ this.GeckoDriver = function(appName, ser
 
     // supported features
     "raisesAccessibilityExceptions": false,
     "rotatable": this.appName == "B2G",
     "proxy": {},
 
     // proprietary extensions
     "specificationLevel": 0,
-    "processId" : Services.appinfo.processID,
+    "moz:processID": Services.appinfo.processID,
+    "moz:profile": Services.dirsvc.get("ProfD", Ci.nsIFile).path,
   };
 
   this.mm = globalMessageManager;
   this.listener = proxy.toListener(() => this.mm, this.sendAsync.bind(this));
 
   // always keep weak reference to current dialogue
   this.dialog = null;
   let handleDialog = (subject, topic) => {
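
For reference, a hedged sketch of what the renamed capabilities look like to a client after this change; the values are illustrative, and the fallback mirrors the one added to marionette.py above.

    // What a "newSession" response now advertises (illustrative values):
    const capabilities = {
      "specificationLevel": 0,
      "moz:processID": 12345,                   // formerly "processId"
      "moz:profile": "/tmp/profile.mozrunner",  // newly exposed profile path
    };
    // Clients spanning the rename read the new key first, then fall back:
    const pid = capabilities["moz:processID"] !== undefined ?
                capabilities["moz:processID"] : capabilities["processId"];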
--- a/testing/marionette/harness/marionette/tests/unit/test_capabilities.py
+++ b/testing/marionette/harness/marionette/tests/unit/test_capabilities.py
@@ -33,18 +33,24 @@ class TestCapabilities(MarionetteTestCas
         self.assertEqual(self.caps["specificationLevel"], 0)
 
     def test_supported_features(self):
         self.assertIn("rotatable", self.caps)
         self.assertIn("acceptInsecureCerts", self.caps)
         self.assertFalse(self.caps["acceptInsecureCerts"])
 
     def test_additional_capabilities(self):
-        self.assertIn("processId", self.caps)
-        self.assertEqual(self.caps["processId"], self.appinfo["processID"])
+        self.assertIn("moz:processID", self.caps)
+        self.assertEqual(self.caps["moz:processID"], self.appinfo["processID"])
+        self.assertEqual(self.marionette.process_id, self.appinfo["processID"])
+
+        current_profile = self.marionette.instance.runner.profile.profile
+        self.assertIn("moz:profile", self.caps)
+        self.assertEqual(self.caps["moz:profile"], current_profile)
+        self.assertEqual(self.marionette.profile, current_profile)
 
     def test_we_can_pass_in_capabilities_on_session_start(self):
         self.marionette.delete_session()
         capabilities = {"desiredCapabilities": {"somethingAwesome": "cake"}}
         self.marionette.start_session(capabilities)
         caps = self.marionette.session_capabilities
         self.assertIn("somethingAwesome", caps)
 
--- a/testing/marionette/harness/marionette/tests/unit/test_crash.py
+++ b/testing/marionette/harness/marionette/tests/unit/test_crash.py
@@ -47,17 +47,17 @@ class BaseCrashTestCase(MarionetteTestCa
     # Reduce the timeout for faster processing of the tests
     socket_timeout = 10
 
     def setUp(self):
         super(BaseCrashTestCase, self).setUp()
 
         self.mozcrash_mock = MockMozCrash(self.marionette)
         self.crash_count = self.marionette.crashed
-        self.pid = self.marionette.session["processId"]
+        self.pid = self.marionette.process_id
         self.remote_uri = self.marionette.absolute_url("javascriptPage.html")
 
     def tearDown(self):
         self.marionette.crashed = self.crash_count
 
         super(BaseCrashTestCase, self).tearDown()
 
     def crash(self, chrome=True):
@@ -93,17 +93,17 @@ class TestCrash(BaseCrashTestCase):
         self.assertRaisesRegexp(IOError, 'Process crashed',
                                 self.crash, chrome=True)
         self.assertEqual(self.marionette.crashed, 1)
         self.assertIsNone(self.marionette.session)
         self.assertRaisesRegexp(MarionetteException, 'Please start a session',
                                 self.marionette.get_url)
 
         self.marionette.start_session()
-        self.assertNotEqual(self.marionette.session['processId'], self.pid)
+        self.assertNotEqual(self.marionette.process_id, self.pid)
 
         # TODO: Bug 1314594 - Causes a hang for the communication between the
         # chrome and frame script.
         # self.marionette.get_url()
 
     @run_if_e10s
     def test_crash_content_process(self):
         # If e10s is disabled the chrome process crashes
@@ -112,17 +112,17 @@ class TestCrash(BaseCrashTestCase):
         self.assertRaisesRegexp(IOError, 'Content process crashed',
                                 self.crash, chrome=False)
         self.assertEqual(self.marionette.crashed, 1)
         self.assertIsNone(self.marionette.session)
         self.assertRaisesRegexp(MarionetteException, 'Please start a session',
                                 self.marionette.get_url)
 
         self.marionette.start_session()
-        self.assertNotEqual(self.marionette.session['processId'], self.pid)
+        self.assertNotEqual(self.marionette.process_id, self.pid)
         self.marionette.get_url()
 
     @expectedFailure
     def test_unexpected_crash(self):
         self.crash(chrome=True)
 
 
 class TestCrashInSetUp(BaseCrashTestCase):
@@ -132,17 +132,17 @@ class TestCrashInSetUp(BaseCrashTestCase
 
         self.assertRaisesRegexp(IOError, 'Process crashed',
                                 self.crash, chrome=True)
         self.assertEqual(self.marionette.crashed, 1)
         self.assertIsNone(self.marionette.session)
 
     def test_crash_in_setup(self):
         self.marionette.start_session()
-        self.assertNotEqual(self.marionette.session['processId'], self.pid)
+        self.assertNotEqual(self.marionette.process_id, self.pid)
 
 
 class TestCrashInTearDown(BaseCrashTestCase):
 
     def tearDown(self):
         try:
             self.assertRaisesRegexp(IOError, 'Process crashed',
                                     self.crash, chrome=True)
--- a/testing/marionette/harness/marionette/tests/unit/test_quit_restart.py
+++ b/testing/marionette/harness/marionette/tests/unit/test_quit_restart.py
@@ -6,17 +6,17 @@ from marionette import MarionetteTestCas
 from marionette_driver.errors import MarionetteException
 
 
 class TestQuitRestart(MarionetteTestCase):
 
     def setUp(self):
         MarionetteTestCase.setUp(self)
 
-        self.pid = self.marionette.session["processId"]
+        self.pid = self.marionette.process_id
         self.session_id = self.marionette.session_id
 
         self.assertNotEqual(self.marionette.get_pref("browser.startup.page"), 3)
         self.marionette.set_pref("browser.startup.page", 3)
 
     def tearDown(self):
         # Ensure to restart a session if none exist for clean-up
         if not self.marionette.session:
@@ -26,17 +26,17 @@ class TestQuitRestart(MarionetteTestCase
 
         MarionetteTestCase.tearDown(self)
 
     def test_force_restart(self):
         self.marionette.restart()
         self.assertEqual(self.marionette.session_id, self.session_id)
 
         # A forced restart will cause a new process id
-        self.assertNotEqual(self.marionette.session["processId"], self.pid)
+        self.assertNotEqual(self.marionette.process_id, self.pid)
 
         # If a preference value is not forced, a restart will cause a reset
         self.assertNotEqual(self.marionette.get_pref("browser.startup.page"), 3)
 
     def test_force_quit(self):
         self.marionette.quit()
 
         self.assertEqual(self.marionette.session, None)
@@ -51,35 +51,35 @@ class TestQuitRestart(MarionetteTestCase
         with self.assertRaises(ValueError):
             self.marionette.restart(in_app=True, clean=True)
 
     def test_in_app_restart(self):
         self.marionette.restart(in_app=True)
         self.assertEqual(self.marionette.session_id, self.session_id)
 
         # An in-app restart will keep the same process id only on Linux
-        if self.marionette.session_capabilities['platformName'] == 'linux':
-            self.assertEqual(self.marionette.session["processId"], self.pid)
+        if self.marionette.session_capabilities["platformName"] == "linux":
+            self.assertEqual(self.marionette.process_id, self.pid)
         else:
-            self.assertNotEqual(self.marionette.session["processId"], self.pid)
+            self.assertNotEqual(self.marionette.process_id, self.pid)
 
         # If a preference value is not forced, a restart will cause a reset
         self.assertNotEqual(self.marionette.get_pref("browser.startup.page"), 3)
 
     def test_in_app_restart_with_callback(self):
         self.marionette.restart(in_app=True,
                                 callback=lambda: self.shutdown(restart=True))
 
         self.assertEqual(self.marionette.session_id, self.session_id)
 
         # An in-app restart will keep the same process id only on Linux
-        if self.marionette.session_capabilities['platformName'] == 'linux':
-            self.assertEqual(self.marionette.session["processId"], self.pid)
+        if self.marionette.session_capabilities["platformName"] == "linux":
+            self.assertEqual(self.marionette.process_id, self.pid)
         else:
-            self.assertNotEqual(self.marionette.session["processId"], self.pid)
+            self.assertNotEqual(self.marionette.process_id, self.pid)
 
         # If a preference value is not forced, a restart will cause a reset
         self.assertNotEqual(self.marionette.get_pref("browser.startup.page"), 3)
 
     def test_in_app_quit(self):
         self.marionette.quit(in_app=True)
 
         self.assertEqual(self.marionette.session, None)
@@ -129,40 +129,40 @@ class TestQuitRestart(MarionetteTestCase
         self.assertNotIn('chrome://', self.marionette.get_url(),
                          "Context doesn't default to content")
 
         # restart while we are in chrome context
         self.marionette.set_context('chrome')
         self.marionette.restart(in_app=True)
 
         # An in-app restart will keep the same process id only on Linux
-        if self.marionette.session_capabilities['platformName'] == 'linux':
-            self.assertEqual(self.marionette.session["processId"], self.pid)
+        if self.marionette.session_capabilities["platformName"] == "linux":
+            self.assertEqual(self.marionette.process_id, self.pid)
         else:
-            self.assertNotEqual(self.marionette.session["processId"], self.pid)
+            self.assertNotEqual(self.marionette.process_id, self.pid)
 
         self.assertIn('chrome://', self.marionette.get_url(),
                       "Not in chrome context after a restart with set_context")
 
     def test_keep_context_after_restart_by_using_context(self):
         # Check that we are in content context which is used by default in Marionette
         self.assertNotIn('chrome://', self.marionette.get_url(),
                          "Context doesn't default to content")
 
         # restart while we are in chrome context
         with self.marionette.using_context('chrome'):
             self.marionette.restart(in_app=True)
 
             # An in-app restart will keep the same process id only on Linux
-            if self.marionette.session_capabilities['platformName'] == 'linux':
-                self.assertEqual(self.marionette.session["processId"], self.pid)
+            if self.marionette.session_capabilities["platformName"] == "linux":
+                self.assertEqual(self.marionette.process_id, self.pid)
             else:
-                self.assertNotEqual(self.marionette.session["processId"], self.pid)
+                self.assertNotEqual(self.marionette.process_id, self.pid)
 
-            self.assertIn('chrome://', self.marionette.get_url(),
+            self.assertIn("chrome://", self.marionette.get_url(),
                           "Not in chrome context after a restart with using_context")
 
     def shutdown(self, restart=False):
         self.marionette.set_context("chrome")
         self.marionette.execute_script("""
             Components.utils.import("resource://gre/modules/Services.jsm");
             let flags = Ci.nsIAppStartup.eAttemptQuit
             if(arguments[0]) {
--- a/testing/mozharness/mozharness/mozilla/secrets.py
+++ b/testing/mozharness/mozharness/mozilla/secrets.py
@@ -47,17 +47,17 @@ class SecretsMixin(object):
         is used, or no secret is written.
         """
         if self.config.get('forced_artifact_build'):
             self.info('Skipping due to forced artifact build.')
             return
 
         secret_files = self.config.get('secret_files', [])
 
-        scm_level = self.config.get('scm-level', 1)
+        scm_level = self.config.get('scm_level', 1)
         subst = {
             'scm-level': scm_level,
         }
 
         for sf in secret_files:
             filename = sf['filename']
             secret_name = sf['secret_name'] % subst
             min_scm_level = sf.get('min_scm_level', 0)
--- a/toolkit/components/extensions/ExtensionStorageSync.jsm
+++ b/toolkit/components/extensions/ExtensionStorageSync.jsm
@@ -208,17 +208,17 @@ if (AppConstants.platform != "android") 
       const cryptoKeyRecord = yield collection.getAny(STORAGE_SYNC_CRYPTO_KEYRING_RECORD_ID);
 
       let data = cryptoKeyRecord.data;
       if (!data) {
         // This is a new keyring. Invent an ID for this record. If this
         // changes, it means a client replaced the keyring, so we need to
         // reupload everything.
         const uuidgen = Cc["@mozilla.org/uuid-generator;1"].getService(Ci.nsIUUIDGenerator);
-        const uuid = uuidgen.generateUUID();
+        const uuid = uuidgen.generateUUID().toString();
         data = {uuid};
       }
       return data;
     }),
 
     /**
      * Retrieve the actual keyring from the crypto collection.
      *
@@ -659,17 +659,17 @@ this.ExtensionStorageSync = {
       // changes is when a new keyring is uploaded, which only happens
       // after a server wipe. So when we get a "conflict" (resolved by
       // server_wins), we check whether the server version has a new
       // UUID. If so, reset our sync status, so that we'll reupload
       // everything.
       const result = yield cryptoCollection.sync();
       if (result.resolved.length > 0) {
         if (result.resolved[0].uuid != cryptoKeyRecord.uuid) {
-          log.info("Detected a new UUID. Reseting sync status for everything.");
+          log.info(`Detected a new UUID (${result.resolved[0].uuid}, was ${cryptoKeyRecord.uuid}). Resetting sync status for everything.`);
           yield cryptoCollection.resetSyncStatus();
 
           // Server version is now correct. Return that result.
           return result;
         }
       }
       // No conflicts, or conflict was just someone else adding keys.
       return result;
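
The `.toString()` additions (here and in the test helper below) matter because nsIUUIDGenerator.generateUUID() returns an nsID object, not a string; a minimal chrome-privileged sketch of the distinction, with a made-up UUID value:

    const uuidgen = Cc["@mozilla.org/uuid-generator;1"]
                      .getService(Ci.nsIUUIDGenerator);
    const idObject = uuidgen.generateUUID();  // an nsID XPCOM object
    const idString = idObject.toString();     // e.g. "{d4c8f1e2-0b6a-4b7e-9c1d-2f3a4b5c6d7e}"
    // Only the string form survives a JSON round trip through the server, so
    // comparing a stored keyring uuid against a freshly generated one is only
    // meaningful once both sides are strings.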
--- a/toolkit/components/extensions/test/xpcshell/test_ext_storage_sync.js
+++ b/toolkit/components/extensions/test/xpcshell/test_ext_storage_sync.js
@@ -190,17 +190,24 @@ class KintoServer {
 
       response.setStatusLine(null, 200, "OK");
       response.setHeader("Content-Type", "application/json; charset=UTF-8");
       response.setHeader("Date", (new Date()).toUTCString());
       response.setHeader("ETag", this.etag.toString());
 
       const records = this.collections.get(collectionId);
       // Can't JSON a Set directly, so convert to Array
-      const data = Array.from(records);
+      let data = Array.from(records);
+      if (request.queryString.includes("_since=")) {
+        data = data.filter(r => !(r._inPast || false));
+      }
+
+      // Remove records that we only needed to serve once.
+      // FIXME: come up with a more coherent idea of time here.
+      // See bug 1321570.
       for (const record of records) {
         if (record._onlyOnce) {
           records.delete(record);
         }
       }
 
       const body = JSON.stringify({
         "data": data,
@@ -249,31 +256,52 @@ class KintoServer {
       "last_modified": etag,
     };
     this.etag = etag;
     const methodName = conflict ? "encryptAndAddRecordWithConflict" : "encryptAndAddRecord";
     this[methodName](new KeyRingEncryptionRemoteTransformer(),
                      "storage-sync-crypto", keysRecord);
   }
 
+  // Add an already-encrypted record.
+  addRecord(collectionId, record) {
+    this.collections.get(collectionId).add(record);
+  }
+
+  // Add a record that is only served if no `_since` is present.
+  //
+  // Since in real life, Kinto only serves a record as part of a
+  // changes feed if `_since` is before the record's modification
+  // time, this can be helpful to test certain kinds of syncing logic.
+  //
+  // FIXME: tracking of "time" in this mock server really needs to be
+  // implemented correctly rather than these hacks. See bug 1321570.
+  addRecordInPast(collectionId, record) {
+    record._inPast = true;
+    this.addRecord(collectionId, record);
+  }
+
   encryptAndAddRecord(transformer, collectionId, record) {
     return transformer.encode(record).then(encrypted => {
-      this.collections.get(collectionId).add(encrypted);
+      this.addRecord(collectionId, encrypted);
     });
   }
 
   // Like encryptAndAddRecord, but add a flag that will only serve
   // this record once.
   //
   // Since in real life, Kinto only serves a record as part of a changes feed
   // once, this can be useful for testing complicated syncing logic.
+  //
+  // FIXME: This kind of logic really needs to be subsumed into some
+  // more-realistic tracking of "time" (simulated by etags). See bug 1321570.
   encryptAndAddRecordOnlyOnce(transformer, collectionId, record) {
     return transformer.encode(record).then(encrypted => {
       encrypted._onlyOnce = true;
-      this.collections.get(collectionId).add(encrypted);
+      this.addRecord(collectionId, encrypted);
     });
   }
 
   // Conflicts block the next push and then appear in the collection specified.
   encryptAndAddRecordWithConflict(transformer, collectionId, record) {
     return transformer.encode(record).then(encrypted => {
       this.conflicts.push({collectionId, encrypted});
     });
@@ -400,17 +428,17 @@ const loggedInUser = {
       token: "some-access-token",
     },
   },
 };
 const defaultCollectionId = extensionIdToCollectionId(loggedInUser, defaultExtensionId);
 
 function uuid() {
   const uuidgen = Cc["@mozilla.org/uuid-generator;1"].getService(Ci.nsIUUIDGenerator);
-  return uuidgen.generateUUID();
+  return uuidgen.generateUUID().toString();
 }
 
 add_task(function* test_key_to_id() {
   equal(keyToId("foo"), "key-foo");
   equal(keyToId("my-new-key"), "key-my_2D_new_2D_key");
   equal(keyToId(""), "key-");
   equal(keyToId("™"), "key-_2122_");
   equal(keyToId("\b"), "key-_8_");
@@ -458,16 +486,39 @@ add_task(function* ensureKeysFor_posts_n
       ok(newKeys.hasKeysFor([extensionId]), `key isn't present for ${extensionId}`);
 
       let posts = server.getPosts();
       equal(posts.length, 1);
       const post = posts[0];
       assertPostedNewRecord(post);
       const body = yield assertPostedEncryptedKeys(post);
       ok(body.keys.collections[extensionId], `keys object should have a key for ${extensionId}`);
+
+      // Try adding another key to make sure that the first post was
+      // OK, even on a new profile.
+      yield cryptoCollection._clear();
+      server.clearPosts();
+      // Restore the first posted keyring
+      server.addRecordInPast("storage-sync-crypto", post.body.data);
+      const extensionId2 = uuid();
+      newKeys = yield ExtensionStorageSync.ensureKeysFor([extensionId2]);
+      ok(newKeys.hasKeysFor([extensionId]), `didn't forget key for ${extensionId}`);
+      ok(newKeys.hasKeysFor([extensionId2]), `new key generated for ${extensionId2}`);
+
+      posts = server.getPosts();
+      // FIXME: some kind of bug where we try to repush the
+      // server_wins version multiple times in a single sync. We
+      // actually push 5 times as of this writing.
+      // See bug 1321571.
+      // equal(posts.length, 1);
+      const newPost = posts[posts.length - 1];
+      const newBody = yield assertPostedEncryptedKeys(newPost);
+      ok(newBody.keys.collections[extensionId], `keys object should have a key for ${extensionId}`);
+      ok(newBody.keys.collections[extensionId2], `keys object should have a key for ${extensionId2}`);
+
     });
   });
 });
 
 add_task(function* ensureKeysFor_pulls_key() {
   // ensureKeysFor is implemented by adding a key to our local record
   // and doing a sync. This means that if the same key exists
   // remotely, we get a "conflict". Ensure that we handle this
@@ -663,16 +714,17 @@ add_task(function* checkSyncKeyRing_over
               "keyring upload should be authorized");
         equal(postedKeys.headers["If-None-Match"], "*",
               "keyring upload should be to empty Kinto server");
         equal(postedKeys.path, collectionRecordsPath("storage-sync-crypto") + "/keys",
               "keyring upload should be to keyring path");
 
         let body = yield new KeyRingEncryptionRemoteTransformer().decode(postedKeys.body.data);
         ok(body.uuid, "new keyring should have a UUID");
+        equal(typeof body.uuid, "string", "keyring UUIDs should be strings");
         notEqual(body.uuid, "abcd",
                  "new keyring should not have the same UUID as previous keyring");
         ok(body.keys,
            "new keyring should have a keys attribute");
         ok(body.keys.default, "new keyring should have a default key");
         // We should keep the extension key that was in our uploaded version.
         deepEqual(extensionKey, body.keys.collections[extensionId],
                   "ensureKeysFor should have returned keyring with the same key that was uploaded");
--- a/toolkit/components/narrate/NarrateControls.jsm
+++ b/toolkit/components/narrate/NarrateControls.jsm
@@ -11,19 +11,20 @@ Cu.import("resource://gre/modules/narrat
 Cu.import("resource://gre/modules/Services.jsm");
 Cu.import("resource://gre/modules/AsyncPrefs.jsm");
 Cu.import("resource://gre/modules/TelemetryStopwatch.jsm");
 
 this.EXPORTED_SYMBOLS = ["NarrateControls"];
 
 var gStrings = Services.strings.createBundle("chrome://global/locale/narrate.properties");
 
-function NarrateControls(mm, win) {
+function NarrateControls(mm, win, languagePromise) {
   this._mm = mm;
   this._winRef = Cu.getWeakReference(win);
+  this._languagePromise = languagePromise;
 
   win.addEventListener("unload", this);
 
   // Append content style sheet in document head
   let style = win.document.createElement("link");
   style.rel = "stylesheet";
   style.href = "chrome://global/skin/narrate.css";
   win.document.head.appendChild(style);
@@ -105,17 +106,17 @@ function NarrateControls(mm, win) {
       <div id="narrate-rate" class="narrate-row">
         <input id="narrate-rate-input" value="0" title="${"speed"}"
                step="5" max="100" min="-100" type="range">
       </div>
       <div id="narrate-voices" class="narrate-row"></div>
       <div class="dropdown-arrow"></div>
     </li>`;
 
-  this.narrator = new Narrator(win);
+  this.narrator = new Narrator(win, languagePromise);
 
   let branch = Services.prefs.getBranch("narrate.");
   let selectLabel = gStrings.GetStringFromName("selectvoicelabel");
   this.voiceSelect = new VoiceSelect(win, selectLabel);
   this.voiceSelect.element.addEventListener("change", this);
   this.voiceSelect.element.id = "voice-select";
   win.speechSynthesis.addEventListener("voiceschanged", this);
   dropdown.querySelector("#narrate-voices").appendChild(
@@ -158,17 +159,17 @@ NarrateControls.prototype = {
         break;
     }
   },
 
   /**
    * Returns true if synth voices are available.
    */
   _setupVoices: function() {
-    return this.narrator.languagePromise.then(language => {
+    return this._languagePromise.then(language => {
       this.voiceSelect.clear();
       let win = this._win;
       let voicePrefs = this._getVoicePref();
       let selectedVoice = voicePrefs[language || "default"];
       let comparer = win.Intl ?
         (new Intl.Collator()).compare : (a, b) => a.localeCompare(b);
       let filter = !Services.prefs.getBoolPref("narrate.filter-voices");
       let options = win.speechSynthesis.getVoices().filter(v => {
@@ -222,17 +223,17 @@ NarrateControls.prototype = {
   _onRateInput: function(evt) {
     AsyncPrefs.set("narrate.rate", parseInt(evt.target.value, 10));
     this.narrator.setRate(this._convertRate(evt.target.value));
   },
 
   _onVoiceChange: function() {
     let voice = this.voice;
     this.narrator.setVoice(voice);
-    this.narrator.languagePromise.then(language => {
+    this._languagePromise.then(language => {
       if (language) {
         let voicePref = this._getVoicePref();
         voicePref[language || "default"] = voice;
         AsyncPrefs.set("narrate.voice", JSON.stringify(voicePref));
       }
     });
   },
 
--- a/toolkit/components/narrate/Narrator.jsm
+++ b/toolkit/components/narrate/Narrator.jsm
@@ -3,55 +3,37 @@
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 "use strict";
 
 const { interfaces: Ci, utils: Cu } = Components;
 
 Cu.import("resource://gre/modules/XPCOMUtils.jsm");
 
-XPCOMUtils.defineLazyModuleGetter(this, "LanguageDetector",
-  "resource:///modules/translation/LanguageDetector.jsm");
 XPCOMUtils.defineLazyModuleGetter(this, "Services",
   "resource://gre/modules/Services.jsm");
 
 this.EXPORTED_SYMBOLS = [ "Narrator" ];
 
 // Maximum time into paragraph when pressing "skip previous" will go
 // to previous paragraph and not the start of current one.
 const PREV_THRESHOLD = 2000;
 // All text-related style rules that we should copy over to the highlight node.
 const kTextStylesRules = ["font-family", "font-kerning", "font-size",
   "font-size-adjust", "font-stretch", "font-variant", "font-weight",
   "line-height", "letter-spacing", "text-orientation",
   "text-transform", "word-spacing"];
 
-function Narrator(win) {
+function Narrator(win, languagePromise) {
   this._winRef = Cu.getWeakReference(win);
+  this._languagePromise = languagePromise;
   this._inTest = Services.prefs.getBoolPref("narrate.test");
   this._speechOptions = {};
   this._startTime = 0;
   this._stopped = false;
-
-  this.languagePromise = new Promise(resolve => {
-    let detect = () => {
-      win.document.removeEventListener("AboutReaderContentReady", detect);
-      let sampleText = this._doc.getElementById(
-        "moz-reader-content").textContent.substring(0, 60 * 1024);
-      LanguageDetector.detectLanguage(sampleText).then(result => {
-        resolve(result.confident ? result.language : null);
-      });
-    };
-
-    if (win.document.body.classList.contains("loaded")) {
-      detect();
-    } else {
-      win.document.addEventListener("AboutReaderContentReady", detect);
-    }
-  });
 }
 
 Narrator.prototype = {
   get _doc() {
     return this._winRef.get().document;
   },
 
   get _win() {
@@ -261,17 +243,17 @@ Narrator.prototype = {
 
   start: function(speechOptions) {
     this._speechOptions = {
       rate: speechOptions.rate,
       voice: this._getVoice(speechOptions.voice)
     };
 
     this._stopped = false;
-    return this.languagePromise.then(language => {
+    return this._languagePromise.then(language => {
       if (!this._speechOptions.voice) {
         this._speechOptions.lang = language;
       }
 
       let tw = this._treeWalker;
       if (!this._isParagraphInView(tw.currentNode)) {
         tw.currentNode = tw.root;
         while (tw.nextNode()) {
--- a/toolkit/components/reader/AboutReader.jsm
+++ b/toolkit/components/reader/AboutReader.jsm
@@ -12,19 +12,22 @@ Cu.import("resource://gre/modules/Reader
 Cu.import("resource://gre/modules/Services.jsm");
 Cu.import("resource://gre/modules/XPCOMUtils.jsm");
 
 XPCOMUtils.defineLazyModuleGetter(this, "AsyncPrefs", "resource://gre/modules/AsyncPrefs.jsm");
 XPCOMUtils.defineLazyModuleGetter(this, "NarrateControls", "resource://gre/modules/narrate/NarrateControls.jsm");
 XPCOMUtils.defineLazyModuleGetter(this, "Rect", "resource://gre/modules/Geometry.jsm");
 XPCOMUtils.defineLazyModuleGetter(this, "Task", "resource://gre/modules/Task.jsm");
 XPCOMUtils.defineLazyModuleGetter(this, "UITelemetry", "resource://gre/modules/UITelemetry.jsm");
+XPCOMUtils.defineLazyModuleGetter(this, "LanguageDetector", "resource:///modules/translation/LanguageDetector.jsm");
 
 var gStrings = Services.strings.createBundle("chrome://global/locale/aboutReader.properties");
 
+const gIsFirefoxDesktop = Services.appinfo.ID == "{ec8030f7-c20a-464f-9b0e-13a3a9e97384}";
+
 var AboutReader = function(mm, win, articlePromise) {
   let url = this._getOriginalUrl(win);
   if (!(url.startsWith("http://") || url.startsWith("https://"))) {
     let errorMsg = "Only http:// and https:// URLs can be loaded in about:reader.";
     if (Services.prefs.getBoolPref("reader.errors.includeURLs"))
       errorMsg += " Tried to load: " + url + ".";
     Cu.reportError(errorMsg);
     win.location.href = "about:blank";
@@ -40,16 +43,19 @@ var AboutReader = function(mm, win, arti
   this._mm.addMessageListener("Reader:GetStoredArticleData", this);
 
   this._docRef = Cu.getWeakReference(doc);
   this._winRef = Cu.getWeakReference(win);
   this._innerWindowId = win.QueryInterface(Ci.nsIInterfaceRequestor)
     .getInterface(Ci.nsIDOMWindowUtils).currentInnerWindowID;
 
   this._article = null;
+  this._languagePromise = new Promise(resolve => {
+    this._foundLanguage = resolve;
+  });
 
   if (articlePromise) {
     this._articlePromise = articlePromise;
   }
 
   this._headerElementRef = Cu.getWeakReference(doc.getElementById("reader-header"));
   this._domainElementRef = Cu.getWeakReference(doc.getElementById("reader-domain"));
   this._titleElementRef = Cu.getWeakReference(doc.getElementById("reader-title"));
@@ -68,17 +74,16 @@ var AboutReader = function(mm, win, arti
 
   Services.obs.addObserver(this, "inner-window-destroyed", false);
 
   doc.addEventListener("visibilitychange", this, false);
 
   this._setupStyleDropdown();
   this._setupButton("close-button", this._onReaderClose.bind(this), "aboutReader.toolbar.close");
 
-  const gIsFirefoxDesktop = Services.appinfo.ID == "{ec8030f7-c20a-464f-9b0e-13a3a9e97384}";
   if (gIsFirefoxDesktop) {
     // we're ready for any external setup, send a signal for that.
     this._mm.sendAsyncMessage("Reader:OnSetup");
   }
 
   let colorSchemeValues = JSON.parse(Services.prefs.getCharPref("reader.color_scheme.values"));
   let colorSchemeOptions = colorSchemeValues.map((value) => {
     return { name: gStrings.GetStringFromName("aboutReader.colorScheme." + value),
@@ -109,17 +114,17 @@ var AboutReader = function(mm, win, arti
 
   this._setupFontSizeButtons();
 
   this._setupContentWidthButtons();
 
   this._setupLineHeightButtons();
 
   if (win.speechSynthesis && Services.prefs.getBoolPref("narrate.enabled")) {
-    new NarrateControls(mm, win);
+    new NarrateControls(mm, win, this._languagePromise);
   }
 
   this._loadArticle();
 };
 
 AboutReader.prototype = {
   _BLOCK_IMAGES_SELECTOR: ".content p > img:only-child, " +
                           ".content p > a:only-child > img:only-child, " +
@@ -708,22 +713,29 @@ AboutReader.prototype = {
         img.onload = function() {
           setImageMargins(img);
         };
       }
     }
   },
 
   _maybeSetTextDirection: function Read_maybeSetTextDirection(article) {
-    if (!article.dir)
-      return;
-
-    // Set "dir" attribute on content
-    this._contentElement.setAttribute("dir", article.dir);
-    this._headerElement.setAttribute("dir", article.dir);
+    if (article.dir) {
+      // Set "dir" attribute on content
+      this._contentElement.setAttribute("dir", article.dir);
+      this._headerElement.setAttribute("dir", article.dir);
+    } else {
+      this._languagePromise.then(language => {
+        // TODO: Remove the hardcoded language codes below once bug 1320265 is resolved.
+        if (["ar", "fa", "he", "ug", "ur"].includes(language)) {
+          this._contentElement.setAttribute("dir", "rtl");
+          this._headerElement.setAttribute("dir", "rtl");
+        }
+      });
+    }
   },
 
   _fixLocalLinks() {
     // We need to do this because preprocessing the content through nsIParserUtils
     // gives back a DOM with a <base> element. That influences how these URLs get
     // resolved, making them no longer match the document URI (which is
     // about:reader?url=...). To fix this, make all the hash URIs absolute. This
     // is hacky, but the alternative of removing the base element has potential
@@ -783,32 +795,41 @@ AboutReader.prototype = {
 
     let parserUtils = Cc["@mozilla.org/parserutils;1"].getService(Ci.nsIParserUtils);
     let contentFragment = parserUtils.parseFragment(article.content,
       Ci.nsIParserUtils.SanitizerDropForms | Ci.nsIParserUtils.SanitizerAllowStyle,
       false, articleUri, this._contentElement);
     this._contentElement.innerHTML = "";
     this._contentElement.appendChild(contentFragment);
     this._fixLocalLinks();
+    this._findLanguage(article.textContent);
     this._maybeSetTextDirection(article);
 
     this._contentElement.style.display = "block";
     this._updateImageMargins();
 
     this._requestFavicon();
     this._doc.body.classList.add("loaded");
 
     this._goToReference(articleUri.ref);
 
     Services.obs.notifyObservers(this._win, "AboutReader:Ready", "");
 
     this._doc.dispatchEvent(
       new this._win.CustomEvent("AboutReaderContentReady", { bubbles: true, cancelable: false }));
   },
 
+  _findLanguage: function(textContent) {
+    if (gIsFirefoxDesktop) {
+      LanguageDetector.detectLanguage(textContent).then(result => {
+        this._foundLanguage(result.confident ? result.language : null);
+      });
+    }
+  },
+
   _hideContent: function() {
     this._headerElement.style.display = "none";
     this._contentElement.style.display = "none";
   },
 
   _showProgressDelayed: function() {
     this._win.setTimeout(function() {
       // No need to show progress if the article has been loaded,
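
The reader-mode change above adds a language-detection step: when an article carries no explicit dir attribute, the detected language is used to fall back to right-to-left layout for a hardcoded set of language codes (to be removed once bug 1320265 is resolved). Below is a minimal standalone sketch of that fallback mapping; it is purely illustrative of the logic in _maybeSetTextDirection() and _foundLanguage(), with hypothetical helper names, written in C++ like the other examples in this changeset.

    #include <array>
    #include <string>

    // Illustrative only: mirrors the hardcoded RTL fallback list used by
    // _maybeSetTextDirection() when the article has no explicit dir.
    static bool IsKnownRTLLanguage(const std::string& aLanguage)
    {
      static const std::array<const char*, 5> kRTLLanguages = {
        { "ar", "fa", "he", "ug", "ur" }
      };
      for (const char* lang : kRTLLanguages) {
        if (aLanguage == lang) {
          return true;
        }
      }
      return false;
    }

    // Hypothetical helper: only apply "rtl" when detection was confident;
    // otherwise leave the document direction untouched, as the JS above does.
    static const char* DirectionForDetectedLanguage(const std::string& aLanguage,
                                                    bool aConfident)
    {
      return (aConfident && IsKnownRTLLanguage(aLanguage)) ? "rtl" : nullptr;
    }
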
--- a/toolkit/components/telemetry/Histograms.json
+++ b/toolkit/components/telemetry/Histograms.json
@@ -8565,17 +8565,17 @@
   },
   "SSL_SCTS_VERIFICATION_STATUS": {
     "alert_emails": ["seceng-telemetry@mozilla.com"],
     "expires_in_version": "never",
     "kind": "enumerated",
     "n_values": 10,
     "bug_numbers": [1293231],
     "releaseChannelCollection": "opt-out",
-    "description": "Verification status of Signed Certificate Timestamps received (0=Decoding error, 1=SCT verified, 2=SCT from unknown log, 3=Invalid SCT signature, 4=SCT timestamp is in the future)"
+    "description": "Verification status of Signed Certificate Timestamps received (0=Decoding error, 1=Valid SCT, 2=SCT from unknown log, 3=Invalid SCT signature, 4=SCT timestamp is in the future, 5=Valid SCT from a disqualified log)"
   },
   "SSL_SERVER_AUTH_EKU": {
     "alert_emails": ["seceng-telemetry@mozilla.com"],
     "expires_in_version": "never",
     "kind": "enumerated",
     "n_values": 10,
     "description": "Presence of of the Server Authenticaton EKU in accepted SSL server certificates (0=No EKU, 1=EKU present and has id_kp_serverAuth, 2=EKU present and has id_kp_serverAuth as well as some other EKU, 3=EKU present but does not contain id_kp_serverAuth)"
   },
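
The updated SSL_SCTS_VERIFICATION_STATUS description now enumerates six buckets, adding 5 for a valid SCT from a disqualified log. The sketch below maps verification outcomes to those bucket values; the enumerator and helper names are invented for illustration, and only the numeric values come from the description string.

    #include <cstdint>

    // Hypothetical enumerators; the numeric values match the histogram's
    // updated description (0-5). Gecko's real code uses its own types and
    // records the value via Telemetry::Accumulate().
    enum class SCTVerificationStatus : uint32_t {
      DecodingError            = 0,
      ValidSCT                 = 1,
      UnknownLog               = 2,
      InvalidSignature         = 3,
      TimestampInFuture        = 4,
      ValidFromDisqualifiedLog = 5,
    };

    static uint32_t ToTelemetryBucket(SCTVerificationStatus aStatus)
    {
      return static_cast<uint32_t>(aStatus);
    }
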
--- a/tools/lint/eslint/modules.json
+++ b/tools/lint/eslint/modules.json
@@ -113,17 +113,18 @@
   "JNI.jsm": ["JNI", "android_log"],
   "jpakeclient.js": ["JPAKEClient", "SendCredentialsController"],
   "Jsbeautify.jsm": ["jsBeautify"],
   "jsdebugger.jsm": ["addDebuggerToGlobal"],
   "json2.js": ["JSON"],
   "keys.js": ["BulkKeyBundle", "SyncKeyBundle"],
   "KeyValueParser.jsm": ["parseKeyValuePairsFromLines", "parseKeyValuePairs", "parseKeyValuePairsFromFile"],
   "kinto-http-client.js": ["KintoHttpClient"],
-  "kinto-offline-client.js": ["loadKinto"],
+  "kinto-offline-client.js": ["Kinto"],
+  "kinto-storage-adapter.js": ["FirefoxAdapter"],
   "loader-plugin-raw.jsm": ["requireRawId"],
   "loader.js": ["WorkerDebuggerLoader", "worker"],
   "Loader.jsm": ["DevToolsLoader", "devtools", "BuiltinProvider", "require", "loader"],
   "logger.jsm": ["Logger"],
   "logging.js": ["getTestLogger", "initTestLogging"],
   "LoginManagerContent.jsm": ["LoginManagerContent", "LoginFormFactory", "UserAutoCompleteResult"],
   "LoginRecipes.jsm": ["LoginRecipesContent", "LoginRecipesParent"],
   "logmanager.js": ["LogManager"],
--- a/uriloader/exthandler/nsExternalHelperAppService.cpp
+++ b/uriloader/exthandler/nsExternalHelperAppService.cpp
@@ -586,16 +586,19 @@ static const nsExtraMimeTypeEntry extraM
   { IMAGE_JPEG, "jpeg,jpg,jfif,pjpeg,pjp", "JPEG Image" },
   { IMAGE_PNG, "png", "PNG Image" },
   { IMAGE_APNG, "apng", "APNG Image" },
   { IMAGE_TIFF, "tiff,tif", "TIFF Image" },
   { IMAGE_XBM, "xbm", "XBM Image" },
   { IMAGE_SVG_XML, "svg", "Scalable Vector Graphics" },
   { MESSAGE_RFC822, "eml", "RFC-822 data" },
   { TEXT_PLAIN, "txt,text", "Text File" },
+  { APPLICATION_JSON, "json", "JavaScript Object Notation" },
+  { TEXT_VTT, "vtt", "Web Video Text Tracks" },
+  { TEXT_CACHE_MANIFEST, "appcache", "Application Cache Manifest" },
   { TEXT_HTML, "html,htm,shtml,ehtml", "HyperText Markup Language" },
   { "application/xhtml+xml", "xhtml,xht", "Extensible HyperText Markup Language" },
   { APPLICATION_MATHML_XML, "mml", "Mathematical Markup Language" },
   { APPLICATION_RDF, "rdf", "Resource Description Framework" },
   { TEXT_XUL, "xul", "XML-Based User Interface Language" },
   { TEXT_XML, "xml,xsl,xbl", "Extensible Markup Language" },
   { TEXT_CSS, "css", "Style Sheet" },
   { TEXT_VCARD, "vcf,vcard", "Contact Information" },
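
The three new rows register built-in extension mappings for JSON, WebVTT, and application cache manifests. The self-contained sketch below shows the kind of extension-to-type lookup such a table supports; the struct and helper only mirror the row shape (MIME type, comma-separated extensions, description) and are not the Gecko implementation.

    #include <string>

    // Stand-in for the table's row shape.
    struct ExtraMimeTypeEntry {
      const char* mMimeType;
      const char* mFileExtensions;
      const char* mDescription;
    };

    static const ExtraMimeTypeEntry kEntries[] = {
      { "application/json", "json", "JavaScript Object Notation" },
      { "text/vtt", "vtt", "Web Video Text Tracks" },
      { "text/cache-manifest", "appcache", "Application Cache Manifest" },
    };

    // Hypothetical lookup: return the MIME type registered for an extension,
    // or nullptr if no entry lists it.
    static const char* MimeTypeForExtension(const std::string& aExt)
    {
      for (const ExtraMimeTypeEntry& entry : kEntries) {
        const std::string extensions = entry.mFileExtensions;
        size_t start = 0;
        while (start <= extensions.size()) {
          size_t end = extensions.find(',', start);
          if (end == std::string::npos) {
            end = extensions.size();
          }
          if (extensions.compare(start, end - start, aExt) == 0) {
            return entry.mMimeType;
          }
          start = end + 1;
        }
      }
      return nullptr;
    }
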
--- a/widget/android/fennec/FennecJNIWrappers.cpp
+++ b/widget/android/fennec/FennecJNIWrappers.cpp
@@ -200,16 +200,24 @@ auto CodecProxy::Flush() const -> bool
 constexpr char CodecProxy::Input_t::name[];
 constexpr char CodecProxy::Input_t::signature[];
 
 auto CodecProxy::Input(mozilla::jni::ByteBuffer::Param a0, mozilla::jni::Object::Param a1, mozilla::jni::Object::Param a2) const -> bool
 {
     return mozilla::jni::Method<Input_t>::Call(CodecProxy::mCtx, nullptr, a0, a1, a2);
 }
 
+constexpr char CodecProxy::IsAdaptivePlaybackSupported_t::name[];
+constexpr char CodecProxy::IsAdaptivePlaybackSupported_t::signature[];
+
+auto CodecProxy::IsAdaptivePlaybackSupported() const -> bool
+{
+    return mozilla::jni::Method<IsAdaptivePlaybackSupported_t>::Call(CodecProxy::mCtx, nullptr);
+}
+
 constexpr char CodecProxy::Release_t::name[];
 constexpr char CodecProxy::Release_t::signature[];
 
 auto CodecProxy::Release() const -> bool
 {
     return mozilla::jni::Method<Release_t>::Call(CodecProxy::mCtx, nullptr);
 }
 
--- a/widget/android/fennec/FennecJNIWrappers.h
+++ b/widget/android/fennec/FennecJNIWrappers.h
@@ -722,16 +722,35 @@ public:
         static const mozilla::jni::CallingThread callingThread =
                 mozilla::jni::CallingThread::ANY;
         static const mozilla::jni::DispatchTarget dispatchTarget =
                 mozilla::jni::DispatchTarget::CURRENT;
     };
 
     auto Input(mozilla::jni::ByteBuffer::Param, mozilla::jni::Object::Param, mozilla::jni::Object::Param) const -> bool;
 
+    struct IsAdaptivePlaybackSupported_t {
+        typedef CodecProxy Owner;
+        typedef bool ReturnType;
+        typedef bool SetterType;
+        typedef mozilla::jni::Args<> Args;
+        static constexpr char name[] = "isAdaptivePlaybackSupported";
+        static constexpr char signature[] =
+                "()Z";
+        static const bool isStatic = false;
+        static const mozilla::jni::ExceptionMode exceptionMode =
+                mozilla::jni::ExceptionMode::ABORT;
+        static const mozilla::jni::CallingThread callingThread =
+                mozilla::jni::CallingThread::ANY;
+        static const mozilla::jni::DispatchTarget dispatchTarget =
+                mozilla::jni::DispatchTarget::CURRENT;
+    };
+
+    auto IsAdaptivePlaybackSupported() const -> bool;
+
     struct Release_t {
         typedef CodecProxy Owner;
         typedef bool ReturnType;
         typedef bool SetterType;
         typedef mozilla::jni::Args<> Args;
         static constexpr char name[] = "release";
         static constexpr char signature[] =
                 "()Z";
--- a/widget/gtk/nsWindow.cpp
+++ b/widget/gtk/nsWindow.cpp
@@ -492,24 +492,24 @@ nsWindow::~nsWindow()
     mTransparencyBitmap = nullptr;
 
     Destroy();
 }
 
 /* static */ void
 nsWindow::ReleaseGlobals()
 {
-  for (uint32_t i = 0; i < ArrayLength(gCursorCache); ++i) {
-    if (gCursorCache[i]) {
+  for (auto & cursor : gCursorCache) {
+    if (cursor) {
 #if (MOZ_WIDGET_GTK == 3)
-      g_object_unref(gCursorCache[i]);
+      g_object_unref(cursor);
 #else
-      gdk_cursor_unref(gCursorCache[i]);
+      gdk_cursor_unref(cursor);
 #endif
-      gCursorCache[i] = nullptr;
+      cursor = nullptr;
     }
   }
 }
 
 void
 nsWindow::CommonCreate(nsIWidget *aParent, bool aListenForResizes)
 {
     mParent = aParent;
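
The rewritten ReleaseGlobals() loop binds each cache slot by reference (auto &), which is what lets it both release and null out the stored cursor in place. A tiny standalone illustration of why the reference matters, using plain pointers rather than GDK cursors:

    #include <array>
    #include <cstdio>

    int main()
    {
      std::array<int*, 3> cache = { new int(1), new int(2), new int(3) };

      // With a reference, the element itself is cleared, as in ReleaseGlobals().
      for (auto& slot : cache) {
        delete slot;
        slot = nullptr;
      }

      // Had the loop used plain `auto slot`, only a copy of each pointer would
      // have been nulled and the array would still hold dangling pointers.
      std::printf("%p %p %p\n", (void*)cache[0], (void*)cache[1], (void*)cache[2]);
      return 0;
    }
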
@@ -655,17 +655,17 @@ SetWidgetForHierarchy(GdkWindow *aWindow
 {
     gpointer data;
     gdk_window_get_user_data(aWindow, &data);
 
     if (data != aOldWidget) {
         if (!GTK_IS_WIDGET(data))
             return;
 
-        GtkWidget* widget = static_cast<GtkWidget*>(data);
+        auto* widget = static_cast<GtkWidget*>(data);
         if (gtk_widget_get_parent(widget) != aOldWidget)
             return;
 
         // This window belongs to a child widget, which will no longer be a
         // child of aOldWidget.
         gtk_widget_reparent(widget, aNewWidget);
 
         return;
@@ -890,17 +890,17 @@ nsWindow::ReparentNativeWidget(nsIWidget
         // reparent.
         MOZ_ASSERT(gdk_window_is_destroyed(mGdkWindow),
                    "live GdkWindow with no widget");
         return;
     }
     MOZ_ASSERT(!gdk_window_is_destroyed(mGdkWindow),
                "destroyed GdkWindow with widget");
 
-    nsWindow* newParent = static_cast<nsWindow*>(aNewParent);
+    auto* newParent = static_cast<nsWindow*>(aNewParent);
     GdkWindow* newParentWindow = newParent->mGdkWindow;
     GtkWidget* newContainer = newParent->GetMozContainerWidget();
     GtkWindow* shell = GTK_WINDOW(mShell);
 
     if (shell && gtk_window_get_transient_for(shell)) {
       GtkWindow* topLevelParent =
           GTK_WINDOW(gtk_widget_get_toplevel(newContainer));
       gtk_window_set_transient_for(shell, topLevelParent);
@@ -935,17 +935,17 @@ nsWindow::ReparentNativeWidgetInternal(n
 
         if (!mIsTopLevel) {
             gdk_window_reparent(mGdkWindow, aNewParentWindow,
                                 DevicePixelsToGdkCoordRoundDown(mBounds.x),
                                 DevicePixelsToGdkCoordRoundDown(mBounds.y));
         }
     }
 
-    nsWindow* newParent = static_cast<nsWindow*>(aNewParent);
+    auto* newParent = static_cast<nsWindow*>(aNewParent);
     bool parentHasMappedToplevel =
         newParent && newParent->mHasMappedToplevel;
     if (mHasMappedToplevel != parentHasMappedToplevel) {
         SetHasMappedToplevel(parentHasMappedToplevel);
     }
 }
 
 void
@@ -1169,17 +1169,17 @@ nsWindow::Resize(double aX, double aY, d
 
 void
 nsWindow::ResizePluginSocketWidget()
 {
     // e10s specific, an eWindowType_plugin_ipc_chrome holds its own
     // nsPluginNativeWindowGtk wrapper. We are responsible for resizing
     // the embedded socket widget.
     if (mWindowType == eWindowType_plugin_ipc_chrome) {
-        nsPluginNativeWindowGtk* wrapper = (nsPluginNativeWindowGtk*)
+        auto* wrapper = (nsPluginNativeWindowGtk*)
           GetNativeData(NS_NATIVE_PLUGIN_OBJECT_PTR);
         if (wrapper) {
             wrapper->width = mBounds.width;
             wrapper->height = mBounds.height;
             wrapper->SetAllocation();
         }
     }
 }
@@ -1563,18 +1563,18 @@ nsWindow::UpdateClientOffset()
                           &length_returned,
                           (guchar **) &frame_extents) ||
         length_returned/sizeof(glong) != 4) {
         mClientOffset = nsIntPoint(0, 0);
         return;
     }
 
     // data returned is in the order left, right, top, bottom
-    int32_t left = int32_t(frame_extents[0]);
-    int32_t top = int32_t(frame_extents[2]);
+    auto left = int32_t(frame_extents[0]);
+    auto top = int32_t(frame_extents[2]);
 
     g_free(frame_extents);
 
     mClientOffset = nsIntPoint(left, top);
 }
 
 LayoutDeviceIntPoint
 nsWindow::GetClientOffset()
@@ -2551,18 +2551,18 @@ nsWindow::OnEnterNotifyEvent(GdkEventCro
 
     DispatchInputEvent(&event);
 }
 
 // XXX Is this the right test for embedding cases?
 static bool
 is_top_level_mouse_exit(GdkWindow* aWindow, GdkEventCrossing *aEvent)
 {
-    gint x = gint(aEvent->x_root);
-    gint y = gint(aEvent->y_root);
+    auto x = gint(aEvent->x_root);
+    auto y = gint(aEvent->y_root);
     GdkDisplay* display = gdk_window_get_display(aWindow);
     GdkWindow* winAtPt = gdk_display_get_window_at_pointer(display, &x, &y);
     if (!winAtPt)
         return true;
     GdkWindow* topLevelAtPt = gdk_window_get_toplevel(winAtPt);
     GdkWindow* topLevelWidget = gdk_window_get_toplevel(aWindow);
     return topLevelAtPt != topLevelWidget;
 }
@@ -3378,17 +3378,17 @@ nsWindow::ThemeChanged()
         return;
 
     // Dispatch theme change notification to all child windows
     GList *children =
         gdk_window_peek_children(mGdkWindow);
     while (children) {
         GdkWindow *gdkWin = GDK_WINDOW(children->data);
 
-        nsWindow *win = (nsWindow*) g_object_get_data(G_OBJECT(gdkWin),
+        auto *win = (nsWindow*) g_object_get_data(G_OBJECT(gdkWin),
                                                       "nsWindow");
 
         if (win && win != this) { // guard against infinite recursion
             RefPtr<nsWindow> kungFuDeathGrip = win;
             win->ThemeChanged();
         }
 
         children = children->next;
@@ -4390,17 +4390,17 @@ nsWindow::ConfigureChildren(const nsTArr
     // size information from a source other than our owner. Don't let our parent
     // update this information.
     if (mWindowType == eWindowType_plugin_ipc_chrome) {
       return NS_OK;
     }
 
     for (uint32_t i = 0; i < aConfigurations.Length(); ++i) {
         const Configuration& configuration = aConfigurations[i];
-        nsWindow* w = static_cast<nsWindow*>(configuration.mChild.get());
+        auto* w = static_cast<nsWindow*>(configuration.mChild.get());
         NS_ASSERTION(w->GetParent() == this,
                      "Configured widget is not a child");
         w->SetWindowClipRegion(configuration.mClipRegion, true);
         if (w->mBounds.Size() != configuration.mBounds.Size()) {
             w->Resize(configuration.mBounds.x, configuration.mBounds.y,
                       configuration.mBounds.width, configuration.mBounds.height,
                       true);
         } else if (w->mBounds.TopLeft() != configuration.mBounds.TopLeft()) {
@@ -4479,17 +4479,17 @@ nsWindow::ResizeTransparencyBitmap()
         return;
 
     if (mBounds.width == mTransparencyBitmapWidth &&
         mBounds.height == mTransparencyBitmapHeight)
         return;
 
     int32_t newRowBytes = GetBitmapStride(mBounds.width);
     int32_t newSize = newRowBytes * mBounds.height;
-    gchar* newBits = new gchar[newSize];
+    auto* newBits = new gchar[newSize];
     // fill new mask with "transparent", first
     memset(newBits, 0, newSize);
 
     // Now copy the intersection of the old and new areas into the new mask
     int32_t copyWidth = std::min(mBounds.width, mTransparencyBitmapWidth);
     int32_t copyHeight = std::min(mBounds.height, mTransparencyBitmapHeight);
     int32_t oldRowBytes = GetBitmapStride(mTransparencyBitmapWidth);
     int32_t copyBytes = GetBitmapStride(copyWidth);
@@ -5177,34 +5177,34 @@ nsWindow::CheckForRollup(gdouble aMouseX
         rollupWidget = rollupListener->GetRollupWidget();
     }
     if (!rollupWidget) {
         nsBaseWidget::gRollupListener = nullptr;
         return false;
     }
 
     bool retVal = false;
-    GdkWindow *currentPopup =
+    auto *currentPopup =
         (GdkWindow *)rollupWidget->GetNativeData(NS_NATIVE_WINDOW);
     if (aAlwaysRollup || !is_mouse_in_window(currentPopup, aMouseX, aMouseY)) {
         bool rollup = true;
         if (aIsWheel) {
             rollup = rollupListener->ShouldRollupOnMouseWheelEvent();
             retVal = rollupListener->ShouldConsumeOnMouseWheelEvent();
         }
         // if we're dealing with menus, we probably have submenus and
         // we don't want to rollup if the click is in a parent menu of
         // the current submenu
         uint32_t popupsToRollup = UINT32_MAX;
         if (!aAlwaysRollup) {
             AutoTArray<nsIWidget*, 5> widgetChain;
             uint32_t sameTypeCount = rollupListener->GetSubmenuWidgetChain(&widgetChain);
             for (uint32_t i=0; i<widgetChain.Length(); ++i) {
                 nsIWidget* widget = widgetChain[i];
-                GdkWindow* currWindow =
+                auto* currWindow =
                     (GdkWindow*) widget->GetNativeData(NS_NATIVE_WINDOW);
                 if (is_mouse_in_window(currWindow, aMouseX, aMouseY)) {
                   // don't roll up if the mouse event occurred within a
                   // menu of the same type. If the mouse event occurred
                   // in a menu higher than that, roll up, but pass the
                   // number of popups to Rollup so that only those of the
                   // same type close up.
                   if (i < sameTypeCount) {
@@ -5658,18 +5658,18 @@ leave_notify_event_cb(GtkWidget *widget,
                       GdkEventCrossing *event)
 {
     if (is_parent_grab_leave(event)) {
         return TRUE;
     }
 
     // bug 369599: Suppress LeaveNotify events caused by pointer grabs to
     // avoid generating spurious mouse exit events.
-    gint x = gint(event->x_root);
-    gint y = gint(event->y_root);
+    auto x = gint(event->x_root);
+    auto y = gint(event->y_root);
     GdkDisplay* display = gtk_widget_get_display(widget);
     GdkWindow* winAtPt = gdk_display_get_window_at_pointer(display, &x, &y);
     if (winAtPt == event->window) {
         return TRUE;
     }
 
     RefPtr<nsWindow> window = get_window_for_gdk_window(event->window);
     if (!window)
@@ -5782,17 +5782,17 @@ focus_out_event_cb(GtkWidget *widget, Gd
 // example), a request to make the parent window active is issued.  When the
 // parent window becomes active, keyboard events will be received.
 
 static GdkFilterReturn
 popup_take_focus_filter(GdkXEvent *gdk_xevent,
                         GdkEvent *event,
                         gpointer data)
 {
-    XEvent* xevent = static_cast<XEvent*>(gdk_xevent);
+    auto* xevent = static_cast<XEvent*>(gdk_xevent);
     if (xevent->type != ClientMessage)
         return GDK_FILTER_CONTINUE;
 
     XClientMessageEvent& xclient = xevent->xclient;
     if (xclient.message_type != gdk_x11_get_xatom_by_name("WM_PROTOCOLS"))
         return GDK_FILTER_CONTINUE;
 
     Atom atom = xclient.data.l[0];
@@ -6281,17 +6281,17 @@ get_inner_gdk_window (GdkWindow *aWindow
                       gint x, gint y,
                       gint *retx, gint *rety)
 {
     gint cx, cy, cw, ch;
     GList *children = gdk_window_peek_children(aWindow);
     for (GList *child = g_list_last(children);
          child;
          child = g_list_previous(child)) {
-        GdkWindow *childWindow = (GdkWindow *) child->data;
+        auto *childWindow = (GdkWindow *) child->data;
         if (get_window_for_gdk_window(childWindow)) {
 #if (MOZ_WIDGET_GTK == 2)
             gdk_window_get_geometry(childWindow, &cx, &cy, &cw, &ch, nullptr);
 #else
             gdk_window_get_geometry(childWindow, &cx, &cy, &cw, &ch);
 #endif
             if ((cx < x) && (x < (cx + cw)) &&
                 (cy < y) && (y < (cy + ch)) &&
@@ -6393,23 +6393,19 @@ nsWindow::DispatchRestoreEventAccessible
 {
     DispatchEventToRootAccessible(nsIAccessibleEvent::EVENT_WINDOW_RESTORE);
 }
 
 #endif /* #ifdef ACCESSIBILITY */
 
 // nsChildWindow class
 
-nsChildWindow::nsChildWindow()
-{
-}
-
-nsChildWindow::~nsChildWindow()
-{
-}
+nsChildWindow::nsChildWindow() = default;
+
+nsChildWindow::~nsChildWindow() = default;
 
 NS_IMETHODIMP_(void)
 nsWindow::SetInputContext(const InputContext& aContext,
                           const InputContextAction& aAction)
 {
     if (!mIMContext) {
         return;
     }
--- a/widget/nsBaseDragService.cpp
+++ b/widget/nsBaseDragService.cpp
@@ -57,19 +57,17 @@ nsBaseDragService::nsBaseDragService()
     mDragEventDispatchedToChildProcess(false),
     mDragAction(DRAGDROP_ACTION_NONE),
     mDragActionFromChildProcess(DRAGDROP_ACTION_UNINITIALIZED), mTargetSize(0,0),
     mContentPolicyType(nsIContentPolicy::TYPE_OTHER),
     mSuppressLevel(0), mInputSource(nsIDOMMouseEvent::MOZ_SOURCE_MOUSE)
 {
 }
 
-nsBaseDragService::~nsBaseDragService()
-{
-}
+nsBaseDragService::~nsBaseDragService() = default;
 
 NS_IMPL_ISUPPORTS(nsBaseDragService, nsIDragService, nsIDragSession)
 
 //---------------------------------------------------------
 NS_IMETHODIMP
 nsBaseDragService::SetCanDrop(bool aCanDrop)
 {
   mCanDrop = aCanDrop;
@@ -556,18 +554,17 @@ nsBaseDragService::DrawDrag(nsIDOMNode* 
     return NS_ERROR_FAILURE;
 
   *aPresContext = presShell->GetPresContext();
 
   nsCOMPtr<nsIFrameLoaderOwner> flo = do_QueryInterface(dragNode);
   if (flo) {
     RefPtr<nsFrameLoader> fl = flo->GetFrameLoader();
     if (fl) {
-      mozilla::dom::TabParent* tp =
-        static_cast<mozilla::dom::TabParent*>(fl->GetRemoteBrowser());
+      auto* tp = static_cast<mozilla::dom::TabParent*>(fl->GetRemoteBrowser());
       if (tp && tp->TakeDragVisualization(*aSurface, aScreenDragRect)) {
         if (mImage) {
           // Just clear the surface if chrome has overridden it with an image.
           *aSurface = nullptr;
         }
 
         return NS_OK;
       }
--- a/widget/nsBaseWidget.cpp
+++ b/widget/nsBaseWidget.cpp
@@ -690,17 +690,17 @@ void nsBaseWidget::SetZIndex(int32_t aZI
 {
   // Hold a ref to ourselves just in case, since we're going to remove
   // from our parent.
   nsCOMPtr<nsIWidget> kungFuDeathGrip(this);
 
   mZIndex = aZIndex;
 
   // reorder this child in its parent's list.
-  nsBaseWidget* parent = static_cast<nsBaseWidget*>(GetParent());
+  auto* parent = static_cast<nsBaseWidget*>(GetParent());
   if (parent) {
     parent->RemoveChild(this);
     // Scope sib outside the for loop so we can check it afterward
     nsIWidget* sib = parent->GetFirstChild();
     for ( ; sib; sib = sib->GetNextSibling()) {
       int32_t childZIndex = GetZIndex();
       if (aZIndex < childZIndex) {
         // Insert ourselves before sib
@@ -2026,17 +2026,17 @@ nsIWidget::SynthesizeNativeTouchTap(Layo
   notifier.SkipNotification();  // we'll do it in the long-tap callback
   return NS_OK;
 }
 
 // static
 void
 nsIWidget::OnLongTapTimerCallback(nsITimer* aTimer, void* aClosure)
 {
-  nsIWidget *self = static_cast<nsIWidget *>(aClosure);
+  auto *self = static_cast<nsIWidget *>(aClosure);
 
   if ((self->mLongTapTouchPoint->mStamp + self->mLongTapTouchPoint->mDuration) >
       TimeStamp::Now()) {
 #ifdef XP_WIN
     // Windows needs us to keep pumping feedback to the digitizer, so update
     // the pointer id with the same position.
     self->SynthesizeNativeTouchPoint(self->mLongTapTouchPoint->mPointerId,
                                      TOUCH_CONTACT,
--- a/widget/xremoteclient/XRemoteClient.cpp
+++ b/widget/xremoteclient/XRemoteClient.cpp
@@ -352,17 +352,17 @@ XRemoteClient::GetLock(Window aWindow, b
     if (!locked && !NS_FAILED(rv)) {
       /* We tried to grab the lock this time, and failed because someone
 	 else is holding it already.  So, wait for a PropertyDelete event
 	 to come in, and try again. */
       MOZ_LOG(sRemoteLm, LogLevel::Debug, 
 	     ("window 0x%x is locked by %s; waiting...\n",
 	      (unsigned int) aWindow, data));
       waited = True;
-      while (1) {
+      while (true) {
 	XEvent event;
 	int select_retval;
 	fd_set select_set;
 	struct timeval delay;
 	delay.tv_sec = 10;
 	delay.tv_usec = 0;
 
 	FD_ZERO(&select_set);
@@ -637,24 +637,24 @@ XRemoteClient::DoSendCommandLine(Window 
   for (int i = 0; i < argc; ++i) {
     int32_t len = strlen(argv[i]);
     if (i == 0 && aDesktopStartupID) {
       len += sizeof(desktopStartupPrefix) - 1 + strlen(aDesktopStartupID);
     }
     argvlen += len;
   }
 
-  int32_t* buffer = (int32_t*) malloc(argvlen + argc + 1 +
+  auto* buffer = (int32_t*) malloc(argvlen + argc + 1 +
                                       sizeof(int32_t) * (argc + 1));
   if (!buffer)
     return NS_ERROR_OUT_OF_MEMORY;
 
   buffer[0] = TO_LITTLE_ENDIAN32(argc);
 
-  char *bufend = (char*) (buffer + argc + 1);
+  auto *bufend = (char*) (buffer + argc + 1);
 
   bufend = estrcpy(cwdbuf, bufend);
 
   for (int i = 0; i < argc; ++i) {
     buffer[i + 1] = TO_LITTLE_ENDIAN32(bufend - ((char*) buffer));
     bufend = estrcpy(argv[i], bufend);
     if (i == 0 && aDesktopStartupID) {
       bufend = estrcpy(desktopStartupPrefix, bufend - 1);