Merge inbound to mozilla-central r=merge a=merge
author: Dorel Luca <dluca@mozilla.com>
Wed, 29 Nov 2017 12:26:15 +0200
changeset 394139 3f6b9aaed8cd57954e0c960cde06d25228196456
parent 394138 4e80703b91e584e95672b21aa390feb4fc308b1a (current diff)
parent 394048 40b464eb6b31e66294aba0fd13b8a938f25d573d (diff)
child 394140 e565b4d7a9278b51392373d40e01333c8a9b446a
child 394183 650c0ef5cdcdc653eaea78dbfd2ecac5a285944b
child 394233 d0b0f4499790b54db0ecc514d4277583716c21d5
push id: 32990
push user: dluca@mozilla.com
push date: Wed, 29 Nov 2017 10:27:31 +0000
treeherder: mozilla-central@3f6b9aaed8cd [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: merge, merge
milestone: 59.0a1
first release with
nightly linux32
3f6b9aaed8cd / 59.0a1 / 20171129111030 / files
nightly linux64
3f6b9aaed8cd / 59.0a1 / 20171129111030 / files
nightly mac
3f6b9aaed8cd / 59.0a1 / 20171129111030 / files
nightly win32
3f6b9aaed8cd / 59.0a1 / 20171129111030 / files
nightly win64
3f6b9aaed8cd / 59.0a1 / 20171129111030 / files
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
releases
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Merge inbound to mozilla-central r=merge a=merge
devtools/client/preferences/devtools.js
dom/webidl/MediaStreamList.webidl
media/webrtc/signaling/src/peerconnection/MediaPipelineFactory.cpp
media/webrtc/signaling/src/peerconnection/MediaPipelineFactory.h
media/webrtc/signaling/src/peerconnection/MediaStreamList.cpp
media/webrtc/signaling/src/peerconnection/MediaStreamList.h
media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.cpp
testing/mochitest/runtests.py
xpfe/components/autocomplete/jar.mn
xpfe/components/autocomplete/moz.build
xpfe/components/autocomplete/resources/content/autocomplete.css
xpfe/components/autocomplete/resources/content/autocomplete.xml
--- a/accessible/ipc/win/HandlerProvider.cpp
+++ b/accessible/ipc/win/HandlerProvider.cpp
@@ -12,16 +12,17 @@
 #include "AccessibleDocument.h"
 #include "AccessibleTable.h"
 #include "AccessibleTable2.h"
 #include "AccessibleTableCell.h"
 #include "HandlerData.h"
 #include "HandlerData_i.c"
 #include "mozilla/Assertions.h"
 #include "mozilla/a11y/AccessibleWrap.h"
+#include "mozilla/a11y/HandlerDataCleanup.h"
 #include "mozilla/dom/ContentChild.h"
 #include "mozilla/Move.h"
 #include "mozilla/mscom/AgileReference.h"
 #include "mozilla/mscom/FastMarshaler.h"
 #include "mozilla/mscom/Interceptor.h"
 #include "mozilla/mscom/MainThreadHandoff.h"
 #include "mozilla/mscom/MainThreadInvoker.h"
 #include "mozilla/mscom/Ptr.h"
@@ -127,17 +128,18 @@ HandlerProvider::GetAndSerializePayload(
   // AddRef/Release pair for this reference is handled by payloadRef
   payload.mGeckoBackChannel = this;
 
   mSerializer = MakeUnique<mscom::StructToStream>(payload, &IA2Payload_Encode);
 
   // Now that we have serialized payload, we should clean up any
   // BSTRs, interfaces, etc. fetched in BuildInitialIA2Data.
   CleanupStaticIA2Data(payload.mStaticData);
-  CleanupDynamicIA2Data(payload.mDynamicData);
+  // No need to zero memory, since payload is going out of scope.
+  CleanupDynamicIA2Data(payload.mDynamicData, false);
 }
 
 HRESULT
 HandlerProvider::GetHandlerPayloadSize(NotNull<mscom::IInterceptor*> aInterceptor,
                                        NotNull<DWORD*> aOutPayloadSize)
 {
   MOZ_ASSERT(mscom::IsCurrentThreadMTA());
 
@@ -392,48 +394,21 @@ HandlerProvider::BuildDynamicIA2Data(Dyn
   hr = target->get_uniqueID(&aOutIA2Data->mUniqueId);
 }
 
 void
 HandlerProvider::CleanupStaticIA2Data(StaticIA2Data& aData)
 {
   // When CoMarshalInterface writes interfaces out to a stream, it AddRefs.
   // Therefore, we must release our references after this.
-  if (aData.mIA2) {
-    aData.mIA2->Release();
-  }
-  if (aData.mIEnumVARIANT) {
-    aData.mIEnumVARIANT->Release();
-  }
-  if (aData.mIAHypertext) {
-    aData.mIAHypertext->Release();
-  }
-  if (aData.mIAHyperlink) {
-    aData.mIAHyperlink->Release();
-  }
-  if (aData.mIATable) {
-    aData.mIATable->Release();
-  }
-  if (aData.mIATable2) {
-    aData.mIATable2->Release();
-  }
-  if (aData.mIATableCell) {
-    aData.mIATableCell->Release();
-  }
+  ReleaseStaticIA2DataInterfaces(aData);
   ZeroMemory(&aData, sizeof(StaticIA2Data));
 }
 
 void
-HandlerProvider::CleanupDynamicIA2Data(DynamicIA2Data& aData)
-{
-  ::VariantClear(&aData.mRole);
-  ZeroMemory(&aData, sizeof(DynamicIA2Data));
-}
-
-void
 HandlerProvider::BuildInitialIA2Data(
   NotNull<mscom::IInterceptor*> aInterceptor,
   StaticIA2Data* aOutStaticData,
   DynamicIA2Data* aOutDynamicData)
 {
   BuildStaticIA2Data(aInterceptor, aOutStaticData);
   if (!aOutStaticData->mIA2) {
     return;
--- a/accessible/ipc/win/HandlerProvider.h
+++ b/accessible/ipc/win/HandlerProvider.h
@@ -70,17 +70,16 @@ private:
                               NotNull<mscom::IInterceptor*> aInterceptor);
   void BuildStaticIA2Data(NotNull<mscom::IInterceptor*> aInterceptor,
                           StaticIA2Data* aOutData);
   void BuildDynamicIA2Data(DynamicIA2Data* aOutIA2Data);
   void BuildInitialIA2Data(NotNull<mscom::IInterceptor*> aInterceptor,
                            StaticIA2Data* aOutStaticData,
                            DynamicIA2Data* aOutDynamicData);
   static void CleanupStaticIA2Data(StaticIA2Data& aData);
-  static void CleanupDynamicIA2Data(DynamicIA2Data& aData);
   bool IsTargetInterfaceCacheable();
   // Replace a raw object from the main thread with a wrapped, intercepted
   // object suitable for calling from the MTA.
   // The reference to the original object is adopted; i.e. you should not
   // separately release it.
   // This is intended for objects returned from method calls on the main thread.
   template<typename Interface> HRESULT ToWrappedObject(Interface** aObj);
   void GetAllTextInfoMainThread(BSTR* aText,
--- a/accessible/ipc/win/handler/AccessibleHandler.cpp
+++ b/accessible/ipc/win/handler/AccessibleHandler.cpp
@@ -11,16 +11,17 @@
 #define INITGUID
 
 #include "AccessibleHandler.h"
 #include "AccessibleHandlerControl.h"
 
 #include "Factory.h"
 #include "HandlerData.h"
 #include "mozilla/ArrayUtils.h"
+#include "mozilla/a11y/HandlerDataCleanup.h"
 #include "mozilla/mscom/Registration.h"
 #include "mozilla/UniquePtr.h"
 
 #include <objbase.h>
 #include <uiautomation.h>
 #include <winreg.h>
 
 #include "AccessibleHypertext.h"
@@ -87,16 +88,18 @@ AccessibleHandler::AccessibleHandler(IUn
     return;
   }
 
   mCacheGen = ctl->GetCacheGen();
 }
 
 AccessibleHandler::~AccessibleHandler()
 {
+  // No need to zero memory, since we're being destroyed anyway.
+  CleanupDynamicIA2Data(mCachedData.mDynamicData, false);
   if (mCachedData.mGeckoBackChannel) {
     mCachedData.mGeckoBackChannel->Release();
   }
   ClearTextCache();
 }
 
 HRESULT
 AccessibleHandler::ResolveIA2()
@@ -388,48 +391,31 @@ AccessibleHandler::ReadHandlerPayload(IS
   // interfaces are available. Therefore, deserialize into a temporary struct
   // and update mCachedData only after deserialization completes.
   // The decoding functions can misbehave if their target memory is not zeroed
   // beforehand, so ensure we do that.
   IA2Payload newData{};
   if (!deserializer.Read(&newData, &IA2Payload_Decode)) {
     return E_FAIL;
   }
+  // Clean up the old data.
+  // No need to zero memory, since we're about to completely replace this.
+  CleanupDynamicIA2Data(mCachedData.mDynamicData, false);
   mCachedData = newData;
 
   // These interfaces have been aggregated into the proxy manager.
   // The proxy manager will resolve these interfaces now on QI,
   // so we can release these pointers.
   // However, we don't null them out because we use their presence
   // to determine whether the interface is available
   // so as to avoid pointless cross-proc QI calls returning E_NOINTERFACE.
   // Note that if pointers to other objects (in contrast to
   // interfaces of *this* object) are added in future, we should not release
   // those pointers.
-  if (mCachedData.mStaticData.mIA2) {
-    mCachedData.mStaticData.mIA2->Release();
-  }
-  if (mCachedData.mStaticData.mIEnumVARIANT) {
-    mCachedData.mStaticData.mIEnumVARIANT->Release();
-  }
-  if (mCachedData.mStaticData.mIAHypertext) {
-    mCachedData.mStaticData.mIAHypertext->Release();
-  }
-  if (mCachedData.mStaticData.mIAHyperlink) {
-    mCachedData.mStaticData.mIAHyperlink->Release();
-  }
-  if (mCachedData.mStaticData.mIATable) {
-    mCachedData.mStaticData.mIATable->Release();
-  }
-  if (mCachedData.mStaticData.mIATable2) {
-    mCachedData.mStaticData.mIATable2->Release();
-  }
-  if (mCachedData.mStaticData.mIATableCell) {
-    mCachedData.mStaticData.mIATableCell->Release();
-  }
+  ReleaseStaticIA2DataInterfaces(mCachedData.mStaticData);
 
   if (!mCachedData.mGeckoBackChannel) {
     return S_OK;
   }
 
   RefPtr<AccessibleHandlerControl> ctl(gControlFactory.GetOrCreateSingleton());
   if (!ctl) {
     return E_OUTOFMEMORY;
new file mode 100644
--- /dev/null
+++ b/accessible/ipc/win/handler/HandlerDataCleanup.h
@@ -0,0 +1,85 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef mozilla_a11y_HandlerDataCleanup_h
+#define mozilla_a11y_HandlerDataCleanup_h
+
+#include <OleAuto.h>
+#include "HandlerData.h"
+
+namespace mozilla {
+namespace a11y {
+
+inline void
+ReleaseStaticIA2DataInterfaces(StaticIA2Data& aData)
+{
+  // Only interfaces of the proxied object wrapped by this handler should be
+  // released here, never other objects!
+  // For example, if StaticIA2Data were to include accParent in future,
+  // that must not be released here.
+  if (aData.mIA2) {
+    aData.mIA2->Release();
+  }
+  if (aData.mIEnumVARIANT) {
+    aData.mIEnumVARIANT->Release();
+  }
+  if (aData.mIAHypertext) {
+    aData.mIAHypertext->Release();
+  }
+  if (aData.mIAHyperlink) {
+    aData.mIAHyperlink->Release();
+  }
+  if (aData.mIATable) {
+    aData.mIATable->Release();
+  }
+  if (aData.mIATable2) {
+    aData.mIATable2->Release();
+  }
+  if (aData.mIATableCell) {
+    aData.mIATableCell->Release();
+  }
+}
+
+inline void
+CleanupDynamicIA2Data(DynamicIA2Data& aData, bool aZero=true)
+{
+  ::VariantClear(&aData.mRole);
+  if (aData.mKeyboardShortcut) {
+    ::SysFreeString(aData.mKeyboardShortcut);
+  }
+  if (aData.mName) {
+    ::SysFreeString(aData.mName);
+  }
+  if (aData.mDescription) {
+    ::SysFreeString(aData.mDescription);
+  }
+  if (aData.mDefaultAction) {
+    ::SysFreeString(aData.mDefaultAction);
+  }
+  if (aData.mValue) {
+    ::SysFreeString(aData.mValue);
+  }
+  if (aData.mAttributes) {
+    ::SysFreeString(aData.mAttributes);
+  }
+  if (aData.mIA2Locale.language)  {
+    ::SysFreeString(aData.mIA2Locale.language);
+  }
+  if (aData.mIA2Locale.country)  {
+    ::SysFreeString(aData.mIA2Locale.country);
+  }
+  if (aData.mIA2Locale.variant)  {
+    ::SysFreeString(aData.mIA2Locale.variant);
+  }
+  if (aZero) {
+    ZeroMemory(&aData, sizeof(DynamicIA2Data));
+  }
+}
+
+} // namespace a11y
+} // namespace mozilla
+
+#endif // mozilla_a11y_HandlerDataCleanup_h
--- a/accessible/ipc/win/handler/moz.build
+++ b/accessible/ipc/win/handler/moz.build
@@ -1,17 +1,20 @@
 # -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
 # vim: set filetype=python:
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 SharedLibrary('AccessibleHandler')
 
-EXPORTS.mozilla.a11y += ['AccessibleHandler.h']
+EXPORTS.mozilla.a11y += [
+    'AccessibleHandler.h',
+    'HandlerDataCleanup.h',
+]
 
 LOCAL_INCLUDES += [
     '/accessible/interfaces/ia2',
     '/ipc/mscom/oop',
 ]
 
 SOURCES += [
     '!dlldata.c',
--- a/accessible/tests/mochitest/tree/test_combobox.xul
+++ b/accessible/tests/mochitest/tree/test_combobox.xul
@@ -118,53 +118,16 @@
           {
             // xul:menupopup
             role: ROLE_COMBOBOX_LIST, // context menu popup
             children: []
           }
         ]
       };
 
-      // XPFE and Toolkit autocomplete widgets differ.
-      var ac1h = document.getElementById("autocomplete");
-      if ("clearResults" in ac1h) {
-        SimpleTest.ok(true, "Testing (Old) XPFE autocomplete widget. (ac1h)");
-
-        // Popup is always created.
-        accTree.children.push(
-          {
-            // xul:panel
-            role: ROLE_COMBOBOX_LIST,
-            children: [
-              {
-                // xul:tree
-                role: ROLE_TABLE,
-                children: [
-                  {
-                    // xul:treecols
-                    role: ROLE_LIST,
-                    children: [
-                      {
-                        // xul:treecol
-                        role: ROLE_COLUMNHEADER,
-                        children: []
-                      }
-                    ]
-                  }
-                ]
-              }
-            ]
-          }
-          );
-      } else {
-        SimpleTest.ok(true, "Testing (New) Toolkit autocomplete widget. (ac1h)");
-
-        // Popup is lazily created, so not present in this case.
-      }
-
       testAccessibleTree("autocomplete", accTree);
 
       //////////////////////////////////////////////////////////////////////////
       // textbox@type=autocomplete #2 (child menupoup)
 
       accTree = {
         // textbox
         role: ROLE_AUTOCOMPLETE,
@@ -190,53 +153,16 @@
           {
             // xul:menupopup
             role: ROLE_COMBOBOX_LIST, // context menu popup
             children: []
           }
         ]
       };
 
-      // XPFE and Toolkit autocomplete widgets differ.
-      var ac2cmp = document.getElementById("autocomplete2");
-      if ("clearResults" in ac2cmp) {
-        SimpleTest.ok(true, "Testing (Old) XPFE autocomplete widget. (ac2mp)");
-
-        // Popup is always created.
-        accTree.children.push(
-          {
-            // xul:panel
-            role: ROLE_COMBOBOX_LIST,
-            children: [
-              {
-                // xul:tree
-                role: ROLE_TABLE,
-                children: [
-                  {
-                    // xul:treecols
-                    role: ROLE_LIST,
-                    children: [
-                      {
-                        // xul:treecol
-                        role: ROLE_COLUMNHEADER,
-                        children: []
-                      }
-                    ]
-                  }
-                ]
-              }
-            ]
-          }
-          );
-      } else {
-        SimpleTest.ok(true, "Testing (New) Toolkit autocomplete widget. (ac2mp)");
-
-        // Popup is lazily created, so not present in this case.
-      }
-
       testAccessibleTree("autocomplete2", accTree);
 
       SimpleTest.finish()
     }
 
     SimpleTest.waitForExplicitFinish();
     addA11yLoadEvent(doTest);
   ]]>
--- a/accessible/tests/mochitest/tree/test_txtctrl.xul
+++ b/accessible/tests/mochitest/tree/test_txtctrl.xul
@@ -120,75 +120,40 @@
         ]
       };
 
       function test_AutocompleteControl() {
         testAccessibleTree("txc_autocomplete", accTree);
         SimpleTest.finish();
       }
 
-      // XPFE and Toolkit autocomplete widgets differ.
       var txc = document.getElementById("txc_autocomplete");
-      if ("clearResults" in txc) {
-        SimpleTest.ok(true, "Testing (Old) XPFE autocomplete widget.");
+      SimpleTest.ok(txc, "Testing (New) Toolkit autocomplete widget.");
 
-        // Popup is always created. (See code below.)
+      // Dumb access to trigger popup lazy creation.
+      dump("Trigget popup lazy creation");
+      waitForEvent(EVENT_REORDER, txc, test_AutocompleteControl);
+      txc.popup;
 
-        accTree.children.push(
-          {
-            // xul:panel
-            role: ROLE_COMBOBOX_LIST,
-            children: [
-              {
-                // xul:tree
-                role: ROLE_TABLE,
-                children: [
-                  {
-                    // xul:treecols
-                    role: ROLE_LIST,
-                    children: [
-                      {
-                        // xul:treecol
-                        role: ROLE_COLUMNHEADER,
-                        children: []
-                      }
-                    ]
-                  }
-                ]
-              }
-            ]
-          }
-        );
-        test_AutocompleteControl();
-
-      } else {
-        SimpleTest.ok(true, "Testing (New) Toolkit autocomplete widget.");
-
-        // Dumb access to trigger popup lazy creation.
-        dump("Trigget popup lazy creation");
-        waitForEvent(EVENT_REORDER, txc, test_AutocompleteControl);
-        txc.popup;
-
-        accTree.children.push(
-          {
-            role: ROLE_LIST,
-            children: [
-              {
-                role: ROLE_LIST,
-                children: [
-                  {
-                    role: ROLE_COLUMNHEADER,
-                    children: []
-                  }
-                ]
-              }
-            ]
-          }
-        );
-      }
+      accTree.children.push(
+        {
+          role: ROLE_LIST,
+          children: [
+            {
+              role: ROLE_LIST,
+              children: [
+                {
+                  role: ROLE_COLUMNHEADER,
+                  children: []
+                }
+              ]
+            }
+          ]
+        }
+      );
     }
 
     SimpleTest.waitForExplicitFinish();
     addA11yLoadEvent(doTest);
   ]]>
   </script>
 
   <hbox flex="1" style="overflow: auto;">
--- a/browser/base/content/browser-pageActions.js
+++ b/browser/base/content/browser-pageActions.js
@@ -867,18 +867,18 @@ var BrowserPageActionFeedback = {
     return this.feedbackAnimationBox = document.getElementById("pageActionFeedbackAnimatableBox");
   },
 
   get feedbackLabel() {
     delete this.feedbackLabel;
     return this.feedbackLabel = document.getElementById("pageActionFeedbackMessage");
   },
 
-  show(action, event) {
-    this.feedbackLabel.textContent = this.panelNode.getAttribute(action.id + "Feedback");
+  show(action, event, textContentOverride) {
+    this.feedbackLabel.textContent = this.panelNode.getAttribute((textContentOverride || action.id) + "Feedback");
     this.panelNode.hidden = false;
 
     let anchor = BrowserPageActions.panelAnchorNodeForAction(action, event);
     this.panelNode.openPopup(anchor, {
       position: "bottomcenter topright",
       triggerEvent: event,
     });
 
@@ -989,17 +989,18 @@ BrowserPageActions.sendToDevice = {
       item.addEventListener("command", event => {
         if (panelNode) {
           panelNode.hidePopup();
         }
         // There are items in the subview that don't represent devices: "Sign
         // in", "Learn about Sync", etc.  Device items will be .sendtab-target.
         if (event.target.classList.contains("sendtab-target")) {
           let action = PageActions.actionForID("sendToDevice");
-          BrowserPageActionFeedback.show(action, event);
+          let textOverride = gSync.offline && "sendToDeviceOffline";
+          BrowserPageActionFeedback.show(action, event, textOverride);
         }
       });
       return item;
     });
 
     bodyNode.removeAttribute("state");
     // In the first ~10 sec after startup, Sync may not be loaded and the list
     // of devices will be empty.
--- a/browser/base/content/browser-sync.js
+++ b/browser/base/content/browser-sync.js
@@ -60,16 +60,20 @@ var gSync = {
     return UIState.get().status == UIState.STATUS_SIGNED_IN;
   },
 
   get remoteClients() {
     return Weave.Service.clientsEngine.remoteClients
            .sort((a, b) => a.name.localeCompare(b.name));
   },
 
+  get offline() {
+    return Weave.Service.scheduler.offline;
+  },
+
   _generateNodeGetters() {
     for (let k of ["Status", "Avatar", "Label", "Container"]) {
       let prop = "appMenu" + k;
       let suffix = k.toLowerCase();
       delete this[prop];
       this.__defineGetter__(prop, function() {
         delete this[prop];
         return this[prop] = document.getElementById("appMenu-fxa-" + suffix);
--- a/browser/base/content/browser.xul
+++ b/browser/base/content/browser.xul
@@ -431,17 +431,18 @@
            role="alert"
            type="arrow"
            hidden="true"
            flip="slide"
            position="bottomcenter topright"
            tabspecific="true"
            noautofocus="true"
            copyURLFeedback="&copyURLFeedback.label;"
-           sendToDeviceFeedback="&sendToDeviceFeedback.label;">
+           sendToDeviceFeedback="&sendToDeviceFeedback.label;"
+           sendToDeviceOfflineFeedback="&sendToDeviceOfflineFeedback.label;">
       <hbox id="pageActionFeedbackAnimatableBox">
         <image id="pageActionFeedbackAnimatableImage"/>
       </hbox>
       <label id="pageActionFeedbackMessage"/>
     </panel>
 
     <menupopup id="pageActionContextMenu"
                onpopupshowing="BrowserPageActions.onContextMenuShowing(event, this);">
--- a/browser/components/customizableui/content/panelUI.inc.xul
+++ b/browser/components/customizableui/content/panelUI.inc.xul
@@ -429,17 +429,17 @@
                     showMoreTooltipText="&appMenuRemoteTabs.showMore.tooltip;"
                     notabsforclientlabel="&appMenuRemoteTabs.notabs.label;"
                     />
             </vbox>
             <!-- Sync is ready to Sync but the "tabs" engine isn't enabled-->
             <hbox id="PanelUI-remotetabs-tabsdisabledpane" pack="center" flex="1">
               <vbox class="PanelUI-remotetabs-instruction-box" align="center">
                 <hbox pack="center">
-                  <image class="fxaSyncIllustration"/>
+                  <image class="fxaSyncIllustrationIssue"/>
                 </hbox>
                 <label class="PanelUI-remotetabs-instruction-label">&appMenuRemoteTabs.tabsnotsyncing.label;</label>
                 <hbox pack="center">
                   <toolbarbutton class="PanelUI-remotetabs-prefs-button"
                                  label="&appMenuRemoteTabs.openprefs.label;"
                                  oncommand="gSync.openPrefs('synced-tabs');"/>
                 </hbox>
               </vbox>
@@ -447,19 +447,18 @@
             <!-- Sync is ready to Sync but we are still fetching the tabs to show -->
             <vbox id="PanelUI-remotetabs-fetching">
               <!-- Show intentionally blank panel, see bug 1239845 -->
             </vbox>
             <!-- Sync has only 1 (ie, this) device connected -->
             <hbox id="PanelUI-remotetabs-nodevicespane" pack="center" flex="1">
               <vbox class="PanelUI-remotetabs-instruction-box">
                 <hbox pack="center">
-                  <image class="fxaSyncIllustration"/>
+                  <image class="fxaSyncIllustrationIssue"/>
                 </hbox>
-                <label class="PanelUI-remotetabs-instruction-title">&appMenuRemoteTabs.noclients.title;</label>
                 <label class="PanelUI-remotetabs-instruction-label">&appMenuRemoteTabs.noclients.subtitle;</label>
                 <!-- The inner HTML for PanelUI-remotetabs-mobile-promo is built at runtime -->
                 <label id="PanelUI-remotetabs-mobile-promo" fxAccountsBrand="&syncBrand.fxAccount.label;"/>
               </vbox>
             </hbox>
           </deck>
         </vbox>
         <!-- a box to ensure contained boxes are centered horizonally -->
@@ -479,17 +478,17 @@
           <!-- When Sync needs re-authentication. This uses the exact same messaging
                as "Sync is not configured" but remains a separate box so we get
                the goodness of observing broadcasters to manage the hidden states -->
           <vbox id="PanelUI-remotetabs-reauthsync"
                 flex="1"
                 align="center"
                 class="PanelUI-remotetabs-instruction-box"
                 observes="sync-reauth-state">
-            <image class="fxaSyncIllustration"/>
+            <image class="fxaSyncIllustrationIssue"/>
             <label class="PanelUI-remotetabs-instruction-label">&appMenuRemoteTabs.notsignedin.label;</label>
             <toolbarbutton class="PanelUI-remotetabs-prefs-button"
                            label="&appMenuRemoteTabs.signin.label;"
                            oncommand="gSync.openPrefs('synced-tabs');"/>
           </vbox>
         </hbox>
       </vbox>
     </panelview>
--- a/browser/components/syncedtabs/SyncedTabsDeckComponent.js
+++ b/browser/components/syncedtabs/SyncedTabsDeckComponent.js
@@ -111,23 +111,23 @@ SyncedTabsDeckComponent.prototype = {
         break;
       default:
         break;
     }
   },
 
   // There's no good way to mock fxAccounts in browser tests where it's already
   // been instantiated, so we have this method for stubbing.
-  _accountStatus() {
-    return this._fxAccounts.accountStatus();
+  _getSignedInUser() {
+    return this._fxAccounts.getSignedInUser();
   },
 
   getPanelStatus() {
-    return this._accountStatus().then(exists => {
-      if (!exists || this._SyncedTabs.loginFailed) {
+    return this._getSignedInUser().then(user => {
+      if (!user || !user.verified || this._SyncedTabs.loginFailed) {
         return this.PANELS.NOT_AUTHED_INFO;
       }
       if (!this._SyncedTabs.isConfiguredToSyncTabs) {
         return this.PANELS.TABS_DISABLED;
       }
       if (!this._SyncedTabs.hasSyncedThisSession) {
         return this.PANELS.TABS_FETCHING;
       }
--- a/browser/components/syncedtabs/sidebar.xhtml
+++ b/browser/components/syncedtabs/sidebar.xhtml
@@ -76,23 +76,24 @@
           <!-- Show intentionally blank panel, see bug 1239845 -->
         </div>
         <div class="notAuthedInfo sync-state">
           <div class="syncIllustration"></div>
           <p class="instructions">&syncedTabs.sidebar.notsignedin.label;</p>
           <button class="button sync-prefs">&fxaSignIn.label;</button>
         </div>
         <div class="singleDeviceInfo sync-state">
-          <p>&syncedTabs.sidebar.noclients.title;</p>
-          <p>&syncedTabs.sidebar.noclients.subtitle;</p>
-          <p class="device-promo" fxAccountsBrand="&syncBrand.fxAccount.label;"></p>
+          <div class="syncIllustrationIssue"></div>
+          <p class="instructions">&syncedTabs.sidebar.noclients.subtitle;</p>
+          <p class="instructions device-promo" fxAccountsBrand="&syncBrand.fxAccount.label;"></p>
         </div>
         <div class="tabs-disabled sync-state">
-          <p>&syncedTabs.sidebar.tabsnotsyncing.label;</p>
-          <p><a href="#" class="sync-prefs text-link">&syncedTabs.sidebar.openprefs.label;</a></p>
+          <div class="syncIllustrationIssue"></div>
+          <p class="instructions">&syncedTabs.sidebar.tabsnotsyncing.label;</p>
+          <button class="button sync-prefs">&syncedTabs.sidebar.openprefs.label;</button>
         </div>
       </div>
     </template>
 
     <div class="content-container">
       <!-- the non-scrollable header -->
       <div class="content-header">
         <div class="sidebar-search-container tabs-container sync-state">
--- a/browser/components/syncedtabs/test/browser/browser_sidebar_syncedtabslist.js
+++ b/browser/components/syncedtabs/test/browser/browser_sidebar_syncedtabslist.js
@@ -62,17 +62,17 @@ const FIXTURE = [
   }
 ];
 
 let originalSyncedTabsInternal = null;
 
 async function testClean() {
   let syncedTabsDeckComponent = window.SidebarUI.browser.contentWindow.syncedTabsDeckComponent;
   let SyncedTabs = window.SidebarUI.browser.contentWindow.SyncedTabs;
-  syncedTabsDeckComponent._accountStatus.restore();
+  syncedTabsDeckComponent._getSignedInUser.restore();
   SyncedTabs._internal.getTabClients.restore();
   SyncedTabs._internal = originalSyncedTabsInternal;
 
   await new Promise(resolve => {
     window.SidebarUI.browser.contentWindow.addEventListener("unload", function() {
       resolve();
     }, {once: true});
     SidebarUI.hide();
@@ -92,17 +92,17 @@ add_task(async function testSyncedTabsSi
   originalSyncedTabsInternal = SyncedTabs._internal;
   SyncedTabs._internal = {
     isConfiguredToSyncTabs: true,
     hasSyncedThisSession: true,
     getTabClients() { return Promise.resolve([]); },
     syncTabs() { return Promise.resolve(); },
   };
 
-  sinon.stub(syncedTabsDeckComponent, "_accountStatus", () => Promise.resolve(true));
+  sinon.stub(syncedTabsDeckComponent, "_getSignedInUser", () => Promise.resolve({verified: true}));
   sinon.stub(SyncedTabs._internal, "getTabClients", () => Promise.resolve(Cu.cloneInto(FIXTURE, {})));
 
   await syncedTabsDeckComponent.updatePanel();
   // This is a hacky way of waiting for the view to render. The view renders
   // after the following promise (a different instance of which is triggered
   // in updatePanel) resolves, so we wait for it here as well
   await syncedTabsDeckComponent.tabListComponent._store.getData();
 
@@ -145,17 +145,17 @@ add_task(async function testSyncedTabsSi
   originalSyncedTabsInternal = SyncedTabs._internal;
   SyncedTabs._internal = {
     isConfiguredToSyncTabs: true,
     hasSyncedThisSession: true,
     getTabClients() { return Promise.resolve([]); },
     syncTabs() { return Promise.resolve(); },
   };
 
-  sinon.stub(syncedTabsDeckComponent, "_accountStatus", () => Promise.resolve(true));
+  sinon.stub(syncedTabsDeckComponent, "_getSignedInUser", () => Promise.resolve({verified: true}));
   sinon.stub(SyncedTabs._internal, "getTabClients", () => Promise.resolve(Cu.cloneInto(FIXTURE, {})));
 
   await syncedTabsDeckComponent.updatePanel();
   // This is a hacky way of waiting for the view to render. The view renders
   // after the following promise (a different instance of which is triggered
   // in updatePanel) resolves, so we wait for it here as well
   await syncedTabsDeckComponent.tabListComponent._store.getData();
 
@@ -190,17 +190,17 @@ add_task(async function testSyncedTabsSi
   Array.prototype.forEach.call(selectedPanel.querySelectorAll(".tab"), (tabNode, i) => {
     checkItem(tabNode, FIXTURE_TABS[i]);
   });
 });
 
 add_task(testClean);
 
 add_task(async function testSyncedTabsSidebarStatus() {
-  let accountExists = false;
+  let account = null;
 
   await SidebarUI.show("viewTabsSidebar");
   let syncedTabsDeckComponent = window.SidebarUI.browser.contentWindow.syncedTabsDeckComponent;
   let SyncedTabs = window.SidebarUI.browser.contentWindow.SyncedTabs;
 
   originalSyncedTabsInternal = SyncedTabs._internal;
   SyncedTabs._internal = {
     isConfiguredToSyncTabs: false,
@@ -209,31 +209,31 @@ add_task(async function testSyncedTabsSi
     syncTabs() { return Promise.resolve(); },
   };
 
   Assert.ok(syncedTabsDeckComponent, "component exists");
 
   sinon.spy(syncedTabsDeckComponent, "updatePanel");
   sinon.spy(syncedTabsDeckComponent, "observe");
 
-  sinon.stub(syncedTabsDeckComponent, "_accountStatus", () => Promise.reject("Test error"));
+  sinon.stub(syncedTabsDeckComponent, "_getSignedInUser", () => Promise.reject("Test error"));
   await syncedTabsDeckComponent.updatePanel();
 
   let selectedPanel = syncedTabsDeckComponent.container.querySelector(".sync-state.selected");
   Assert.ok(selectedPanel.classList.contains("notAuthedInfo"),
     "not-authed panel is selected on auth error");
 
-  syncedTabsDeckComponent._accountStatus.restore();
-  sinon.stub(syncedTabsDeckComponent, "_accountStatus", () => Promise.resolve(accountExists));
+  syncedTabsDeckComponent._getSignedInUser.restore();
+  sinon.stub(syncedTabsDeckComponent, "_getSignedInUser", () => Promise.resolve(account));
   await syncedTabsDeckComponent.updatePanel();
   selectedPanel = syncedTabsDeckComponent.container.querySelector(".sync-state.selected");
   Assert.ok(selectedPanel.classList.contains("notAuthedInfo"),
     "not-authed panel is selected");
 
-  accountExists = true;
+  account = {verified: true};
   await syncedTabsDeckComponent.updatePanel();
   selectedPanel = syncedTabsDeckComponent.container.querySelector(".sync-state.selected");
   Assert.ok(selectedPanel.classList.contains("tabs-disabled"),
     "tabs disabled panel is selected");
 
   SyncedTabs._internal.isConfiguredToSyncTabs = true;
   await syncedTabsDeckComponent.updatePanel();
   selectedPanel = syncedTabsDeckComponent.container.querySelector(".sync-state.selected");
@@ -267,17 +267,17 @@ add_task(async function testSyncedTabsSi
   originalSyncedTabsInternal = SyncedTabs._internal;
   SyncedTabs._internal = {
     isConfiguredToSyncTabs: true,
     hasSyncedThisSession: true,
     getTabClients() { return Promise.resolve([]); },
     syncTabs() { return Promise.resolve(); },
   };
 
-  sinon.stub(syncedTabsDeckComponent, "_accountStatus", () => Promise.resolve(true));
+  sinon.stub(syncedTabsDeckComponent, "_getSignedInUser", () => Promise.resolve({verified: true}));
   sinon.stub(SyncedTabs._internal, "getTabClients", () => Promise.resolve(Cu.cloneInto(FIXTURE, {})));
 
   await syncedTabsDeckComponent.updatePanel();
   // This is a hacky way of waiting for the view to render. The view renders
   // after the following promise (a different instance of which is triggered
   // in updatePanel) resolves, so we wait for it here as well
   await syncedTabsDeckComponent.tabListComponent._store.getData();
 
--- a/browser/components/syncedtabs/test/xpcshell/test_SyncedTabsDeckComponent.js
+++ b/browser/components/syncedtabs/test/xpcshell/test_SyncedTabsDeckComponent.js
@@ -132,38 +132,43 @@ add_task(async function testObserver() {
   Assert.equal(component.updatePanel.callCount, 4, "triggers panel update again");
 });
 
 add_task(async function testPanelStatus() {
   let deckStore = new SyncedTabsDeckStore();
   let listStore = new SyncedTabsListStore();
   let listComponent = {};
   let fxAccounts = {
-    accountStatus() {}
+    getSignedInUser() {}
   };
   let SyncedTabsMock = {
     getTabClients() {}
   };
 
   sinon.stub(listStore, "getData");
 
 
   let component = new SyncedTabsDeckComponent({
     fxAccounts,
     deckStore,
     listComponent,
     SyncedTabs: SyncedTabsMock
   });
 
-  let isAuthed = false;
-  sinon.stub(fxAccounts, "accountStatus", () => Promise.resolve(isAuthed));
+  let account = null;
+  sinon.stub(fxAccounts, "getSignedInUser", () => Promise.resolve(account));
   let result = await component.getPanelStatus();
   Assert.equal(result, component.PANELS.NOT_AUTHED_INFO);
 
-  isAuthed = true;
+  account = {verified: false};
+
+  result = await component.getPanelStatus();
+  Assert.equal(result, component.PANELS.NOT_AUTHED_INFO);
+
+  account = {verified: true};
 
   SyncedTabsMock.loginFailed = true;
   result = await component.getPanelStatus();
   Assert.equal(result, component.PANELS.NOT_AUTHED_INFO);
   SyncedTabsMock.loginFailed = false;
 
   SyncedTabsMock.isConfiguredToSyncTabs = false;
   result = await component.getPanelStatus();
@@ -181,18 +186,18 @@ add_task(async function testPanelStatus(
   sinon.stub(SyncedTabsMock, "getTabClients", () => Promise.resolve(clients));
   result = await component.getPanelStatus();
   Assert.equal(result, component.PANELS.SINGLE_DEVICE_INFO);
 
   clients = ["mock-client"];
   result = await component.getPanelStatus();
   Assert.equal(result, component.PANELS.TABS_CONTAINER);
 
-  fxAccounts.accountStatus.restore();
-  sinon.stub(fxAccounts, "accountStatus", () => Promise.reject("err"));
+  fxAccounts.getSignedInUser.restore();
+  sinon.stub(fxAccounts, "getSignedInUser", () => Promise.reject("err"));
   result = await component.getPanelStatus();
   Assert.equal(result, component.PANELS.NOT_AUTHED_INFO);
 
   sinon.stub(component, "getPanelStatus", () => Promise.resolve("mock-panelId"));
   sinon.spy(deckStore, "selectPanel");
   await component.updatePanel();
   Assert.ok(deckStore.selectPanel.calledWith("mock-panelId"));
 });
--- a/browser/installer/package-manifest.in
+++ b/browser/installer/package-manifest.in
@@ -63,19 +63,16 @@
 [xpcom]
 @RESPATH@/dependentlibs.list
 #ifdef MOZ_SHARED_MOZGLUE
 @BINPATH@/@DLL_PREFIX@mozglue@DLL_SUFFIX@
 #endif
 #ifndef MOZ_STATIC_JS
 @BINPATH@/@DLL_PREFIX@mozjs@DLL_SUFFIX@
 #endif
-#ifdef MOZ_DMD
-@BINPATH@/@DLL_PREFIX@dmd@DLL_SUFFIX@
-#endif
 #ifndef MOZ_SYSTEM_NSPR
 #ifndef MOZ_FOLD_LIBS
 @BINPATH@/@DLL_PREFIX@nspr4@DLL_SUFFIX@
 @BINPATH@/@DLL_PREFIX@plc4@DLL_SUFFIX@
 @BINPATH@/@DLL_PREFIX@plds4@DLL_SUFFIX@
 #endif
 #endif
 #ifdef XP_MACOSX
--- a/browser/locales/en-US/chrome/browser/browser.dtd
+++ b/browser/locales/en-US/chrome/browser/browser.dtd
@@ -46,16 +46,17 @@ can reach it easily. -->
 <!ENTITY  unpinTab.accesskey                 "b">
 <!ENTITY  sendTabToDevice.label              "Send Tab to Device">
 <!ENTITY  sendTabToDevice.accesskey          "n">
 <!ENTITY  sendPageToDevice.label             "Send Page to Device">
 <!ENTITY  sendPageToDevice.accesskey         "n">
 <!ENTITY  sendLinkToDevice.label             "Send Link to Device">
 <!ENTITY  sendLinkToDevice.accesskey         "n">
 <!ENTITY  sendToDeviceFeedback.label         "Sent!">
+<!ENTITY  sendToDeviceOfflineFeedback.label  "Queued (offline)">
 <!ENTITY  moveToNewWindow.label              "Move to New Window">
 <!ENTITY  moveToNewWindow.accesskey          "W">
 <!ENTITY  bookmarkAllTabs.label              "Bookmark All Tabs…">
 <!ENTITY  bookmarkAllTabs.accesskey          "T">
 <!ENTITY  undoCloseTab.label                 "Undo Close Tab">
 <!ENTITY  undoCloseTab.accesskey             "U">
 <!ENTITY  closeTab.label                     "Close Tab">
 <!ENTITY  closeTab.accesskey                 "c">
@@ -372,17 +373,16 @@ These should match what Safari and other
 <!ENTITY appMenuRemoteTabs.showAll.label "Show All">
 <!ENTITY appMenuRemoteTabs.showAll.tooltip "Show all tabs from this device">
 <!-- LOCALIZATION NOTE (appMenuRemoteTabs.tabsnotsyncing.label): This is shown
      when Sync is configured but syncing tabs is disabled. -->
 <!ENTITY appMenuRemoteTabs.tabsnotsyncing.label "Turn on tab syncing to view a list of tabs from your other devices.">
 <!-- LOCALIZATION NOTE (appMenuRemoteTabs.noclients.label): This is shown
      when Sync is configured but this appears to be the only device attached to
      the account. We also show links to download Firefox for android/ios. -->
-<!ENTITY appMenuRemoteTabs.noclients.title "No synced tabs… yet!">
 <!ENTITY appMenuRemoteTabs.noclients.subtitle "Want to see your tabs from other devices here?">
 <!ENTITY appMenuRemoteTabs.openprefs.label "Sync Preferences">
 <!ENTITY appMenuRemoteTabs.notsignedin.label "Sign in to view a list of tabs from your other devices.">
 <!ENTITY appMenuRemoteTabs.signin.label "Sign in to Sync">
 <!ENTITY appMenuRemoteTabs.managedevices.label "Manage Devices…">
 <!ENTITY appMenuRemoteTabs.sidebar.label "View Synced Tabs Sidebar">
 
 <!ENTITY appMenuRecentHighlights.label "Recent Highlights">
@@ -786,17 +786,16 @@ you can use these alternative items. Oth
      The word "toolbar" is appended automatically and should not be contained below! -->
 <!ENTITY tabsToolbar.label "Browser tabs">
 
 <!-- LOCALIZATION NOTE (syncTabsMenu3.label): This appears in the history menu -->
 <!ENTITY syncTabsMenu3.label     "Synced Tabs">
 
 <!ENTITY syncedTabs.sidebar.label              "Synced Tabs">
 <!ENTITY syncedTabs.sidebar.noclients.label    "Sign in to Firefox from your other devices to view their tabs here.">
-<!ENTITY syncedTabs.sidebar.noclients.title    "No synced tabs… yet!">
 <!ENTITY syncedTabs.sidebar.noclients.subtitle "Want to see your tabs from other devices here?">
 <!ENTITY syncedTabs.sidebar.notsignedin.label  "Sign in to view a list of tabs from your other devices.">
 <!ENTITY syncedTabs.sidebar.notabs.label       "No open tabs">
 <!ENTITY syncedTabs.sidebar.openprefs.label    "Open &syncBrand.shortName.label; Preferences">
 <!-- LOCALIZATION NOTE (syncedTabs.sidebar.tabsnotsyncing.label): This is shown
      when Sync is configured but syncing tabs is disabled. -->
 <!ENTITY syncedTabs.sidebar.tabsnotsyncing.label       "Turn on tab syncing to view a list of tabs from your other devices.">
 <!ENTITY syncedTabs.sidebar.searchPlaceholder  "Search synced tabs">
--- a/browser/modules/test/browser/browser_BrowserUITelemetry_syncedtabs.js
+++ b/browser/modules/test/browser/browser_BrowserUITelemetry_syncedtabs.js
@@ -87,17 +87,17 @@ add_task(async function test_menu() {
 add_task(async function test_sidebar() {
   // Reset BrowserUITelemetry's world.
   BUIT._countableEvents = {};
 
   await SidebarUI.show("viewTabsSidebar");
 
   let syncedTabsDeckComponent = SidebarUI.browser.contentWindow.syncedTabsDeckComponent;
 
-  syncedTabsDeckComponent._accountStatus = () => Promise.resolve(true);
+  syncedTabsDeckComponent._getSignedInUser = () => Promise.resolve({verified: true});
 
   // Once the tabs container has been selected (which here means "'selected'
   // added to the class list") we are ready to test.
   let container = SidebarUI.browser.contentDocument.querySelector(".tabs-container");
   let promiseUpdated = BrowserTestUtils.waitForAttribute("class", container);
 
   await syncedTabsDeckComponent.updatePanel();
   await promiseUpdated;
--- a/browser/themes/shared/customizableui/panelUI.inc.css
+++ b/browser/themes/shared/customizableui/panelUI.inc.css
@@ -682,24 +682,32 @@ toolbarbutton[constrain-size="true"][cui
   /* This margin is to line this label up with the labels in toolbarbuttons. */
   margin-left: 28px;
 }
 
 #PanelUI-remotetabs[mainview] .PanelUI-remotetabs-notabsforclient-label {
   margin-left: 32px;
 }
 
-.fxaSyncIllustration {
+.fxaSyncIllustration,
+.fxaSyncIllustrationIssue {
   width: 180px;
   height: var(--panel-ui-sync-illustration-height);
-  list-style-image: url(chrome://browser/skin/fxa/sync-illustration.svg);
   -moz-context-properties: fill;
   fill: #cdcdcd;
 }
 
+.fxaSyncIllustration {
+  list-style-image: url(chrome://browser/skin/fxa/sync-illustration.svg);
+}
+
+.fxaSyncIllustrationIssue {
+  list-style-image: url(chrome://browser/skin/fxa/sync-illustration-issue.svg);
+}
+
 .PanelUI-remotetabs-prefs-button > .toolbarbutton-text {
   /* !important to override ".cui-widget-panel toolbarbutton > .toolbarbutton-text" above. */
   text-align: center !important;
   text-shadow: none;
 }
 
 #PanelUI-remotetabs[mainview] { /* panel anchored to toolbar button might be too skinny */
   min-width: 19em;
new file mode 100644
--- /dev/null
+++ b/browser/themes/shared/fxa/sync-illustration-issue.svg
@@ -0,0 +1,63 @@
+<!-- This Source Code Form is subject to the terms of the Mozilla Public
+   - License, v. 2.0. If a copy of the MPL was not distributed with this
+   - file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
+<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 173.9 156.5">
+  <style>
+    .st0{opacity:0.1;fill:#0C0C0D;enable-background:new ;} .st1{fill:#FFFFFF;} .st2{fill:url(#SVGID_1_);} .st3{fill:#F9F9FA;} .st4{fill:url(#SVGID_2_);} .st5{fill:url(#SVGID_3_);} .st6{fill:url(#SVGID_4_);} .st7{fill:url(#SVGID_5_);} .st8{fill:url(#SVGID_6_);} .st9{fill:url(#SVGID_7_);}
+  </style>
+  <path class="st0" d="M140.9 152h-69c-.6 0-1-.4-1-1s.4-1 1-1H141c.6 0 1 .4 1 1s-.5 1-1.1 1zm-9.3-5.1h-12c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h12c.3 0 .5.2.5.5s-.2.5-.5.5zm-15.7 9.6h-12c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h12c.3 0 .5.2.5.5s-.2.5-.5.5zm-20 0h-3c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h3c.3 0 .5.2.5.5s-.2.5-.5.5zm-7 0h-1c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h1c.3 0 .5.2.5.5s-.2.5-.5.5zm-10 0h-12c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h12c.3 0 .5.2.5.5s-.2.5-.5.5zm-20 0h-3c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h3c.3 0 .5.2.5.5s-.2.5-.5.5zm-7 0h-1c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h1c.3 0 .5.2.5.5s-.2.5-.5.5zm-10 0h-12c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h12c.3 0 .5.2.5.5s-.2.5-.5.5zm-20 0h-3c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h3c.3 0 .5.2.5.5s-.2.5-.5.5zm-7 0h-1c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h1c.3 0 .5.2.5.5s-.2.5-.5.5z"/>
+  <path class="st1" d="M85 20.4h21.3s-6.7-14.9 7.5-16.8c12.6-1.7 17.6 11.3 17.6 11.3s1.5-7.5 9-6.1 12.9 13.3 12.9 13.3h18.6"/>
+  <path class="st0" d="M172.2 18.6h-4c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h4c.3 0 .5.2.5.5s-.2.5-.5.5zm-13 0h-1c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h1c.3 0 .5.2.5.5s-.2.5-.5.5zm-5 0h-.8c-.1-.1-.2-.1-.2-.2-.1-.2-.5-1-1.2-2.1-.1-.2-.1-.5.2-.7.2-.1.5-.1.7.2.5.8.9 1.5 1.1 1.9h.2c.3 0 .5.2.5.5s-.2.4-.5.4zm-47.5-.6h-1.3c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h.6c-.1-.2-.2-.6-.3-.9-.1-.3.1-.6.3-.7.3-.1.6.1.7.3.3.9.6 1.5.6 1.5.1.3 0 .5-.3.7-.1.1-.2.1-.3.1zm-9.3 0h-12c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h12c.3 0 .5.2.5.5s-.3.5-.5.5zm7.8-5.5c-.3 0-.5-.2-.5-.4 0-.3-.1-.7-.1-1s.2-.5.5-.5.5.2.5.5 0 .6.1 1c.1.2-.1.4-.5.4.1.1.1.1 0 0zm26.2-1c-.2 0-.4-.1-.4-.3-.1-.2-.3-.5-.4-.9-.1-.2 0-.5.2-.7.2-.1.5 0 .7.2.2.3.3.6.5.9.1.2 0 .5-.2.7-.3.1-.3.1-.4.1zm16.1-1.3c-.1 0-.3 0-.4-.1-1.7-1.8-4-3.1-6.4-3.7-1.3-.3-2.6-.2-3.9.2-.3.1-.5-.1-.6-.3-.1-.3.1-.5.3-.6 1.4-.4 2.9-.5 4.4-.2 2.6.6 5.1 2 6.9 4 .2.2.2.5 0 .7-.1-.1-.2 0-.3 0zm-18.8-3c-.2 0-.3-.1-.4-.2-.6-.8-1.3-1.5-2-2.1-.2-.2-.1-.5.1-.7.2-.1.4-.1.6 0 .8.7 1.5 1.4 2.1 2.2.2.2.1.5-.1.7 0 .1-.2.1-.3.1zm-20.5-3.8c-.3 0-.5-.2-.5-.5 0-.2.1-.3.2-.4 1.8-1.3 4-2.2 6.2-2.4 1.9-.3 3.8-.2 5.7.2.3.1.5.3.4.6s-.3.5-.6.4c-1.7-.4-3.5-.4-5.3-.2-2.1.2-4.1.9-5.7 2.2-.2.1-.3.1-.4.1z"/>
+  <path class="st1" d="M172.9 22.4H85c-.6 0-1-.4-1-1s.4-1 1-1h87.9c.6 0 1 .4 1 1s-.5 1-1 1zM.8 37.7h11.9s-3.7-8.3 4.2-9.4c7-1 9.8 6.3 9.8 6.3s.8-4.2 5-3.4 7.2 7.4 7.2 7.4h10.3"/>
+  <path class="st0" d="M13 36.4H1.1c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h11.5c.2-.2.5-.2.7 0l.1.1v.1c.1.3 0 .5-.3.7 0 .1-.1.1-.1.1zm32.9-.2h-3c-.3 0-.5-.2-.5-.5s.2-.5.5-.5h3c.3 0 .5.2.5.5s-.2.5-.5.5zM27 33h-.1c-.3-.1-.4-.4-.3-.6.5-2 2.3-3.5 4.4-3.5.4 0 .7 0 1.1.1 1.9.5 3.6 1.5 4.9 3 .2.2.2.5 0 .7s-.5.2-.7 0c-1.1-1.3-2.6-2.3-4.3-2.7-.3-.1-.6-.1-.9-.1-1.7 0-3.1 1.2-3.4 2.8-.3.2-.5.3-.7.3zm-13.6-4.3c-.3 0-.5-.2-.5-.5 0-.1.1-.3.1-.4.8-.8 1.8-1.3 2.8-1.6.3-.1.6.1.6.4s-.1.6-.4.6c-.9.2-1.7.7-2.4 1.3.1.2 0 .2-.2.2zm7.5-1.3h-.1c-.3-.1-.6-.2-.9-.2-.3-.1-.5-.3-.4-.6.1-.3.3-.5.6-.4.3.1.7.2 1 .3.3 0 .5.3.4.6 0 .1-.3.3-.6.3z"/>
+  <path class="st1" d="M49.9 39.7H1c-.6 0-1-.4-1-1s.4-1 1-1h48.8c.6 0 1 .4 1 1s-.4 1-.9 1zm85.5 37.5h-15.3V60.3c0-4.2-3.4-7.5-7.6-7.5H51.1c-4.2 0-7.5 3.4-7.5 7.5V101c0 1.3.4 2.6 1 3.7-.4.5-.8 1-1 1.6l-6.9 16.1c-.3.7-.5 1.5-.5 2.3v1.1c.1 3.4 2.8 6.1 6.2 6h60v3.2c0 4.1 3.3 7.4 7.4 7.4h25.6c4.1 0 7.4-3.3 7.4-7.4V84.7c.1-4.2-3.2-7.5-7.4-7.5z"/>
+  <path class="st1" d="M50.8 56.5h61.4c2 0 3.6 1.6 3.6 3.5v40.7c0 2-1.6 3.6-3.6 3.6H50.8c-2 0-3.5-1.6-3.5-3.6V60.1c0-2 1.6-3.6 3.5-3.6z"/>
+  <path class="st1" d="M52.7 62.5h57.7c1.2 0 2.1.9 2.1 2.1V99c0 1.2-.9 2.1-2.1 2.1H52.7c-1.2 0-2.1-.9-2.1-2.1V64.6c0-1.1 1-2.1 2.1-2.1z"/>
+  <linearGradient id="SVGID_1_" gradientUnits="userSpaceOnUse" x1="55.4468" y1="665.5432" x2="128.2768" y2="738.3832" gradientTransform="translate(.02 -609.83)">
+    <stop offset="0" stop-color="#CCFBFF"/>
+    <stop offset="1" stop-color="#C9E4FF"/>
+  </linearGradient>
+  <path class="st2" d="M110.4 63.5c.6 0 1.1.5 1.1 1.1V99c0 .6-.5 1.1-1.1 1.1H52.7c-.6 0-1.1-.5-1.1-1.1V64.6c0-.6.5-1.1 1.1-1.1h57.7"/>
+  <path class="st3" d="M115.7 107.6c-.4-.8-1.2-1.3-2.1-1.2H49c-.9 0-1.7.5-2.1 1.3L40 123.8c-.1.2-.1.5-.1.7v1.1c.1 1.2 1 2.1 2.2 2H121c1.2.1 2.2-.8 2.2-2v-1c0-.3-.1-.5-.2-.7l-7.3-16.3z"/>
+  <linearGradient id="SVGID_2_" gradientUnits="userSpaceOnUse" x1="-4.5021" y1="627.6644" x2="182.4979" y2="797.1644" gradientTransform="translate(.02 -609.83)">
+    <stop offset="0" stop-color="#00C8D7"/>
+    <stop offset="1" stop-color="#0A84FF"/>
+  </linearGradient>
+  <path class="st4" d="M124.9 122.9l-7.3-16.1c-.3-.8-.9-1.4-1.6-1.8 1.2-1.1 1.9-2.6 1.9-4.2V60.1c0-3.1-2.5-5.5-5.6-5.5H50.9c-3.1 0-5.5 2.5-5.5 5.5v40.7c0 1.5.6 3 1.7 4-.9.4-1.6 1.1-2 2L38.2 123c-.2.5-.3 1-.3 1.5v1.1c.1 2.3 1.9 4.1 4.2 4H121c2.3.1 4.2-1.7 4.2-4v-1c0-.6-.1-1.2-.3-1.7zm-1.7 2.6c-.1 1.2-1 2.1-2.2 2H42.1c-1.2.1-2.2-.8-2.2-2v-1.1c0-.2 0-.5.1-.7l6.9-16.1c.4-.8 1.2-1.3 2.1-1.3h64.7c.9 0 1.7.5 2.1 1.2l7.3 16.1c.1.2.2.5.2.7l-.1 1.2zm-75.9-24.7V60.1c0-2 1.6-3.5 3.5-3.5h61.4c2 0 3.6 1.6 3.6 3.5v40.7c0 2-1.6 3.6-3.5 3.6H50.9c-2 0-3.6-1.6-3.6-3.6z"/>
+  <linearGradient id="SVGID_3_" gradientUnits="userSpaceOnUse" x1="-51.3994" y1="590.94" x2="201.6006" y2="840.94" gradientTransform="translate(.02 -609.83)">
+    <stop offset="0" stop-color="#00C8D7"/>
+    <stop offset="1" stop-color="#0A84FF"/>
+  </linearGradient>
+  <path class="st5" d="M94.7 121.8H68.4c-.4 0-.8-.2-.8-.5.3-1.7.5-2.6.8-4.4 0-.2.3-.4.7-.4l25.3-.2c.4 0 .7.2.7.4.2 1.5.4 3 .4 4.5.1.4-.3.6-.8.6zM64 112.4h-3.9c-.3 0-.6.2-.7.4-.1.5-.2.7-.3 1.2s.4.7.9.7h3.8c.3 0 .6-.1.7-.3.1-.5.2-.7.3-1.2s-.3-.8-.8-.8zm9.9 0h-4.1c-.4 0-.7.2-.7.4-.1.5-.1.7-.2 1.2-.1.3.4.6.9.6h3.9c.3 0 .6-.1.7-.4.1-.5.2-.7.3-1.2.1-.4-.3-.7-.8-.6zm20.5 1.4c-.1-.5-.1-.7-.2-1.2 0-.2-.3-.4-.7-.4h-4.1c-.5 0-.9.3-.9.6l.3 1.2c0 .2.3.4.7.4h3.9c.6 0 1.1-.3 1-.6zm-9.9.1l-.2-1.2c0-.2-.4-.4-.8-.4h-3.7c-.4 0-.8.2-.8.4-.1.5-.2.7-.2 1.2-.1.3.3.6.8.6h4.1c.4-.1.8-.3.8-.6zm19.6-.2l-.2-1.2c0-.2-.3-.3-.6-.3h-3.9c-.5 0-1 .3-.9.7l.3 1.2c.1.2.3.3.7.3h3.8c.4-.1.9-.4.8-.7zm-39.1-5h-3.7c-.3 0-.5.1-.6.3-.1.4-.2.7-.3 1.1s.3.6.8.6h3.6c.3 0 .5-.1.7-.3.2-.4.2-.7.4-1.1s-.4-.6-.9-.6zm9.4 0h-3.9c-.4 0-.7.2-.7.4-.1.4-.2.7-.3 1.1s.3.6.8.6h3.8c.3 0 .6-.1.7-.4.1-.4.2-.7.3-1.1s-.3-.6-.7-.6zm19.6 1.4l-.2-1.1c0-.2-.3-.4-.7-.4h-3.9c-.5 0-.9.3-.8.6l.2 1.1c0 .2.3.4.7.4h3.8c.5-.1.9-.3.9-.6zm-9.5.1l-.2-1.1c0-.2-.3-.4-.7-.4H80c-.4 0-.7.2-.8.4-.1.4-.2.7-.3 1.1-.1.3.3.5.8.5h3.9c.5 0 .9-.3.9-.5zm18.7-.1l-.2-1.1c0-.2-.3-.3-.6-.3h-3.7c-.5 0-1 .3-.9.6l.3 1.1c.1.2.3.3.6.3h3.6c.5-.1.9-.4.9-.6zm-48.1 2.4h-3.8c-.3 0-.6.1-.7.4-.2.4-.3.8-.4 1.2-.1.3.4.7.9.7h3.7c.3 0 .6-.2.6-.4.1-.4.2-.8.4-1.2.2-.5-.2-.8-.7-.7zm1.3-3.7h-3.5c-.2 0-.5.1-.6.3-.1.4-.2.7-.4 1.1-.1.3.2.6.7.6h3.5c.3 0 .5-.1.7-.3.2-.4.3-.7.5-1.1s-.4-.7-.9-.6zm50.9 4.1c.1.4.3.8.3 1.1 0 .2.3.3.6.3h3.7c.5 0 1-.3.9-.6-.1-.4-.2-.8-.3-1.1-.1-.2-.4-.4-.7-.3h-3.7c-.5-.1-.9.2-.8.6zm-1.1-3.7c.1.4.2.7.4 1.1.1.2.4.3.6.3h3.4c.5 0 .8-.3.8-.6-.1-.4-.2-.7-.3-1.1 0-.2-.2-.3-.6-.3H107c-.4 0-.9.3-.8.6z"/>
+  <g>
+    <linearGradient id="SVGID_4_" gradientUnits="userSpaceOnUse" x1="-22.6206" y1="563.3309" x2="225.3794" y2="813.3309" gradientTransform="translate(0 -610)">
+      <stop offset="0" stop-color="#00C8D7"/>
+      <stop offset="1" stop-color="#0A84FF"/>
+    </linearGradient>
+    <circle class="st6" cx="82.6" cy="59.4" r="1.2"/>
+  </g>
+  <path class="st1" d="M109.6 80h25.6c2.5 0 4.4 2 4.4 4.4v50.3c0 2.5-2 4.4-4.4 4.4h-25.6c-2.5 0-4.4-2-4.4-4.4V84.4c-.1-2.4 1.9-4.4 4.4-4.4z"/>
+  <linearGradient id="SVGID_5_" gradientUnits="userSpaceOnUse" x1="-19.2553" y1="590.7758" x2="221.2447" y2="809.2758" gradientTransform="translate(.02 -609.83)">
+    <stop offset="0" stop-color="#00C8D7"/>
+    <stop offset="1" stop-color="#0A84FF"/>
+  </linearGradient>
+  <path class="st7" d="M135.1 81c1.9 0 3.4 1.5 3.4 3.4v50.3c0 1.9-1.5 3.4-3.4 3.4h-25.6c-1.9 0-3.4-1.5-3.4-3.4V84.4c0-1.9 1.5-3.4 3.4-3.4h25.6m0-2h-25.6c-3 0-5.4 2.4-5.4 5.4v50.3c0 3 2.4 5.4 5.4 5.4h25.6c3 0 5.4-2.4 5.4-5.4V84.4c.1-3-2.4-5.4-5.4-5.4z"/>
+  <g>
+    <path class="st1" d="M111.1 84.8h22.4c.9 0 1.7.8 1.7 1.7v41.9c0 .9-.8 1.7-1.7 1.7h-22.4c-.9 0-1.7-.8-1.7-1.7V86.5c0-.9.8-1.7 1.7-1.7z"/>
+    <linearGradient id="SVGID_6_" gradientUnits="userSpaceOnUse" x1="62.995" y1="657.995" x2="135.835" y2="730.835" gradientTransform="translate(.02 -609.83)">
+      <stop offset="0" stop-color="#CCFBFF"/>
+      <stop offset="1" stop-color="#C9E4FF"/>
+    </linearGradient>
+    <path class="st8" d="M133.5 85.8c.4 0 .7.3.7.7v41.9c0 .4-.3.7-.7.7h-22.4c-.4 0-.7-.3-.7-.7V86.5c0-.4.3-.7.7-.7h22.4"/>
+  </g>
+  <linearGradient id="SVGID_7_" gradientUnits="userSpaceOnUse" x1="-73.41" y1="701.6741" x2="262.92" y2="701.6741" gradientTransform="translate(.02 -609.83)">
+    <stop offset="0" stop-color="#00C8D7"/>
+    <stop offset="1" stop-color="#0A84FF"/>
+  </linearGradient>
+  <path class="st9" d="M82.9 97.8c-.6 0-1.1-.2-1.6-.6l-15-12.1c-1.5-1-2.7-2.2-3.7-3.7-3.3-5.1-2.6-11.7 1.7-16 5-5 13-5 17.9 0 .1.1.3.2.5.2s.4-.1.5-.2c5.1-4.8 13.1-4.6 18 .5s4.6 13.1-.5 18c-.5.4-1 .9-1.5 1.2L84.4 97.3c-.4.3-1 .5-1.5.5zm41 23.7l11-9c4.4-3 5.6-9 2.7-13.4-3-4.4-9-5.6-13.4-2.7-.5.3-1 .7-1.4 1.2 0 0-.1.1-.2.1s-.1 0-.2-.1c-3.8-3.8-9.9-3.8-13.7 0-3.2 3.2-3.8 8.3-1.3 12.2.7 1.1 1.7 2.1 2.8 2.8l11.1 9c.7.6 1.8.6 2.6-.1z"/>
+  <path class="st1" d="M73.3 62.8c3.1 0 6.1 1.2 8.3 3.4.3.3.7.5 1.2.5.4 0 .9-.2 1.2-.5 4.6-4.5 12-4.5 16.6.1 4.5 4.6 4.5 12-.1 16.6-.5.5-1.1 1-1.7 1.4l-15 12.2c-.3.2-.6.3-.9.3s-.7-.1-.9-.3L67 84.3c-1.4-.9-2.5-2-3.4-3.4-3-4.6-2.4-10.7 1.5-14.7 2.1-2.1 5.1-3.4 8.2-3.4m0-2c-3.6 0-7.1 1.4-9.7 4-4.6 4.6-5.3 11.8-1.8 17.2 1 1.6 2.3 2.9 3.9 3.9l15 12.1c1.3 1 3.1 1 4.3 0l14.9-12.1c6.3-4.2 8-12.7 3.8-19s-12.7-8-19-3.8c-.7.5-1.3 1-1.9 1.5-2.6-2.4-6-3.8-9.5-3.8z"/>
+  <path class="st3" d="M66.3 76.2h-.2c-1.1-.1-1.9-1-1.8-2.1.3-3.8 3.1-7 6.8-7.8 1-.4 2.2 0 2.6 1s0 2.2-1 2.6c-.2.1-.5.2-.8.2-2.1.5-3.6 2.2-3.8 4.3 0 1-.8 1.7-1.8 1.8z"/>
+  <path class="st1" d="M115.4 95.8c2.3 0 4.5.9 6.1 2.6.2.2.5.4.9.4.3 0 .6-.1.9-.4 3.4-3.4 8.9-3.4 12.3 0 3.4 3.4 3.4 8.9 0 12.3-.4.4-.8.8-1.3 1.1l-11.1 9c-.2.2-.4.2-.7.2-.2 0-.5-.1-.7-.2l-11.1-9c-1-.7-1.9-1.5-2.5-2.5-2.2-3.4-1.7-8 1.1-10.9 1.6-1.7 3.8-2.6 6.1-2.6m0-2c-2.8 0-5.5 1.1-7.5 3.1-3.6 3.6-4.1 9.2-1.4 13.4.8 1.2 1.8 2.2 3 3l11 8.9c1.1.9 2.7.9 3.9 0l11-9c4.9-3.3 6.1-10 2.8-14.8-3.3-4.9-10-6.1-14.8-2.8-.3.2-.7.5-1 .7-2-1.6-4.5-2.6-7-2.5z"/>
+  <path class="st3" d="M110.3 105.7c-.9-.1-1.5-.8-1.4-1.6.2-2.8 2.2-5.2 5-5.8.8-.2 1.6.3 1.8 1.1.2.8-.3 1.6-1.1 1.8h-.1c-1.5.3-2.7 1.6-2.8 3.2-.1.7-.7 1.3-1.4 1.3z"/>
+  <path class="st1" d="M82.7 98.2c-.7 0-1.2-.6-1.2-1.2v-6.2c0-.3.1-.6.4-.9l1.7-1.7-4-4c-.5-.5-.5-1.3 0-1.8l6.3-6.3-3.9-3.9c-.2-.2-.4-.6-.4-.9v-4.6c0-.7.6-1.2 1.2-1.2s1.2.6 1.2 1.2v4.1l4.4 4.4c.5.5.5 1.3 0 1.8l-6.3 6.3 4 4c.5.5.5 1.3 0 1.8L84 91.2v5.7c0 .7-.6 1.3-1.3 1.3zm39.7 23.8c-.7 0-1.2-.6-1.2-1.2V116c0-.3.1-.6.4-.9l1.1-1.1-2.9-2.9c-.2-.2-.4-.6-.4-.9s.1-.6.4-.9l4.6-4.6-2.7-2.7c-.2-.2-.4-.6-.4-.9v-3.5c0-.7.6-1.2 1.2-1.2s1.2.6 1.2 1.2v3l3.2 3.2c.2.2.4.6.4.9s-.1.6-.4.9l-4.6 4.6 2.9 2.9c.5.5.5 1.3 0 1.8l-1.6 1.6v4.2c.1.8-.5 1.3-1.2 1.3z"/>
+</svg>
--- a/browser/themes/shared/jar.inc.mn
+++ b/browser/themes/shared/jar.inc.mn
@@ -103,16 +103,17 @@
   skin/classic/browser/preferences/in-content/search.svg       (../shared/incontentprefs/search.svg)
   skin/classic/browser/preferences/in-content/siteDataSettings.css (../shared/incontentprefs/siteDataSettings.css)
   skin/classic/browser/preferences/in-content/sync-devices.svg (../shared/incontentprefs/sync-devices.svg)
   skin/classic/browser/preferences/in-content/sync.svg         (../shared/incontentprefs/sync.svg)
 * skin/classic/browser/preferences/in-content/containers.css   (../shared/incontentprefs/containers.css)
 * skin/classic/browser/preferences/containers.css              (../shared/preferences/containers.css)
   skin/classic/browser/fxa/default-avatar.svg                  (../shared/fxa/default-avatar.svg)
   skin/classic/browser/fxa/sync-illustration.svg               (../shared/fxa/sync-illustration.svg)
+  skin/classic/browser/fxa/sync-illustration-issue.svg         (../shared/fxa/sync-illustration-issue.svg)
 
 
   skin/classic/browser/accessibility.svg              (../shared/icons/accessibility.svg)
   skin/classic/browser/accessibility-active.svg       (../shared/icons/accessibility-active.svg)
   skin/classic/browser/arrow-left.svg                 (../shared/icons/arrow-left.svg)
   skin/classic/browser/back.svg                       (../shared/icons/back.svg)
   skin/classic/browser/back-12.svg                    (../shared/icons/back-12.svg)
   skin/classic/browser/bookmark.svg                   (../shared/icons/bookmark.svg)
--- a/browser/themes/shared/syncedtabs/sidebar.inc.css
+++ b/browser/themes/shared/syncedtabs/sidebar.inc.css
@@ -208,24 +208,32 @@ body {
   border-top: 0px;
 }
 
 .deck .sync-state.selected {
   display: unset;
   opacity: 100;
 }
 
-.deck .syncIllustration {
+.deck .syncIllustration,
+.deck .syncIllustrationIssue {
   height: 150px;
   margin-top: 20px;
-  background-image: url(chrome://browser/skin/fxa/sync-illustration.svg);
   background-position: center;
   background-repeat: no-repeat;
 }
 
+.deck .syncIllustration {
+  background-image: url(chrome://browser/skin/fxa/sync-illustration.svg);
+}
+
+.deck .syncIllustrationIssue {
+  background-image: url(chrome://browser/skin/fxa/sync-illustration-issue.svg);
+}
+
 .deck .instructions {
   text-align: center;
   padding: 0 11px;
   max-width: 15em;
   margin: 1em auto;
 }
 
 .deck .button {
--- a/browser/tools/mozscreenshots/browser_boundingbox.js
+++ b/browser/tools/mozscreenshots/browser_boundingbox.js
@@ -3,17 +3,17 @@
  */
 
 "use strict";
 
 add_task(async function() {
   const scale = window.QueryInterface(Ci.nsIInterfaceRequestor)
                       .getInterface(Ci.nsIDocShell).QueryInterface(Ci.nsIBaseWindow)
                       .devicePixelsPerDesktopPixel;
-  let rect = TestRunner._findBoundingBox(["#tabbrowser-tabs"]);
+  let {bounds, rects} = TestRunner._findBoundingBox(["#tabbrowser-tabs"]);
   let element = document.querySelector("#tabbrowser-tabs");
   let tabBar = element.ownerDocument.getBoxObjectFor(element);
 
   // Calculate expected values
   let expectedLeft = scale * (tabBar.screenX - TestRunner.croppingPadding);
   let expectedTop = scale * (tabBar.screenY - TestRunner.croppingPadding);
   let expectedRight = scale * (tabBar.width + TestRunner.croppingPadding * 2) + expectedLeft;
   let expectedBottom = scale * (tabBar.height + TestRunner.croppingPadding * 2) + expectedTop;
@@ -25,23 +25,34 @@ add_task(async function() {
   let windowBottom = window.outerHeight * scale + windowTop;
 
   // Adjust values based on browser window
   expectedLeft = Math.max(expectedLeft, windowLeft);
   expectedTop = Math.max(expectedTop, windowTop);
   expectedRight = Math.min(expectedRight, windowRight);
   expectedBottom = Math.min(expectedBottom, windowBottom);
   // Check width calculation on simple example
-  is(rect.width, expectedRight - expectedLeft,
+  is(bounds.width, expectedRight - expectedLeft,
      "Checking _findBoundingBox width calculation");
   // Check height calculation on simple example
-  is(rect.height, expectedBottom - expectedTop,
+  is(bounds.height, expectedBottom - expectedTop,
      "Checking _findBoundingBox height caclulation");
+  is(bounds.left, rects[0].left,
+    "Checking _findBoundingBox union.left and rect.left is the same for a single selector");
+  is(bounds.right, rects[0].right,
+    "Checking _findBoundingBox union.right and rect.right is the same for a single selector");
+  is(bounds.top, rects[0].top,
+    "Checking _findBoundingBox union.top and rect.top is the same for a single selector");
+  is(bounds.bottom, rects[0].bottom,
+    "Checking _findBoundingBox union.bottom and rect.bottom is the same for a single selector");
 
-  rect = TestRunner._findBoundingBox(["#forward-button", "#TabsToolbar"]);
+  let result = TestRunner._findBoundingBox(["#forward-button", "#TabsToolbar"]);
+  bounds = result.bounds;
+  rects = result.rects;
+
   element = document.querySelector("#TabsToolbar");
   let tabToolbar = element.ownerDocument.getBoxObjectFor(element);
   element = document.querySelector("#forward-button");
   let fButton = element.ownerDocument.getBoxObjectFor(element);
 
   // Calculate expected values
   expectedLeft = scale * (Math.min(tabToolbar.screenX, fButton.screenX)
                               - TestRunner.croppingPadding);
@@ -56,25 +67,36 @@ add_task(async function() {
 
   // Adjust values based on browser window
   expectedLeft = Math.max(expectedLeft, windowLeft);
   expectedTop = Math.max(expectedTop, windowTop);
   expectedRight = Math.min(expectedRight, windowRight);
   expectedBottom = Math.min(expectedBottom, windowBottom);
 
   // Check width calculation on union
-  is(rect.width, expectedRight - expectedLeft,
+  is(bounds.width, expectedRight - expectedLeft,
      "Checking _findBoundingBox union width calculation");
   // Check height calculation on union
-  is(rect.height, expectedBottom - expectedTop,
+  is(bounds.height, expectedBottom - expectedTop,
      "Checking _findBoundingBox union height calculation");
+  // Check single selector's left position
+  is(rects[0].left, Math.max(scale * (fButton.screenX - TestRunner.croppingPadding), windowLeft),
+    "Checking single selector's left position when _findBoundingBox has multiple selectors");
+  // Check single selector's right position
+  is(rects[0].right, Math.min(scale * (fButton.width + fButton.screenX + TestRunner.croppingPadding), windowRight),
+    "Checking single selector's right position when _findBoundingBox has multiple selectors");
+  // Check single selector's top position
+  is(rects[0].top, Math.max(scale * (fButton.screenY - TestRunner.croppingPadding), windowTop),
+    "Checking single selector's top position when _findBoundingBox has multiple selectors");
+  // Check single selector's bottom position
+  is(rects[0].bottom, Math.min(scale * (fButton.height + fButton.screenY + TestRunner.croppingPadding), windowBottom),
+    "Checking single selector's bottom position when _findBoundingBox has multiple selectors");
 
     // Check that nonexistent selectors throws an exception
   Assert.throws(() => {
     TestRunner._findBoundingBox(["#does_not_exist"]);
   }, "No element for '#does_not_exist' found.", "Checking that nonexistent selectors throws an exception");
 
   // Check that no selectors throws an exception
   Assert.throws(() => {
-    rect = TestRunner._findBoundingBox([]);
-
+    TestRunner._findBoundingBox([]);
   }, "No selectors specified.", "Checking that no selectors throws an exception");
 });
--- a/browser/tools/mozscreenshots/browser_screenshots_cropping.js
+++ b/browser/tools/mozscreenshots/browser_screenshots_cropping.js
@@ -41,42 +41,71 @@ async function compareImages(window, exp
 
   is(testCanvas.width, expectedCanvas.width, "The test and expected images must be the same size");
   is(testCanvas.height, expectedCanvas.height, "The test and expected images must be the same size");
 
   const nsIDOMWindowUtils = window.getInterface(Ci.nsIDOMWindowUtils);
   return nsIDOMWindowUtils.compareCanvases(expectedCanvas, testCanvas, {});
 }
 
-async function cropAndCompare(window, src, expected, test, region) {
-  await TestRunner._cropImage(window, src, region, test);
+async function cropAndCompare(window, src, expected, test, region, subregions) {
+  await TestRunner._cropImage(window, src, region, subregions, test);
 
   return compareImages(window, expected, OS.Path.toFileURI(test));
 }
 
 add_task(async function crop() {
   const window = Services.wm.getMostRecentWindow("navigator:browser");
 
   const tmp = OS.Constants.Path.tmpDir;
   is(await cropAndCompare(
       window,
       "chrome://mozscreenshots/content/lib/robot.png",
       "chrome://mozscreenshots/content/lib/robot_upperleft.png",
       OS.Path.join(tmp, "test_cropped_upperleft.png"),
-      new Rect(0, 0, 32, 32)
+      new Rect(0, 0, 32, 32),
+      [new Rect(0, 0, 32, 32)]
   ), 0, "The image should be cropped to the upper left quadrant");
 
   is(await cropAndCompare(
       window,
       "chrome://mozscreenshots/content/lib/robot.png",
       "chrome://mozscreenshots/content/lib/robot_center.png",
       OS.Path.join(tmp, "test_cropped_center.png"),
-      new Rect(16, 16, 32, 32)
+      new Rect(16, 16, 32, 32),
+      [new Rect(16, 16, 32, 32)]
   ), 0, "The image should be cropped to the center of the image");
 
   is(await cropAndCompare(
       window,
       "chrome://mozscreenshots/content/lib/robot.png",
       "chrome://mozscreenshots/content/lib/robot_uncropped.png",
       OS.Path.join(tmp, "test_uncropped.png"),
-      new Rect(-8, -9, 80, 80)
+      new Rect(-8, -9, 80, 80),
+      [new Rect(-8, -9, 80, 80)]
   ), 0, "The image should be not be cropped, and the cropping region should be clipped to the size of the image");
+
+  is(await cropAndCompare(
+      window,
+      "chrome://mozscreenshots/content/lib/robot.png",
+      "chrome://mozscreenshots/content/lib/robot_diagonal.png",
+      OS.Path.join(tmp, "test_diagonal.png"),
+      new Rect(0, 0, 64, 64),
+      [
+        new Rect(0, 0, 16, 16),
+        new Rect(16, 16, 16, 16),
+        new Rect(32, 32, 16, 16),
+        new Rect(48, 48, 16, 16)
+      ]
+  ), 0, "The image should be contain squares across the diagonal");
+
+  is(await cropAndCompare(
+      window,
+      "chrome://mozscreenshots/content/lib/robot.png",
+      "chrome://mozscreenshots/content/lib/robot_cropped_diagonal.png",
+      OS.Path.join(tmp, "test_cropped_diagonal.png"),
+      new Rect(16, 16, 48, 48),
+      [
+        new Rect(16, 16, 16, 16),
+        new Rect(32, 32, 16, 16),
+      ]
+  ), 0, "The image should be cropped with squares across the diagonal");
 });
--- a/browser/tools/mozscreenshots/mozscreenshots/extension/TestRunner.jsm
+++ b/browser/tools/mozscreenshots/mozscreenshots/extension/TestRunner.jsm
@@ -204,58 +204,56 @@ this.TestRunner = {
     // Set window type, default "navigator:browser"
     windowType = windowType || "navigator:browser";
     let browserWindow = Services.wm.getMostRecentWindow(windowType);
     // Scale for high-density displays
     const scale = browserWindow.QueryInterface(Ci.nsIInterfaceRequestor)
                         .getInterface(Ci.nsIDocShell).QueryInterface(Ci.nsIBaseWindow)
                         .devicePixelsPerDesktopPixel;
 
-    let finalRect = undefined;
+    const windowLeft = browserWindow.screenX * scale;
+    const windowTop = browserWindow.screenY * scale;
+    const windowWidth = browserWindow.outerWidth * scale;
+    const windowHeight = browserWindow.outerHeight * scale;
+
+    let bounds;
+    const rects = [];
     // Grab bounding boxes and find the union
     for (let selector of selectors) {
       let element;
       // Check for function to find anonymous content
       if (typeof(selector) == "function") {
         element = selector();
       } else {
         element = browserWindow.document.querySelector(selector);
       }
 
       if (!element) {
         throw `No element for '${selector}' found.`;
       }
 
       // Calculate box region, convert to Rect
       let box = element.ownerDocument.getBoxObjectFor(element);
-      let newRect = new Rect(box.screenX * scale, box.screenY * scale,
+      let rect = new Rect(box.screenX * scale, box.screenY * scale,
                              box.width * scale, box.height * scale);
+      rect.inflateFixed(this.croppingPadding * scale);
+      rect.left = Math.max(rect.left, windowLeft);
+      rect.top = Math.max(rect.top, windowTop);
+      rect.right = Math.min(rect.right, windowLeft + windowWidth);
+      rect.bottom = Math.min(rect.bottom, windowTop + windowHeight);
+      rects.push(rect);
 
-      if (!finalRect) {
-        finalRect = newRect;
+      if (!bounds) {
+        bounds = rect;
       } else {
-        finalRect = finalRect.union(newRect);
+        bounds = bounds.union(rect);
       }
     }
 
-    // Add fixed padding
-    finalRect = finalRect.inflateFixed(this.croppingPadding * scale);
-
-    let windowLeft = browserWindow.screenX * scale;
-    let windowTop = browserWindow.screenY * scale;
-    let windowWidth = browserWindow.outerWidth * scale;
-    let windowHeight = browserWindow.outerHeight * scale;
-
-    // Clip dimensions to window only
-    finalRect.left = Math.max(finalRect.left, windowLeft);
-    finalRect.top = Math.max(finalRect.top, windowTop);
-    finalRect.right = Math.min(finalRect.right, windowLeft + windowWidth);
-    finalRect.bottom = Math.min(finalRect.bottom, windowTop + windowHeight);
-
-    return finalRect;
+    return {bounds, rects};
   },
 
   async _performCombo(combo) {
     let paddedComboIndex = padLeft(this.currentComboIndex + 1, String(this.combos.length).length);
     this.mochitestScope.info(
       `Combination ${paddedComboIndex}/${this.combos.length}: ${this._comboName(combo).substring(1)}`
     );
 
@@ -327,70 +325,87 @@ this.TestRunner = {
         this.mochitestScope.ok(false, "All configurations in the combo have a single window type");
         return;
       }
       for (const selector of obj.selectors) {
         finalSelectors.push(selector);
       }
     }
 
-    const rect = this._findBoundingBox(finalSelectors, windowType);
-    this.mochitestScope.ok(rect, "A valid bounding box was found");
-    if (!rect) {
+    const {bounds, rects} = this._findBoundingBox(finalSelectors, windowType);
+    this.mochitestScope.ok(bounds, "A valid bounding box was found");
+    if (!bounds) {
       return;
     }
-    await this._onConfigurationReady(combo, rect);
+    await this._onConfigurationReady(combo, bounds, rects);
   },
 
-  async _onConfigurationReady(combo, rect) {
+  async _onConfigurationReady(combo, bounds, rects) {
     let filename = padLeft(this.currentComboIndex + 1,
                            String(this.combos.length).length) + this._comboName(combo);
     const imagePath = await Screenshot.captureExternal(filename);
 
     let browserWindow = Services.wm.getMostRecentWindow("navigator:browser");
-    await this._cropImage(browserWindow, OS.Path.toFileURI(imagePath), rect, imagePath).catch((msg) => {
+    await this._cropImage(browserWindow, OS.Path.toFileURI(imagePath), bounds, rects, imagePath).catch((msg) => {
       throw `Cropping combo [${combo.map((e) => e.name).join(", ")}] failed: ${msg}`;
     });
     this.completedCombos++;
     this.mochitestScope.info("_onConfigurationReady");
   },
 
   _comboName(combo) {
     return combo.reduce(function(a, b) {
       return a + "_" + b.name;
     }, "");
   },
 
-  async _cropImage(window, srcPath, rect, targetPath) {
+  async _cropImage(window, srcPath, bounds, rects, targetPath) {
     const { document, Image } = window;
     const promise = new Promise((resolve, reject) => {
       const img = new Image();
-      img.onload = function() {
+      img.onload = () => {
         // Clip the cropping region to the size of the screenshot
         // This is necessary mostly to deal with offscreen windows, since we
         // are capturing an image of the operating system's desktop.
-        rect.left = Math.max(0, rect.left);
-        rect.right = Math.min(img.naturalWidth, rect.right);
-        rect.top = Math.max(0, rect.top);
-        rect.bottom = Math.min(img.naturalHeight, rect.bottom);
+        bounds.left = Math.max(0, bounds.left);
+        bounds.right = Math.min(img.naturalWidth, bounds.right);
+        bounds.top = Math.max(0, bounds.top);
+        bounds.bottom = Math.min(img.naturalHeight, bounds.bottom);
 
         // Create a new offscreen canvas with the width and height given by the
         // size of the region we want to crop to
         const canvas = document.createElementNS("http://www.w3.org/1999/xhtml", "canvas");
-        canvas.width = rect.width;
-        canvas.height = rect.height;
+        canvas.width = bounds.width;
+        canvas.height = bounds.height;
         const ctx = canvas.getContext("2d");
-        // By drawing the image with the negative offset, the unwanted regions
-        // are drawn off canvas, and are not captured when the canvas is saved.
-        ctx.drawImage(img, -rect.x, -rect.y);
+
+        for (const rect of rects) {
+          rect.left = Math.max(0, rect.left);
+          rect.right = Math.min(img.naturalWidth, rect.right);
+          rect.top = Math.max(0, rect.top);
+          rect.bottom = Math.min(img.naturalHeight, rect.bottom);
+
+          const width = rect.width;
+          const height = rect.height;
+
+          const screenX = rect.left;
+          const screenY = rect.top;
+
+          const imageX = screenX - bounds.left;
+          const imageY = screenY - bounds.top;
+          ctx.drawImage(img,
+            screenX, screenY, width, height,
+            imageX, imageY, width, height);
+        }
+
         // Converts the canvas to a binary blob, which can be saved to a png
         canvas.toBlob((blob) => {
           // Use a filereader to convert the raw binary blob into a writable buffer
           const fr = new FileReader();
-          fr.onload = function(e) {
+          fr.onload = (e) => {
             const buffer = new Uint8Array(e.target.result);
             // Save the file and complete the promise
             OS.File.writeAtomic(targetPath, buffer, {}).then(resolve);
           };
           // Do the conversion
           fr.readAsArrayBuffer(blob);
         });
       };
new file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..b19ae9e8f25475faba650131775e9c7cf96872c1
GIT binary patch
literal 1901
zc$@)k2a@=SP)<h;3K|Lk000e1NJLTq001xm001xu1^@s6R|5Hm000LxNkl<ZXo2n2
z|5MZV0mt!Z<cnyjuBWq3TkCCpp?9la+TCW?Zmw5tk)o)eC?KSK4+KaEBz#Lk1eEYC
zAwWVPfuI7SfT%c?FM_B+kuPG=si?gvtL@g^dUn0tbkF@@Z~FuG!>7CBdH4F|{oCXH
zzI%%X#A~>*!k_C~LaFlbB+uU$lf{U$zM2N74Xf2eQBgjsj8qiLB)Zg5eD}^Ap1tSE
z^N?kTgzkjr8hYK{Vr0PwbosnXiAY55-d)tzR#IPAO;u$n`2||`h6Up~+sa>ly^v?W
z_2PNrVrY*R9?;<KL%Zi&TonJD;d#Ho;^v8d&K!#KwK(jRm`nz;b5vw&)bux6z(>UM
z-9G~7yKrK&@PO7?K2*$plV0~VTya}R%d8(`6p7Gm{(u^Xg_4p2)R{`sGL^LHQ(>)$
z=h2Jd*hV<LRd_(ltT)N?TE!WUwG6ng<>ai_G0haAdvy_JtC2#(epDH$><-;cWYj)*
zdnW(d^9ndNK*uKG0d}_~Xy*FRHhV2S?rS;fwv6(bGstsy$6&}Kzd%b`hJxUbU2F{t
zq4WLau+f9(2}{AeUU-1P^QUBZujc6Nb#!{HqsDC!X0e!jPfzkRYV`ZFQL5xb#!B!H
z@FO^UFUL2o;OQ^L;Oj0tKt693X74q$%wA9H>~)mS@_qrBKOb$T60I(aRAmwgvRL*>
zLh$wf7?mm+gEpPk<WS)ORWGl@Id?tHUT@>@Sb<4AgG#ZO%%wje_ka$qE)#ib5>i<V
z@rf}=lVS)9--BMCMU|~wcz|>6J2cGw1vQ>Oqg?Dpg;-3nNQ5*hf})ZFw7HoiD-wxM
zj3HhYLy}xddWM1=jhfQZ1HuC=9<O6^n~PaIlX9_`0#8rEfA>DR0v%<>LUg%V$Wo+)
zNkZAaYdfE8^W$T`kJ!9rGh6(9g$F3#c#Vt&uaNTUA|h6=B+&08l9b7qOa{#5Md<Wd
zBqYVL%4a1ndB23$94|aPJa}WiJ6;}c!UG~?@od`_h~KtP@C)!KS{g%f=|O61DlnUi
z$<=2QpBTf!g$oghM2Ke0V8L8BK9hK}Vet<DkSI?iS)R!D-8<R$={_>D)3I7j)Yew~
z_XT8eygbjFIdi;NzU(!8R<C67@)ayux<q(De3F!qh){L}2O^1)pw3F8%veZ8WeLS4
z1>|TmNKB4r{``5w#Ysq!ClD1YAu?7%l3Xf0AR=1A-tfKb3=SkDA{4nY38hMoMwf|3
zmr0shLHMU(Y<TZi><r$C{|<k)?$}B|PyoC31_=-NUklt5NK~u@WqJyE`5KI-LaY`e
z4to{O`f3g}*l0U`h_>U0X=<vYxv`F;%}!d5HqhX#7XAW0+_H&nK>_TG3_~i5L#dLJ
zrBS2bpM$}WhtXJw%~p=xUP+D3jJ>v!`g#j>bry<>^MwcaZS!T5?}u#KvYCLO?F8-#
zB4|$_J9h6RR1!u)ay;posbuA-$kwXS=4O#spd+t9hf=K&9^j}oQ)@F}sVt`0u%E(%
zIt+!m6dUr;<fJ24Bp^+QL7EUvQgR%!q&VWFQAEW^2o4Dr9$>FEW4D!4Yco+(ZKB#z
zinXc~OI0a0s}WnZ5u3G)YHJzRs#2_$QtUNm%8K>a4L^WSpuekwi`^acb)Des=@#0L
zHPCpd8mFU@dPfybM<or8Dh}0IaMo4ftgE8Fu9AAY8Jj6pc);VyQKs&V@aWDk4=1j1
z@1~3Kp$iQ5|DN7+ZJa&TLg$GlPPI33s_h6T+YWQG^#~oUhiPf77as7Z$uS<?9pUla
zQKlxx_~QNzzL*^2(Vbx)OkCym=q1L7`WSX~b9LYxSNhIysrNMf-5vCIbqEg_AL`@A
zwF`_7^)WFrz};~d4{r_e`0fZ(lVeOxj`8^J2!H&;bsl{GLbpc;_-yzh<3ksO2MoKq
z8FqDZ{mS=qbu&8H!_A?7ZjD~z&e#?1-E?t(;u;TcUE{&THSSMbW%A}AcgJ1A18xjm
zVC-ryBZEB*ySf;<e4e4p=ed5Ri_yUzMz8iVI{0EEt{z4Pdl+%`Fzo7Pbg)-=z_V|r
zc=pW{|NPq{{_*wa{Qax@JpJ+>PoKQlw@)Vd_RC4W`}2$QzdpXr*N-On>hqgCd3aNJ
zK=YAWTACdkYp$cM#mTAmMoyn-qO+r!(;dxpo;=F=Gp+QTYv)p5CxZj$xZKytz=clw
zyH5!Z(CRYL=4PVPXQR_+{b&09*&H~aqo^p4va*Ai%_UeYMr_sP)YMd9vz24D8ifaB
z=cq_cPa!r{j#RB8TB#sDQ%!WLf*3^#aq?uOav3S<3RId5)VUhedJXBCEEE}O!UNLO
z3c_VFV$8L~SJttwpoH|s6U3SAC=R!i;yg~Iu^O4<C^CB!k;N6r9ZjejJ5V@}2@jZV
ny6L8yZo28Fn{K-4{%`v?gX#_~t^vsZ00000NkvXXu0mjfgr2yk
new file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0ca3c6851db30aa644ec35d199c169ccdcfa057e
GIT binary patch
literal 3077
zc$@(R4EpnlP)<h;3K|Lk000e1NJLTq002M$002M;1^@s6s%dfF000ZjNkl<Zc%1Fp
zYgChWp1|=aw{X#FdvvPX){bZDZKkz??P+bt?Pb*QGI&Kru14;G1Ofq)kb6Ra5H2AE
zLLh{IARvO&%e3`gTR|z{rM6qOm-Vvk%ue@gx83QSsebo`ZfDQ#yY0++!umbue{#<A
zy!oBa^G{yn{|E#RdeDO&^q>bl=>KhBhrBd@L=Y8pzvsAboeN$PdprE~53UPd4E^u-
zs#e83{9TgPO0D<x`!U=9Np^uC<inunzeTumw?H76^k45a&3@tS@gzBSmZW{h!8e!R
zrxE}59SM4-`77b_+kX=NN+%EqCdCK@0ksb;X&DGw%Kmw)xV|A5p4oMso}T})OD06S
zL*_npEF_ZeB36QMxyUkYaei0$^E3xerzda$hod&|Z%=LK&Z5N6v3E?p-}C-W>K8`-
z<5<|soDYBbc1uJgePPeh89WD9KsbHENZy;bmOoEf&7DWq4+;bV!7XL^x#P=Le{sL(
z{;TDTSe(-x8ih9^lFsm@bcZcvUuYCffm3M>oWX%9FLLAI6(Ic8ovXrS=g!ZM=6Z^j
z`#twxt;~m>p1wD9DZVLDc&0?+nG%V2N)*1ZUtkXipetw&C#Jm2A7-rK%+!}S9kiI?
zDKAbOfd6f$1D@r=ylCEEn?OT|5Oeez?2Wbf+L~zhxpBGdR92Q!tSlg3F6D?Kl|Mcg
z&iB6*ay$M>{|DSJe3s)uiy52oI!6}0M3q2*A#NKjEl%3JO*A;GDYfW`ON?dc&3*jg
z<(Yi{N*K3uX2U?5{{yrKFJeHrn0Er6VRYKx(-<Vg7#@zTR7b1FiQR5Rttm#WRdA}S
z5f%x!z5QYEy#)JL`9HwEz(v%D{+#2%&vGUxlHS0dP%99iUG+N6t&LPwSx~6t6sqL(
z8w+5GfZJ)aVehMOaJ~Nn^alPMbJzk7hb-ZA@Dkn#oP&L`0R5~-aky%!u$m}V7O*{G
zJ1MD2uyitac02;D%V2P&{{wh}9zz?ki2l$e91C7TPtcRpO`e1~I2fzdOsPdjp)#M?
z_-(9DjOWnux$tTTx3eDyN0k2qScN}BIdvgDp^+R4iKIE`Q5*pQlnRBIwF-<THS%H^
zDH$T7H?AW#IgY+p=kd)80k9_6{{dvv7T}orEWM#o><f*gE^z8yfa%lGspRPNB^1b|
zWJ@wg62-G7dNswxxma~Y?8{B?e}IOEBJo8;(H*vw){uGF118}N2tf7t-(e`zqtmO9
z6-W_F(#gt6M=VVzF?k0@V+jrJI{ydoMLb7)#Pc)@pQ0`xi28s4Yytt|)SXmTS<o3&
z<mTs)m6J}EBps<tOp!7ljaET*b(#MIG=|K<9u$EiU@~<90a%1Wl3!bn-lC_bwgSDO
z1WBHlL{S2pwr%1!8`iOU-LF}-b`@)**Z4nx{HIT#obd>GvmRyV!uiCk`!!N|E_S;W
zM_na)V+q;POcpGf&y=ZC2n!D*BqW5NP7fw5B*^~(c1p6?uq}pl8-ByOjnSlu)3H^T
z)6!gz!(qc<RFjpH&dixJ5eNhbCQV{SL=Yc{rm}4I1OUj9<&Z1OVbk_4B<)H<sV>6h
zveVL1|Cb#g$>gDFQwa|bWA2kruxR0YX3w3+V~;=P{{UH1G4VSS*c=;!C|!i2q>!50
z3hJFz*s3gObV_n^vzR`88kw0Q@?_bhW{6115FwR`{U2awnuxgMIJU&b5Wh14nOusz
zScX=wLaSF%sK_UIS0c-P`663lw-CKKn)RF4vvKQ2w#9Are}Erb%#IjRGepRX@-UZb
zskK+&YOJNz(}1t7iH>$R{e2zu_w8oS9xvTpUV6HH^!BvV?rZXYz!hs(vSI5+l2Q^8
zOEQrc%P7$*Fq$-2t!8R#D{#B(@OYdwyB&C1oV2wy;`KICX)E=AfOQ+zuyV}`R<2#e
z#;u!(*|C+aJ7U<peG3VqM6z?UC{h(rqA5nLQ=l`HV7BNnTlB~k`Th^k+Tx(aZKu&`
z!)7&6QLe{YVZdfJqtz54%g;ufosKv=4QXyB5@{xx;#5-8Ma0I(`agiD#ev6NM~mA|
zbCaE>#%f#*)igF#<95~JZmPxYs-ekMgR7w$S7SAvW(PGkBOdDn_z^vIWRQ1`4s!Cy
ze%?IT%fQ}tx;mQhwK{2QZNS&+q`kF)4sRnqZv#GW18rU>Z5{`1dx8H0+`Kl<t*c|)
z_;{4hE}!G-r6DekoM8CWZ#n)}KW`rB<<R~;92n^0Kz}E1^zY`4eVq*M+f8p*oBsp+
z)3x*5xH87gE92a{cAhUjxxg3K&U54AQ9iwVmXF5Y<>JUmMu(1a_VinvIeD0Ok00dJ
z(Lqif8T5aEiz6qwaP9;bM^18i>@-&{4)NKCBiy_)#;t4TxpnP4H?NFw{U6@v(|bGQ
zqw&*xF!~M`M^5-Z!06CXMu(2_{+WAg=qTgE$G9|diVw%%<>T{bxO!=bPcEP1vk%Yl
z>E&~La``ORE)8?#;*kFXTo^gQ`Lo9v8$QP9&=E%7dz+E>-sb%?M;ISI#`xLej1S*k
zV?)On8$QO^&@o1bjxs)c-2VZ-|N0i+e|?KT|LYC@^wsD5`|m&Do8MjKn=kLKZ@;|8
zx4*l_AOHF8^?$kf5ntW7%<n(H#Fw94@_&Ht&K7#RTiM&~rN7t5fq^a#?%%_q!EO!?
zc5~>B9^O8@k7I8Q@b1Y&44;0B_f8(-^oc{9I(oqW0d#s5I)e(mQH|bM^20Kk)RdL!
zsjM_pQ&Wz^QAJ~8E$*f|nw#r!yX$bdYW*KTttqCUD36Q+8Dd2-Y4UusR0`4x@=4Fn
zBU6@(SSBH_D4$}j5`{sF!l<Q4TSC6F(EkAn75OAfB&0i9$Z~o~vQ$yjwVzCfhy2|G
z<oWtYsck~i+JnTihZI{qverEmcMX#7+w1=TKeh~e6SCGG^1R(7TdK))cu26=sjM`!
zyQ7)D-VO!__OQRd3r}e&1v3`WwD2`9hzhy<LY)5tN}D@T?mdj6=OE$+FWQ4Aka+h}
zRcWT(=cc!(o&LT~2KIH*?x;cW@IvZlL~&x?T1Fpv&Hn*XZ1v<cwUbfTOj4<hwCV;D
z3|4At%4zpC(bMf?Z*K>Edw0{>>cSMhko@qcu!b(AFZdV!4<M?rBlGnk^L8VyYo?&B
z8;Q+MeSH-j?QZsTx6#wxMt64`ZEY?@lV_10I*0VoIb?@SfLC<3Zr#APZ5v2T-a%$g
z28AU>R8*GI>~_%ZbJN+;Lc7mRtH(*LtD3aQvq%*_L3-#D<b+NDfXwW4q<LcG#WFNH
zCFNEVPG>bOE%kU?8}YU_(%kI8;i#m<q9HzT7Mp(dH1TtvCT->d{|6{i7NAlWp)-_V
zDbrJ3RgTkHjoV#EOLILgmmNo)jk0nhQdu?$JLA|TPGw_U3~QoS`#*r@-XEn#vzD@Q
z1GXv)c6%lD^;Oi@S5a4I#b&c$G-=3@W|1LIrLZ`Ue7Tgwo$>w;pf{9YFsd<`wUk<P
zR9H<^RavO1DW|%+jB=|HbEyuEPKj8OL28DGY)J+Ng9>Th1UQ+}8`T($Y78dL4+v%D
z2Ffaol$Pl+m^5g0N{W>Qq@;^T+8K}0tif~d>v6y5|IrzYYW@?!XwqOR)nYPhFqkyx
z^eR;9B8pXV@(Lt~Qj^HZ%|d5T<8G>>v9V_20T@jh3`R9tof4(GkV1tFnf&f6I!U4g
z(lS%-9^h7&<7shD0DyZ>+i1G`X)tNf8A>Qo7gMN^kyju=k|!oDGlhhtI7F$*cMorM
zN}LXx{{xsywV2FWjArd$p1xj%Qe8-Xp%h837_lS+QED>V;$qku8$<H0M9gLlzBZTt
z1C&)5DYfV@m+CN;YJUJwloXOzAR$MZK}L2eiK2L-H?QZF6|a!AD-lbXVPc#vSJ_G_
zD>q;=YtZVIC{+ch)WxXOMMz~ih$R`MWu~wrF_sl;R<L623SxF_K~W;d<8ksIralwd
TL#$4h00000NkvXXu0mjf|7ibB
--- a/build/automation.py.in
+++ b/build/automation.py.in
@@ -178,43 +178,32 @@ class Automation(object):
     def kill(self):
       if Automation().IS_WIN32:
         import platform
         pid = "%i" % self.pid
         subprocess.Popen(["taskkill", "/F", "/PID", pid]).wait()
       else:
         os.kill(self.pid, signal.SIGKILL)
 
-  def environment(self, env=None, xrePath=None, crashreporter=True, debugger=False, dmdPath=None, lsanPath=None, ubsanPath=None):
+  def environment(self, env=None, xrePath=None, crashreporter=True, debugger=False, lsanPath=None, ubsanPath=None):
     if xrePath == None:
       xrePath = self.DIST_BIN
     if env == None:
       env = dict(os.environ)
 
     ldLibraryPath = os.path.abspath(os.path.join(SCRIPT_DIR, xrePath))
-    dmdLibrary = None
-    preloadEnvVar = None
     if self.UNIXISH or self.IS_MAC:
       envVar = "LD_LIBRARY_PATH"
-      preloadEnvVar = "LD_PRELOAD"
       if self.IS_MAC:
         envVar = "DYLD_LIBRARY_PATH"
-        dmdLibrary = "libdmd.dylib"
-      else: # unixish
-        dmdLibrary = "libdmd.so"
       if envVar in env:
         ldLibraryPath = ldLibraryPath + ":" + env[envVar]
       env[envVar] = ldLibraryPath
     elif self.IS_WIN32:
       env["PATH"] = env["PATH"] + ";" + str(ldLibraryPath)
-      dmdLibrary = "dmd.dll"
-      preloadEnvVar = "MOZ_REPLACE_MALLOC_LIB"
-
-    if dmdPath and dmdLibrary and preloadEnvVar:
-      env[preloadEnvVar] = os.path.join(dmdPath, dmdLibrary)
 
     if crashreporter and not debugger:
       env['MOZ_CRASHREPORTER_NO_REPORT'] = '1'
       env['MOZ_CRASHREPORTER'] = '1'
     else:
       env['MOZ_CRASHREPORTER_DISABLE'] = '1'
 
     # Crash on non-local network connections by default.
--- a/build/mobile/remoteautomation.py
+++ b/build/mobile/remoteautomation.py
@@ -55,25 +55,22 @@ class RemoteAutomation(Automation):
 
     def setProduct(self, product):
         self._product = product
 
     def setRemoteLog(self, logfile):
         self._remoteLog = logfile
 
     # Set up what we need for the remote environment
-    def environment(self, env=None, xrePath=None, crashreporter=True, debugger=False, dmdPath=None, lsanPath=None, ubsanPath=None):
+    def environment(self, env=None, xrePath=None, crashreporter=True, debugger=False, lsanPath=None, ubsanPath=None):
         # Because we are running remote, we don't want to mimic the local env
         # so no copying of os.environ
         if env is None:
             env = {}
 
-        if dmdPath:
-            env['MOZ_REPLACE_MALLOC_LIB'] = os.path.join(dmdPath, 'libdmd.so')
-
         # Except for the mochitest results table hiding option, which isn't
         # passed to runtestsremote.py as an actual option, but through the
         # MOZ_HIDE_RESULTS_TABLE environment variable.
         if 'MOZ_HIDE_RESULTS_TABLE' in os.environ:
             env['MOZ_HIDE_RESULTS_TABLE'] = os.environ['MOZ_HIDE_RESULTS_TABLE']
 
         if crashreporter and not debugger:
             env['MOZ_CRASHREPORTER_NO_REPORT'] = '1'
--- a/build/moz.configure/memory.configure
+++ b/build/moz.configure/memory.configure
@@ -57,8 +57,19 @@ def replace_malloc(value, jemalloc, mile
         return True
     if milestone.is_nightly and jemalloc and build_project != 'js':
         return True
 
 
 set_config('MOZ_REPLACE_MALLOC', replace_malloc)
 set_define('MOZ_REPLACE_MALLOC', replace_malloc)
 add_old_configure_assignment('MOZ_REPLACE_MALLOC', replace_malloc)
+
+
+@depends(replace_malloc, build_project)
+def replace_malloc_static(replace_malloc, build_project):
+    # Default to statically linking replace-malloc libraries that can be
+    # statically linked, except when building with --enable-project=memory.
+    if replace_malloc and build_project != 'memory':
+        return True
+
+
+set_config('MOZ_REPLACE_MALLOC_STATIC', replace_malloc_static)
new file mode 100644
--- /dev/null
+++ b/devtools/client/jsonview/components/LiveText.js
@@ -0,0 +1,45 @@
+/* -*- indent-tabs-mode: nil; js-indent-level: 2 -*- */
+/* vim: set ft=javascript ts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+define(function (require, exports, module) {
+  const { Component } = require("devtools/client/shared/vendor/react");
+  const PropTypes = require("devtools/client/shared/vendor/react-prop-types");
+  const { findDOMNode } = require("devtools/client/shared/vendor/react-dom");
+  const { pre } = require("devtools/client/shared/vendor/react-dom-factories");
+
+  /**
+   * This object represents a live DOM text node in a <pre>.
+   */
+  class LiveText extends Component {
+    static get propTypes() {
+      return {
+        data: PropTypes.instanceOf(Text),
+      };
+    }
+
+    componentDidMount() {
+      this.componentDidUpdate();
+    }
+
+    componentDidUpdate() {
+      let el = findDOMNode(this);
+      if (el.firstChild === this.props.data) {
+        return;
+      }
+      el.textContent = "";
+      el.append(this.props.data);
+    }
+
+    render() {
+      return pre({className: "data"});
+    }
+  }
+
+  // Exports from this module
+  exports.LiveText = LiveText;
+});
--- a/devtools/client/jsonview/components/MainTabbedArea.js
+++ b/devtools/client/jsonview/components/MainTabbedArea.js
@@ -17,17 +17,17 @@ define(function (require, exports, modul
 
   /**
    * This object represents the root application template
    * responsible for rendering the basic tab layout.
    */
   class MainTabbedArea extends Component {
     static get propTypes() {
       return {
-        jsonText: PropTypes.string,
+        jsonText: PropTypes.instanceOf(Text),
         tabActive: PropTypes.number,
         actions: PropTypes.object,
         headers: PropTypes.object,
         searchFilter: PropTypes.string,
         json: PropTypes.oneOfType([
           PropTypes.string,
           PropTypes.object,
           PropTypes.array,
@@ -37,18 +37,18 @@ define(function (require, exports, modul
         expandedNodes: PropTypes.instanceOf(Set),
       };
     }
 
     constructor(props) {
       super(props);
 
       this.state = {
-        json: {},
-        headers: {},
+        json: props.json,
+        expandedNodes: props.expandedNodes,
         jsonText: props.jsonText,
         tabActive: props.tabActive
       };
 
       this.onTabChanged = this.onTabChanged.bind(this);
     }
 
     onTabChanged(index) {
@@ -59,27 +59,28 @@ define(function (require, exports, modul
       return (
         Tabs({
           tabActive: this.state.tabActive,
           onAfterChange: this.onTabChanged},
           TabPanel({
             className: "json",
             title: JSONView.Locale.$STR("jsonViewer.tab.JSON")},
             JsonPanel({
-              data: this.props.json,
+              data: this.state.json,
               expandedNodes: this.props.expandedNodes,
               actions: this.props.actions,
               searchFilter: this.state.searchFilter
             })
           ),
           TabPanel({
             className: "rawdata",
             title: JSONView.Locale.$STR("jsonViewer.tab.RawData")},
             TextPanel({
-              isValidJson: !(this.props.json instanceof Error),
+              isValidJson: !(this.state.json instanceof Error) &&
+                           document.readyState != "loading",
               data: this.state.jsonText,
               actions: this.props.actions
             })
           ),
           TabPanel({
             className: "headers",
             title: JSONView.Locale.$STR("jsonViewer.tab.Headers")},
             HeadersPanel({
--- a/devtools/client/jsonview/components/TextPanel.js
+++ b/devtools/client/jsonview/components/TextPanel.js
@@ -7,48 +7,46 @@
 "use strict";
 
 define(function (require, exports, module) {
   const { Component } = require("devtools/client/shared/vendor/react");
   const PropTypes = require("devtools/client/shared/vendor/react-prop-types");
   const dom = require("devtools/client/shared/vendor/react-dom-factories");
   const { createFactories } = require("devtools/client/shared/react-utils");
   const { TextToolbar } = createFactories(require("./TextToolbar"));
-
-  const { div, pre } = dom;
+  const { LiveText } = createFactories(require("./LiveText"));
+  const { div } = dom;
 
   /**
    * This template represents the 'Raw Data' panel displaying
    * JSON as a text received from the server.
    */
   class TextPanel extends Component {
     static get propTypes() {
       return {
         isValidJson: PropTypes.bool,
         actions: PropTypes.object,
-        data: PropTypes.string
+        data: PropTypes.instanceOf(Text),
       };
     }
 
     constructor(props) {
       super(props);
       this.state = {};
     }
 
     render() {
       return (
         div({className: "textPanelBox tab-panel-inner"},
           TextToolbar({
             actions: this.props.actions,
             isValidJson: this.props.isValidJson
           }),
           div({className: "panelContent"},
-            pre({className: "data"},
-              this.props.data
-            )
+            LiveText({data: this.props.data})
           )
         )
       );
     }
   }
 
   // Exports from this module
   exports.TextPanel = TextPanel;
--- a/devtools/client/jsonview/components/moz.build
+++ b/devtools/client/jsonview/components/moz.build
@@ -9,13 +9,14 @@ DIRS += [
 ]
 
 DevToolsModules(
     'Headers.js',
     'HeadersPanel.js',
     'HeadersToolbar.js',
     'JsonPanel.js',
     'JsonToolbar.js',
+    'LiveText.js',
     'MainTabbedArea.js',
     'SearchBox.js',
     'TextPanel.js',
     'TextToolbar.js'
 )
--- a/devtools/client/jsonview/converter-child.js
+++ b/devtools/client/jsonview/converter-child.js
@@ -163,16 +163,18 @@ function exportData(win, request) {
   let data = Cu.createObjectIn(win, {
     defineAs: "JSONView"
   });
 
   data.debug = debug;
 
   data.json = new win.Text();
 
+  data.readyState = "uninitialized";
+
   let Locale = {
     $STR: key => {
       try {
         return jsonViewStrings.GetStringFromName(key);
       } catch (err) {
         console.error(err);
         return undefined;
       }
@@ -239,17 +241,16 @@ function initialHTML(doc) {
         element("link", {
           rel: "stylesheet",
           type: "text/css",
           href: baseURI + "css/main.css",
         }),
         element("script", {
           src: baseURI + "lib/require.js",
           "data-main": baseURI + "viewer-config.js",
-          defer: true,
         })
       ]),
       element("body", {}, [
         element("div", {"id": "content"}, [
           element("div", {"id": "json"})
         ])
       ])
     ]).outerHTML;
--- a/devtools/client/jsonview/json-viewer.js
+++ b/devtools/client/jsonview/json-viewer.js
@@ -14,51 +14,36 @@ define(function (require, exports, modul
 
   const AUTO_EXPAND_MAX_SIZE = 100 * 1024;
   const AUTO_EXPAND_MAX_LEVEL = 7;
 
   let prettyURL;
 
   // Application state object.
   let input = {
-    jsonText: JSONView.json.textContent,
+    jsonText: JSONView.json,
     jsonPretty: null,
     headers: JSONView.headers,
     tabActive: 0,
     prettified: false
   };
 
-  try {
-    input.json = JSON.parse(input.jsonText);
-  } catch (err) {
-    input.json = err;
-  }
-
-  // Expand the document by default if its size isn't bigger than 100KB.
-  if (!(input.json instanceof Error) && input.jsonText.length <= AUTO_EXPAND_MAX_SIZE) {
-    input.expandedNodes = TreeViewClass.getExpandedNodes(
-      input.json,
-      {maxLevel: AUTO_EXPAND_MAX_LEVEL}
-    );
-  } else {
-    input.expandedNodes = new Set();
-  }
-
   /**
    * Application actions/commands. This list implements all commands
    * available for the JSON viewer.
    */
   input.actions = {
     onCopyJson: function () {
-      copyString(input.prettified ? input.jsonPretty : input.jsonText);
+      let text = input.prettified ? input.jsonPretty : input.jsonText;
+      copyString(text.textContent);
     },
 
     onSaveJson: function () {
       if (input.prettified && !prettyURL) {
-        prettyURL = URL.createObjectURL(new window.Blob([input.jsonPretty]));
+        prettyURL = URL.createObjectURL(new window.Blob([input.jsonPretty.textContent]));
       }
       dispatchEvent("save", input.prettified ? prettyURL : null);
     },
 
     onCopyHeaders: function () {
       let value = "";
       let isWinNT = document.documentElement.getAttribute("platform") === "win";
       let eol = isWinNT ? "\r\n" : "\n";
@@ -88,17 +73,17 @@ define(function (require, exports, modul
       if (input.json instanceof Error) {
         // Cannot prettify invalid JSON
         return;
       }
       if (input.prettified) {
         theApp.setState({jsonText: input.jsonText});
       } else {
         if (!input.jsonPretty) {
-          input.jsonPretty = JSON.stringify(input.json, null, "  ");
+          input.jsonPretty = new Text(JSON.stringify(input.json, null, "  "));
         }
         theApp.setState({jsonText: input.jsonPretty});
       }
 
       input.prettified = !input.prettified;
     },
   };
 
@@ -134,16 +119,57 @@ define(function (require, exports, modul
     window.dispatchEvent(contentMessageEvent);
   }
 
   /**
    * Render the main application component. It's the main tab bar displayed
    * at the top of the window. This component also represents ReacJS root.
    */
   let content = document.getElementById("content");
+  let promise = (async function parseJSON() {
+    if (document.readyState == "loading") {
+      // If the JSON has not been loaded yet, render the Raw Data tab first.
+      input.json = {};
+      input.expandedNodes = new Set();
+      input.tabActive = 1;
+      return new Promise(resolve => {
+        document.addEventListener("DOMContentLoaded", resolve, {once: true});
+      }).then(parseJSON).then(() => {
+        // Now update the state and switch to the JSON tab.
+        theApp.setState({
+          tabActive: 0,
+          json: input.json,
+          expandedNodes: input.expandedNodes,
+        });
+      });
+    }
+
+    // If the JSON has been loaded, parse it immediately before loading the app.
+    let jsonString = input.jsonText.textContent;
+    try {
+      input.json = JSON.parse(jsonString);
+    } catch (err) {
+      input.json = err;
+    }
+
+    // Expand the document by default if its size isn't bigger than 100KB.
+    if (!(input.json instanceof Error) && jsonString.length <= AUTO_EXPAND_MAX_SIZE) {
+      input.expandedNodes = TreeViewClass.getExpandedNodes(
+        input.json,
+        {maxLevel: AUTO_EXPAND_MAX_LEVEL}
+      );
+    }
+    return undefined;
+  })();
+
   let theApp = render(MainTabbedArea(input), content);
 
-  // Send notification event to the window. Can be useful for
+  // Send readyState change notification event to the window. Can be useful for
   // tests as well as extensions.
-  let event = new CustomEvent("JSONViewInitialized", {});
-  JSONView.initialized = true;
-  window.dispatchEvent(event);
+  JSONView.readyState = "interactive";
+  window.dispatchEvent(new CustomEvent("AppReadyStateChange"));
+
+  promise.then(() => {
+    // Another readyState change notification event.
+    JSONView.readyState = "complete";
+    window.dispatchEvent(new CustomEvent("AppReadyStateChange"));
+  });
 });
--- a/devtools/client/jsonview/test/browser.ini
+++ b/devtools/client/jsonview/test/browser.ini
@@ -17,38 +17,41 @@ support-files =
   simple_json.json
   simple_json.json^headers^
   valid_json.json
   valid_json.json^headers^
   !/devtools/client/commandline/test/head.js
   !/devtools/client/framework/test/head.js
   !/devtools/client/framework/test/shared-head.js
 
+[browser_json_refresh.js]
 [browser_jsonview_bug_1380828.js]
-[browser_jsonview_ignore_charset.js]
+[browser_jsonview_chunked_json.js]
+support-files =
+  chunked_json.sjs
 [browser_jsonview_content_type.js]
 [browser_jsonview_copy_headers.js]
 subsuite = clipboard
 skip-if = (os == 'linux' && bits == 32 && debug) # bug 1328915, disable linux32 debug devtools for timeouts
 [browser_jsonview_copy_json.js]
 subsuite = clipboard
 skip-if = (os == 'linux' && bits == 32 && debug) # bug 1328915, disable linux32 debug devtools for timeouts
 [browser_jsonview_copy_rawdata.js]
 subsuite = clipboard
 skip-if = (os == 'linux' && bits == 32 && debug) # bug 1328915, disable linux32 debug devtools for timeouts
 [browser_jsonview_csp_json.js]
 [browser_jsonview_empty_object.js]
 [browser_jsonview_encoding.js]
 [browser_jsonview_filter.js]
+[browser_jsonview_ignore_charset.js]
 [browser_jsonview_invalid_json.js]
 [browser_jsonview_manifest.js]
 [browser_jsonview_nojs.js]
 [browser_jsonview_nul.js]
 [browser_jsonview_object-type.js]
 [browser_jsonview_row_selection.js]
 [browser_jsonview_save_json.js]
 support-files =
   !/toolkit/content/tests/browser/common/mockTransfer.js
+[browser_jsonview_serviceworker.js]
+[browser_jsonview_slash.js]
 [browser_jsonview_theme.js]
-[browser_jsonview_slash.js]
 [browser_jsonview_valid_json.js]
-[browser_json_refresh.js]
-[browser_jsonview_serviceworker.js]
new file mode 100644
--- /dev/null
+++ b/devtools/client/jsonview/test/browser_jsonview_chunked_json.js
@@ -0,0 +1,80 @@
+/* -*- indent-tabs-mode: nil; js-indent-level: 2 -*- */
+/* vim: set ts=2 et sw=2 tw=80: */
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+const TEST_JSON_URL = URL_ROOT + "chunked_json.sjs";
+
+add_task(async function () {
+  info("Test chunked JSON started");
+
+  await addJsonViewTab(TEST_JSON_URL, {
+    appReadyState: "interactive",
+    docReadyState: "loading",
+  });
+
+  is(await getElementCount(".rawdata.is-active"), 1,
+    "The Raw Data tab is selected.");
+
+  // Write some text and check that it is displayed.
+  await write("[");
+  await checkText();
+
+  // Repeat just in case.
+  await write("1,");
+  await checkText();
+
+  is(await getElementCount("button.prettyprint"), 0,
+    "There is no pretty print button during load");
+
+  await selectJsonViewContentTab("json");
+  is(await getElementText(".jsonPanelBox > .panelContent"), "", "There is no JSON tree");
+
+  await selectJsonViewContentTab("headers");
+  ok(await getElementText(".headersPanelBox .netInfoHeadersTable"),
+    "The headers table has been filled.");
+
+  // Write some text without being in Raw Data, then switch tab and check.
+  await write("2");
+  await selectJsonViewContentTab("rawdata");
+  await checkText();
+
+  // Another text check.
+  await write("]");
+  await checkText();
+
+  // Close the connection.
+  await server("close");
+
+  is(await getElementCount(".json.is-active"), 1, "The JSON tab is selected.");
+
+  is(await getElementCount(".jsonPanelBox .treeTable .treeRow"), 2,
+    "There is a tree with 2 rows.");
+
+  await selectJsonViewContentTab("rawdata");
+  await checkText();
+
+  is(await getElementCount("button.prettyprint"), 1, "There is a pretty print button.");
+  await clickJsonNode("button.prettyprint");
+  await checkText(JSON.stringify(JSON.parse(data), null, 2));
+});
+
+let data = " ";
+async function write(text) {
+  data += text;
+  await server("write", text);
+}
+async function checkText(text = data) {
+  is(await getElementText(".textPanelBox .data"), text, "Got the right text.");
+}
+
+function server(action, value) {
+  return new Promise(resolve => {
+    let xhr = new XMLHttpRequest();
+    xhr.open("GET", TEST_JSON_URL + "?" + action + "=" + value);
+    xhr.addEventListener("load", resolve, {once: true});
+    xhr.send();
+  });
+}
--- a/devtools/client/jsonview/test/browser_jsonview_nojs.js
+++ b/devtools/client/jsonview/test/browser_jsonview_nojs.js
@@ -1,25 +1,25 @@
 /* -*- indent-tabs-mode: nil; js-indent-level: 2 -*- */
 /* vim: set ts=2 et sw=2 tw=80: */
 /* Any copyright is dedicated to the Public Domain.
  * http://creativecommons.org/publicdomain/zero/1.0/ */
 
 "use strict";
 
-add_task(function* () {
+add_task(async function () {
   info("Test JSON without JavaScript started.");
 
   let oldPref = SpecialPowers.getBoolPref("javascript.enabled");
   SpecialPowers.setBoolPref("javascript.enabled", false);
 
   const TEST_JSON_URL = "data:application/json,[1,2,3]";
-  yield addJsonViewTab(TEST_JSON_URL, 0).catch(() => {
-    info("JSON Viewer did not load");
-    return executeInContent("Test:JsonView:GetElementVisibleText", {selector: "html"})
-    .then(result => {
-      info("Checking visible text contents.");
-      is(result.text, "[1,2,3]", "The raw source should be visible.");
-    });
-  });
+
+  // "uninitialized" will be the last app readyState because JS is disabled.
+  await addJsonViewTab(TEST_JSON_URL, {appReadyState: "uninitialized"});
+
+  info("Checking visible text contents.");
+  let {text} = await executeInContent("Test:JsonView:GetElementVisibleText",
+    {selector: "html"});
+  is(text, "[1,2,3]", "The raw source should be visible.");
 
   SpecialPowers.setBoolPref("javascript.enabled", oldPref);
 });
new file mode 100644
--- /dev/null
+++ b/devtools/client/jsonview/test/chunked_json.sjs
@@ -0,0 +1,38 @@
+/* -*- indent-tabs-mode: nil; js-indent-level: 2 -*- */
+/* vim: set ts=2 et sw=2 tw=80: */
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+const key = "json-viewer-chunked-response";
+function setResponse(response) {
+  setObjectState(key, response);
+}
+function getResponse() {
+  let response;
+  getObjectState(key, v => { response = v });
+  return response;
+}
+
+function handleRequest(request, response) {
+  let {queryString} = request;
+  if (!queryString) {
+    response.processAsync();
+    setResponse(response);
+    response.setHeader("Content-Type", "application/json");
+    // Write something so that the JSON viewer app starts loading.
+    response.write(" ");
+    return;
+  }
+  let [command, value] = queryString.split('=');
+  switch (command) {
+    case "write":
+      getResponse().write(value);
+      break;
+    case "close":
+      getResponse().finish();
+      setResponse(null);
+      break;
+  }
+  response.setHeader("Content-Type", "text/plain");
+  response.write("ok");
+}
--- a/devtools/client/jsonview/test/doc_frame_script.js
+++ b/devtools/client/jsonview/test/doc_frame_script.js
@@ -20,27 +20,29 @@ EventUtils._EU_Ci = Components.interface
 EventUtils._EU_Cc = Components.classes; // eslint-disable-line
 EventUtils.navigator = content.navigator;
 EventUtils.KeyboardEvent = content.KeyboardEvent;
 
 Services.scriptloader.loadSubScript(
   "chrome://mochikit/content/tests/SimpleTest/EventUtils.js", EventUtils);
 
 /**
- * When the JSON View is done rendering it triggers custom event
- * "JSONViewInitialized", then the Test:TestPageProcessingDone message
- * will be sent to the parent process for tests to wait for this event
- * if needed.
+ * When the ready state of the JSON View app changes, it triggers custom event
+ * "AppReadyStateChange", then the "Test:JsonView:AppReadyStateChange" message
+ * will be sent to the parent process for tests to wait for this event if needed.
  */
-content.addEventListener("JSONViewInitialized", () => {
-  sendAsyncMessage("Test:JsonView:JSONViewInitialized");
+content.addEventListener("AppReadyStateChange", () => {
+  sendAsyncMessage("Test:JsonView:AppReadyStateChange");
 });
 
-content.addEventListener("load", () => {
-  sendAsyncMessage("Test:JsonView:load");
+/**
+ * Analogous for the standard "readystatechange" event of the document.
+ */
+content.document.addEventListener("readystatechange", () => {
+  sendAsyncMessage("Test:JsonView:DocReadyStateChange");
 });
 
 addMessageListener("Test:JsonView:GetElementCount", function (msg) {
   let {selector} = msg.data;
   let nodeList = content.document.querySelectorAll(selector);
   sendAsyncMessage(msg.name, {count: nodeList.length});
 });
 
--- a/devtools/client/jsonview/test/head.js
+++ b/devtools/client/jsonview/test/head.js
@@ -21,63 +21,89 @@ registerCleanupFunction(() => {
 });
 
 // XXX move some API into devtools/framework/test/shared-head.js
 
 /**
  * Add a new test tab in the browser and load the given url.
  * @param {String} url
  *   The url to be loaded in the new tab.
- * @param {Number} timeout [optional]
- *   The maximum number of milliseconds allowed before the initialization of the
- *   JSON Viewer once the tab has been loaded. If exceeded, the initialization
- *   will be considered to have failed, and the returned promise will be rejected.
- *   If this parameter is not passed or is negative, it will be ignored.
+ *
+ * @param {Object} [optional]
+ *   An object with the following optional properties:
+ *   - appReadyState: The readyState of the JSON Viewer app that you want to
+ *     wait for. Its value can be one of:
+ *      - "uninitialized": The converter has started the request.
+ *        If JavaScript is disabled, there will be no more readyState changes.
+ *      - "loading": RequireJS started loading the scripts for the JSON Viewer.
+ *        If the load times out, there will be no more readyState changes.
+ *      - "interactive": The JSON Viewer app loaded, but possibly not all the JSON
+ *        data has been received.
+ *      - "complete" (default): The app is fully loaded with all the JSON.
+ *   - docReadyState: The standard readyState of the document that you want to
+ *     wait for. Its value can be one of:
+ *      - "loading": The JSON data has not been completely loaded (but the app might be).
+ *      - "interactive": All the JSON data has been received.
+ *      - "complete" (default): Since there aren't sub-resources like images,
+ *        behaves as "interactive". Note the app might not be loaded yet.
  */
-async function addJsonViewTab(url, timeout = -1) {
-  info("Adding a new JSON tab with URL: '" + url + "'");
+async function addJsonViewTab(url, {
+  appReadyState = "complete",
+  docReadyState = "complete",
+} = {}) {
+  let docReadyStates = ["loading", "interactive", "complete"];
+  let docReadyIndex = docReadyStates.indexOf(docReadyState);
+  let appReadyStates = ["uninitialized", ...docReadyStates];
+  let appReadyIndex = appReadyStates.indexOf(appReadyState);
+  if (docReadyIndex < 0 || appReadyIndex < 0) {
+    throw new Error("Invalid app or doc readyState parameter.");
+  }
 
-  let tab = await addTab(url);
+  info("Adding a new JSON tab with URL: '" + url + "'");
+  let tabLoaded = addTab(url);
+  let tab = gBrowser.selectedTab;
   let browser = tab.linkedBrowser;
+  await Promise.race([tabLoaded, new Promise(resolve => {
+    browser.webProgress.addProgressListener({
+      QueryInterface: XPCOMUtils.generateQI(["nsIWebProgressListener",
+                                             "nsISupportsWeakReference"]),
+      onLocationChange(webProgress) {
+        // Fires when the tab is ready but before completely loaded.
+        webProgress.removeProgressListener(this);
+        resolve();
+      },
+    }, Ci.nsIWebProgress.NOTIFY_LOCATION);
+  })]);
 
   // Load devtools/shared/frame-script-utils.js
   getFrameScript();
 
   // Load frame script with helpers for JSON View tests.
   let rootDir = getRootDirectory(gTestPath);
   let frameScriptUrl = rootDir + "doc_frame_script.js";
   browser.messageManager.loadFrameScript(frameScriptUrl, false);
 
   // Check if there is a JSONView object.
-  if (!content.window.wrappedJSObject.JSONView) {
-    throw new Error("JSON Viewer did not load.");
-  }
-
-  // Resolve if the JSONView is fully loaded.
-  if (content.window.wrappedJSObject.JSONView.initialized) {
-    return tab;
+  let JSONView = content.window.wrappedJSObject.JSONView;
+  if (!JSONView) {
+    throw new Error("The JSON Viewer did not load.");
   }
 
-  // Otherwise wait for an initialization event, possibly with a time limit.
-  const onJSONViewInitialized =
-    waitForContentMessage("Test:JsonView:JSONViewInitialized")
-    .then(() => tab);
-
-  if (!(timeout >= 0)) {
-    return onJSONViewInitialized;
+  // Wait until the document readyState suffices.
+  let {document} = content.window;
+  while (docReadyStates.indexOf(document.readyState) < docReadyIndex) {
+    await waitForContentMessage("Test:JsonView:DocReadyStateChange");
   }
 
-  if (content.window.document.readyState !== "complete") {
-    await waitForContentMessage("Test:JsonView:load");
+  // Wait until the app readyState suffices.
+  while (appReadyStates.indexOf(JSONView.readyState) < appReadyIndex) {
+    await waitForContentMessage("Test:JsonView:AppReadyStateChange");
   }
 
-  let onTimeout = new Promise((_, reject) =>
-    setTimeout(() => reject(new Error("JSON Viewer did not load.")), timeout));
-
-  return Promise.race([onJSONViewInitialized, onTimeout]);
+  return tab;
 }
 
 /**
  * Expanding a node in the JSON tree
  */
 function clickJsonNode(selector) {
   info("Expanding node: '" + selector + "'");
 
--- a/devtools/client/jsonview/viewer-config.js
+++ b/devtools/client/jsonview/viewer-config.js
@@ -2,16 +2,20 @@
 /* vim: set ft=javascript ts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 /* global requirejs */
 
 "use strict";
 
+// Send readyState change notification event to the window. It's useful for tests.
+JSONView.readyState = "loading";
+window.dispatchEvent(new CustomEvent("AppReadyStateChange"));
+
 /**
  * RequireJS configuration for JSON Viewer.
  *
  * ReactJS library is shared among DevTools. The minified (production) version
  * of the library is always available, and is used by default.
  *
  * In order to use the developer version you need to specify the following
  * in your .mozconfig (see also bug 1181646):
--- a/devtools/shared/system.js
+++ b/devtools/shared/system.js
@@ -127,19 +127,16 @@ function* getSystemInfo() {
     // Typically, the version of Firefox, for example.
     // It is different than the version of Gecko or the XULRunner platform.
     // On B2G, this is the Gaia version.
     version,
 
     // The application's build ID/date, for example "2004051604".
     appbuildid: appInfo.appBuildID,
 
-    // The application's changeset.
-    changeset: exports.getAppIniString("App", "SourceStamp"),
-
     // The build ID/date of Gecko and the XULRunner platform.
     platformbuildid: appInfo.platformBuildID,
     geckobuildid: appInfo.platformBuildID,
 
     // The version of Gecko or XULRunner platform, for example "1.8.1.19" or
     // "1.9.3pre". In "Firefox 3.7 alpha 1" the application version is "3.7a1pre"
     // while the platform version is "1.9.3pre"
     platformversion: geckoVersion,
@@ -212,38 +209,16 @@ function getProfileLocation() {
     }
 
     return profd.leafName;
   } catch (e) {
     return "";
   }
 }
 
-function getAppIniString(section, key) {
-  let inifile = Services.dirsvc.get("GreD", Ci.nsIFile);
-  inifile.append("application.ini");
-
-  if (!inifile.exists()) {
-    inifile = Services.dirsvc.get("CurProcD", Ci.nsIFile);
-    inifile.append("application.ini");
-  }
-
-  if (!inifile.exists()) {
-    return undefined;
-  }
-
-  let iniParser = Cc["@mozilla.org/xpcom/ini-parser-factory;1"]
-                    .getService(Ci.nsIINIParserFactory).createINIParser(inifile);
-  try {
-    return iniParser.getString(section, key);
-  } catch (e) {
-    return undefined;
-  }
-}
-
 /**
  * Function for fetching screen dimensions and returning
  * an enum for Telemetry.
  */
 function getScreenDimensions() {
   let width = {};
   let height = {};
 
@@ -345,13 +320,12 @@ function getSetting(name) {
     });
   } else {
     deferred.reject(new Error("No settings service"));
   }
   return deferred.promise;
 }
 
 exports.getSystemInfo = Task.async(getSystemInfo);
-exports.getAppIniString = getAppIniString;
 exports.getSetting = getSetting;
 exports.getScreenDimensions = getScreenDimensions;
 exports.getOSCPU = getOSCPU;
 exports.constants = AppConstants;
--- a/docshell/base/nsDefaultURIFixup.cpp
+++ b/docshell/base/nsDefaultURIFixup.cpp
@@ -919,63 +919,63 @@ nsDefaultURIFixup::KeywordURIFixup(const
 
   // If there are only colons and only hexadecimal characters ([a-z][0-9])
   // enclosed in [], then don't do a keyword lookup
   if (looksLikeIpv6) {
     return NS_OK;
   }
 
   nsAutoCString asciiHost;
-  nsAutoCString host;
+  nsAutoCString displayHost;
 
-  bool isValidAsciiHost =
+  bool isValidHost =
     aFixupInfo->mFixedURI &&
     NS_SUCCEEDED(aFixupInfo->mFixedURI->GetAsciiHost(asciiHost)) &&
     !asciiHost.IsEmpty();
 
-  bool isValidHost =
+  bool isValidDisplayHost =
     aFixupInfo->mFixedURI &&
-    NS_SUCCEEDED(aFixupInfo->mFixedURI->GetHost(host)) &&
-    !host.IsEmpty();
+    NS_SUCCEEDED(aFixupInfo->mFixedURI->GetDisplayHost(displayHost)) &&
+    !displayHost.IsEmpty();
 
   nsresult rv = NS_OK;
   // We do keyword lookups if a space or quote preceded the dot, colon
   // or question mark (or if the latter is not found, or if the input starts
   // with a question mark)
   if (((firstSpaceLoc < firstDotLoc || firstQuoteLoc < firstDotLoc) &&
        (firstSpaceLoc < firstColonLoc || firstQuoteLoc < firstColonLoc) &&
        (firstSpaceLoc < firstQMarkLoc || firstQuoteLoc < firstQMarkLoc)) ||
       firstQMarkLoc == 0) {
     rv = TryKeywordFixupForURIInfo(aFixupInfo->mOriginalInput, aFixupInfo,
                                    aPostData);
-    // ... or when the host is the same as asciiHost and there are no
+    // ... or when the asciiHost is the same as displayHost and there are no
     // characters from [a-z][A-Z]
-  } else if (isValidAsciiHost && isValidHost && !hasAsciiAlpha &&
-             host.EqualsIgnoreCase(asciiHost.get())) {
+  } else if (isValidHost && isValidDisplayHost && !hasAsciiAlpha &&
+             asciiHost.EqualsIgnoreCase(displayHost.get())) {
     if (!sDNSFirstForSingleWords) {
       rv = TryKeywordFixupForURIInfo(aFixupInfo->mOriginalInput, aFixupInfo,
                                      aPostData);
     }
   }
   // ... or if there is no question mark or colon, and there is either no
   // dot, or exactly 1 and it is the first or last character of the input:
   else if ((firstDotLoc == uint32_t(kNotFound) ||
             (foundDots == 1 && (firstDotLoc == 0 ||
                                 firstDotLoc == aURIString.Length() - 1))) &&
            firstColonLoc == uint32_t(kNotFound) &&
            firstQMarkLoc == uint32_t(kNotFound)) {
-    if (isValidAsciiHost && IsDomainWhitelisted(asciiHost, firstDotLoc)) {
+    if (isValidHost && IsDomainWhitelisted(asciiHost, firstDotLoc)) {
       return NS_OK;
     }
 
     // ... unless there are no dots, and a slash, and alpha characters, and
     // this is a valid host:
     if (firstDotLoc == uint32_t(kNotFound) &&
         lastSlashLoc != uint32_t(kNotFound) &&
-        hasAsciiAlpha && isValidAsciiHost) {
+        hasAsciiAlpha && isValidHost) {
       return NS_OK;
     }
 
     // If we get here, we don't have a valid URI, or we did but the
     // host is not whitelisted, so we do a keyword search *anyway*:
     rv = TryKeywordFixupForURIInfo(aFixupInfo->mOriginalInput, aFixupInfo,
                                    aPostData);
   }
--- a/docshell/test/browser/browser_bug1347823.js
+++ b/docshell/test/browser/browser_bug1347823.js
@@ -47,18 +47,20 @@ add_task(async function testExpiredCache
         content.document.body.textContent = "modified";
       });
 
       // Load a random page.
       BrowserTestUtils.loadURI(browser, "data:text/html;charset=utf-8,page2");
       await BrowserTestUtils.browserLoaded(browser);
 
       // Wait for 3 times of expiration timeout, hopefully it's evicted...
-      await new Promise(resolve => {
-        setTimeout(resolve, 3000);
+      await ContentTask.spawn(browser, null, () => {
+        return new Promise(resolve => {
+          content.setTimeout(resolve, 3000);
+        });
       });
 
       // Go back and verify text content.
       let awaitPageShow = BrowserTestUtils.waitForContentEvent(browser, "pageshow");
       browser.goBack();
       await awaitPageShow;
       await ContentTask.spawn(browser, null, () => {
         is(content.document.body.textContent, "page1");
--- a/docshell/test/unit/test_nsDefaultURIFixup_info.js
+++ b/docshell/test/unit/test_nsDefaultURIFixup_info.js
@@ -477,17 +477,22 @@ var testcases = [ {
   }, {
     input: "localhost:8080",
     fixedURI: "http://localhost:8080/",
     protocolChange: true,
   }, {
     input: "plonk:8080",
     fixedURI: "http://plonk:8080/",
     protocolChange: true,
-  }
+  }, {
+    input: "\u10E0\u10D4\u10D2\u10D8\u10E1\u10E2\u10E0\u10D0\u10EA\u10D8\u10D0.\u10D2\u10D4",
+    fixedURI: "http://xn--lodaehvb5cdik4g.xn--node/",
+    alternateURI: "http://www.xn--lodaehvb5cdik4g.xn--node/",
+    protocolChange: true,
+  },
 ];
 
 if (Services.appinfo.OS.toLowerCase().startsWith("win")) {
   testcases.push({
     input: "C:\\some\\file.txt",
     fixedURI: "file:///C:/some/file.txt",
     protocolChange: true,
   });
--- a/dom/bindings/Bindings.conf
+++ b/dom/bindings/Bindings.conf
@@ -678,16 +678,22 @@ DOMInterfaces = {
 },
 
 'PeerConnectionImpl': {
     'nativeType': 'mozilla::PeerConnectionImpl',
     'headerFile': 'PeerConnectionImpl.h',
     'wrapperCache': False
 },
 
+'TransceiverImpl': {
+    'nativeType': 'mozilla::TransceiverImpl',
+    'headerFile': 'TransceiverImpl.h',
+    'wrapperCache': False
+},
+
 'Plugin': {
     'headerFile' : 'nsPluginArray.h',
     'nativeType': 'nsPluginElement',
 },
 
 'PluginArray': {
     'nativeType': 'nsPluginArray',
 },
--- a/dom/media/BaseMediaResource.h
+++ b/dom/media/BaseMediaResource.h
@@ -59,17 +59,17 @@ public:
   // since we don't expect to resume again any time soon. Otherwise we
   // may resume again soon so resources should be held for a little
   // while.
   virtual void Suspend(bool aCloseImmediately) = 0;
 
   // Resume any downloads that have been suspended.
   virtual void Resume() = 0;
 
-  // The mode is initially MODE_PLAYBACK.
+  // The mode is initially MODE_METADATA.
   virtual void SetReadMode(MediaCacheStream::ReadMode aMode) = 0;
 
   // Returns true if the resource can be seeked to unbuffered ranges, i.e.
   // for an HTTP network stream this returns true if HTTP1.1 Byte Range
   // requests are supported by the connection/server.
   virtual bool IsTransportSeekable() = 0;
 
   // Get the current principal for the channel
--- a/dom/media/ChannelMediaDecoder.cpp
+++ b/dom/media/ChannelMediaDecoder.cpp
@@ -250,19 +250,16 @@ ChannelMediaDecoder::Load(nsIChannel* aC
   nsresult rv = MediaShutdownManager::Instance().Register(this);
   if (NS_WARN_IF(NS_FAILED(rv))) {
     return rv;
   }
 
   rv = mResource->Open(aStreamListener);
   NS_ENSURE_SUCCESS(rv, rv);
 
-  // Set mode to METADATA since we are about to read metadata.
-  mResource->SetReadMode(MediaCacheStream::MODE_METADATA);
-
   SetStateMachine(CreateStateMachine());
   NS_ENSURE_TRUE(GetStateMachine(), NS_ERROR_FAILURE);
 
   return InitializeStateMachine();
 }
 
 nsresult
 ChannelMediaDecoder::Load(BaseMediaResource* aOriginal)
--- a/dom/media/MediaCache.cpp
+++ b/dom/media/MediaCache.cpp
@@ -422,16 +422,18 @@ protected:
   // Guess the duration until the next incoming data on aStream will be used
   TimeDuration PredictNextUseForIncomingData(AutoLock&,
                                              MediaCacheStream* aStream);
 
   // Truncate the file and index array if there are free blocks at the
   // end
   void Truncate();
 
+  void FlushInternal(AutoLock&);
+
   // There is at most one file-backed media cache.
   // It is owned by all MediaCacheStreams that use it.
   // This is a raw pointer set by GetMediaCache(), and reset by ~MediaCache(),
   // both on the main thread; and is not accessed anywhere else.
   static MediaCache* gMediaCache;
 
   // This member is main-thread only. It's used to allocate unique
   // resource IDs to streams.
@@ -500,17 +502,16 @@ MediaCacheStream::MediaCacheStream(Chann
   : mMediaCache(nullptr)
   , mClient(aClient)
   , mIsTransportSeekable(false)
   , mCacheSuspended(false)
   , mChannelEnded(false)
   , mStreamOffset(0)
   , mPlaybackBytesPerSecond(10000)
   , mPinCount(0)
-  , mCurrentMode(MODE_PLAYBACK)
   , mMetadataInPartialBlockBuffer(false)
   , mIsPrivateBrowsing(aIsPrivateBrowsing)
 {
 }
 
 size_t MediaCacheStream::SizeOfExcludingThis(
                                 MallocSizeOf aMallocSizeOf) const
 {
@@ -690,33 +691,42 @@ MediaCacheStream::BlockList::NotifyBlock
   if (e2) {
     e2 = mEntries.PutEntry(aBlockIndex1);
     e2->mNextBlock = e2Next;
     e2->mPrevBlock = e2Prev;
   }
 }
 
 void
-MediaCache::Flush()
+MediaCache::FlushInternal(AutoLock& aLock)
 {
-  NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
-  AutoLock lock(mMonitor);
-
   for (uint32_t blockIndex = 0; blockIndex < mIndex.Length(); ++blockIndex) {
-    FreeBlock(lock, blockIndex);
+    FreeBlock(aLock, blockIndex);
   }
 
   // Truncate index array.
   Truncate();
   NS_ASSERTION(mIndex.Length() == 0, "Blocks leaked?");
   // Reset block cache to its pristine state.
   mBlockCache->Flush();
 }
 
 void
+MediaCache::Flush()
+{
+  MOZ_ASSERT(NS_IsMainThread());
+  nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction(
+    "MediaCache::Flush", [self = RefPtr<MediaCache>(this)]() {
+      AutoLock lock(self->mMonitor);
+      self->FlushInternal(lock);
+    });
+  sThread->Dispatch(r.forget());
+}
+
+void
 MediaCache::CloseStreamsForPrivateBrowsing()
 {
   MOZ_ASSERT(NS_IsMainThread());
   for (MediaCacheStream* s : mStreams) {
     if (s->mIsPrivateBrowsing) {
       s->mClient->Close();
     }
   }
@@ -2495,22 +2505,26 @@ MediaCacheStream::GetNextCachedDataInter
 
   NS_NOTREACHED("Should return in loop");
   return -1;
 }
 
 void
 MediaCacheStream::SetReadMode(ReadMode aMode)
 {
-  // TODO: Assert non-main thread.
-  AutoLock lock(mMediaCache->Monitor());
-  if (aMode == mCurrentMode)
-    return;
-  mCurrentMode = aMode;
-  mMediaCache->QueueUpdate(lock);
+  nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction(
+    "MediaCacheStream::SetReadMode",
+    [ this, client = RefPtr<ChannelMediaResource>(mClient), aMode ]() {
+      AutoLock lock(mMediaCache->Monitor());
+      if (!mClosed && mCurrentMode != aMode) {
+        mCurrentMode = aMode;
+        mMediaCache->QueueUpdate(lock);
+      }
+    });
+  OwnerThread()->Dispatch(r.forget());
 }
 
 void
 MediaCacheStream::SetPlaybackRate(uint32_t aBytesPerSecond)
 {
   MOZ_ASSERT(aBytesPerSecond > 0, "Zero playback rate not allowed");
 
   nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction(
--- a/dom/media/MediaCache.h
+++ b/dom/media/MediaCache.h
@@ -518,17 +518,17 @@ private:
   // corresponding Unpin
   uint32_t          mPinCount;
   // True if CacheClientNotifyDataEnded has been called for this stream.
   bool              mDidNotifyDataEnded = false;
   // The status used when we did CacheClientNotifyDataEnded. Only valid
   // when mDidNotifyDataEnded is true.
   nsresult          mNotifyDataEndedStatus;
   // The last reported read mode
-  ReadMode          mCurrentMode;
+  ReadMode mCurrentMode = MODE_METADATA;
   // True if some data in mPartialBlockBuffer has been read as metadata
   bool              mMetadataInPartialBlockBuffer;
   // The load ID of the current channel. Used to check whether the data is
   // coming from an old channel and should be discarded.
   uint32_t mLoadID = 0;
   // The seek target initiated by MediaCache. -1 if no seek is going on.
   int64_t mSeekTarget = -1;
 
--- a/dom/media/PeerConnection.js
+++ b/dom/media/PeerConnection.js
@@ -20,28 +20,30 @@ const PC_CONTRACT = "@mozilla.org/dom/pe
 const PC_OBS_CONTRACT = "@mozilla.org/dom/peerconnectionobserver;1";
 const PC_ICE_CONTRACT = "@mozilla.org/dom/rtcicecandidate;1";
 const PC_SESSION_CONTRACT = "@mozilla.org/dom/rtcsessiondescription;1";
 const PC_MANAGER_CONTRACT = "@mozilla.org/dom/peerconnectionmanager;1";
 const PC_STATS_CONTRACT = "@mozilla.org/dom/rtcstatsreport;1";
 const PC_STATIC_CONTRACT = "@mozilla.org/dom/peerconnectionstatic;1";
 const PC_SENDER_CONTRACT = "@mozilla.org/dom/rtpsender;1";
 const PC_RECEIVER_CONTRACT = "@mozilla.org/dom/rtpreceiver;1";
+const PC_TRANSCEIVER_CONTRACT = "@mozilla.org/dom/rtptransceiver;1";
 const PC_COREQUEST_CONTRACT = "@mozilla.org/dom/createofferrequest;1";
 const PC_DTMF_SENDER_CONTRACT = "@mozilla.org/dom/rtcdtmfsender;1";
 
 const PC_CID = Components.ID("{bdc2e533-b308-4708-ac8e-a8bfade6d851}");
 const PC_OBS_CID = Components.ID("{d1748d4c-7f6a-4dc5-add6-d55b7678537e}");
 const PC_ICE_CID = Components.ID("{02b9970c-433d-4cc2-923d-f7028ac66073}");
 const PC_SESSION_CID = Components.ID("{1775081b-b62d-4954-8ffe-a067bbf508a7}");
 const PC_MANAGER_CID = Components.ID("{7293e901-2be3-4c02-b4bd-cbef6fc24f78}");
 const PC_STATS_CID = Components.ID("{7fe6e18b-0da3-4056-bf3b-440ef3809e06}");
 const PC_STATIC_CID = Components.ID("{0fb47c47-a205-4583-a9fc-cbadf8c95880}");
 const PC_SENDER_CID = Components.ID("{4fff5d46-d827-4cd4-a970-8fd53977440e}");
 const PC_RECEIVER_CID = Components.ID("{d974b814-8fde-411c-8c45-b86791b81030}");
+const PC_TRANSCEIVER_CID = Components.ID("{09475754-103a-41f5-a2d0-e1f27eb0b537}");
 const PC_COREQUEST_CID = Components.ID("{74b2122d-65a8-4824-aa9e-3d664cb75dc2}");
 const PC_DTMF_SENDER_CID = Components.ID("{3610C242-654E-11E6-8EC0-6D1BE389A607}");
 
 function logMsg(msg, file, line, flag, winID) {
   let scriptErrorClass = Cc["@mozilla.org/scripterror;1"];
   let scriptError = scriptErrorClass.createInstance(Ci.nsIScriptError);
   scriptError.initWithWindowID(msg, file, null, line, 0, flag,
                                "content javascript", winID);
@@ -143,19 +145,18 @@ class GlobalPCList {
       this.handleGMPCrash(data);
     }
   }
 
   observe(subject, topic, data) {
     let cleanupPcRef = function(pcref) {
       let pc = pcref.get();
       if (pc) {
-        pc._pc.close();
-        delete pc._observer;
-        pc._pc = null;
+        pc._suppressEvents = true;
+        pc.close();
       }
     };
 
     let cleanupWinId = function(list, winID) {
       if (list.hasOwnProperty(winID)) {
         list[winID].forEach(cleanupPcRef);
         delete list[winID];
       }
@@ -342,18 +343,18 @@ setupPrototype(RTCStatsReport, {
         "candidate-pair": "candidatepair",
         "local-candidate": "localcandidate",
         "remote-candidate": "remotecandidate"
   }
 });
 
 class RTCPeerConnection {
   constructor() {
-    this._senders = [];
-    this._receivers = [];
+    this._receiveStreams = new Map();
+    this._transceivers = [];
 
     this._pc = null;
     this._closed = false;
 
     this._localType = null;
     this._remoteType = null;
     // http://rtcweb-wg.github.io/jsep/#rfc.section.4.1.9
     // canTrickle == null means unknown; when a remote description is received it
@@ -584,16 +585,28 @@ class RTCPeerConnection {
 
     try {
       wrapCallback(onSucc)(await func());
     } catch (e) {
       wrapCallback(onErr)(e);
     }
   }
 
+  // This implements the fairly common "Queue a task" logic
+  async _queueTaskWithClosedCheck(func) {
+    return new this._win.Promise(resolve => {
+      Services.tm.dispatchToMainThread({ run() {
+        if (!this._closed) {
+          func();
+          resolve();
+        }
+      }});
+    });
+  }
+
   /**
    * An RTCConfiguration may look like this:
    *
    * { "iceServers": [ { urls: "stun:stun.example.org", },
    *                   { url: "stun:stun.example.org", }, // deprecated version
    *                   { urls: ["turn:turn1.x.org", "turn:turn2.x.org"],
    *                     username:"jib", credential:"mypass"} ] }
    *
@@ -685,17 +698,17 @@ class RTCPeerConnection {
       throw new this._win.DOMException("Peer connection is closed",
                                        "InvalidStateError");
     }
   }
 
   dispatchEvent(event) {
     // PC can close while events are firing if there is an async dispatch
     // in c++ land. But let through "closed" signaling and ice connection events.
-    if (!this._closed || this._inClose) {
+    if (!this._suppressEvents) {
       this.__DOM_IMPL__.dispatchEvent(event);
     }
   }
 
   // Log error message to web console and window.onerror, if present.
   logErrorAndCallOnError(e) {
     this.logMsg(e.message, e.fileName, e.lineNumber, Ci.nsIScriptError.exceptionFlag);
 
@@ -752,21 +765,70 @@ class RTCPeerConnection {
                             set(h) {
                               this.logWarning(name + " is deprecated! " + msg);
                               return this.setEH(name, h);
                             }
                           });
   }
 
   createOffer(optionsOrOnSucc, onErr, options) {
-    // This entry-point handles both new and legacy call sig. Decipher which one
+    let onSuccess = null;
     if (typeof optionsOrOnSucc == "function") {
-      return this._legacy(optionsOrOnSucc, onErr, () => this._createOffer(options));
+      onSuccess = optionsOrOnSucc;
+    } else {
+      options = optionsOrOnSucc;
+    }
+
+    // Spec language implies that this needs to happen as if it were called
+    // before createOffer, so we do this as early as possible.
+    this._ensureTransceiversForOfferToReceive(options);
+
+    // This entry-point handles both new and legacy call sig. Decipher which one
+    if (onSuccess) {
+      return this._legacy(onSuccess, onErr, () => this._createOffer(options));
     }
-    return this._async(() => this._createOffer(optionsOrOnSucc));
+
+    return this._async(() => this._createOffer(options));
+  }
+
+  // Ensures that we have at least one transceiver of |kind| that is
+  // configured to receive. It will create one if necessary.
+  _ensureOfferToReceive(kind) {
+    let hasRecv = this._transceivers.some(
+      transceiver =>
+        transceiver.getKind() == kind &&
+        (transceiver.direction == "sendrecv" || transceiver.direction == "recvonly") &&
+        !transceiver.stopped);
+
+    if (!hasRecv) {
+      this._addTransceiverNoEvents(kind, {direction: "recvonly"});
+    }
+  }
+
+  // Handles offerToReceiveAudio/Video
+  _ensureTransceiversForOfferToReceive(options) {
+    if (options.offerToReceiveVideo) {
+      this._ensureOfferToReceive("video");
+    }
+
+    if (options.offerToReceiveVideo === false) {
+      this.logWarning("offerToReceiveVideo: false is ignored now. If you " +
+                      "want to disallow a recv track, use " +
+                      "RTCRtpTransceiver.direction");
+    }
+
+    if (options.offerToReceiveAudio) {
+      this._ensureOfferToReceive("audio");
+    }
+
+    if (options.offerToReceiveAudio === false) {
+      this.logWarning("offerToReceiveAudio: false is ignored now. If you " +
+                      "want to disallow a recv track, use " +
+                      "RTCRtpTransceiver.direction");
+    }
   }
 
   async _createOffer(options) {
     this._checkClosed();
     let origin = Cu.getWebIDLCallerPrincipal().origin;
     return this._chain(async () => {
       let haveAssertion;
       if (this._localIdp.enabled) {
@@ -1061,118 +1123,209 @@ class RTCPeerConnection {
     stream.getTracks().forEach(track => this.addTrack(track, stream));
   }
 
   addTrack(track, stream) {
     if (stream.currentTime === undefined) {
       throw new this._win.DOMException("invalid stream.", "InvalidParameterError");
     }
     this._checkClosed();
-    this._senders.forEach(sender => {
-      if (sender.track == track) {
-        throw new this._win.DOMException("already added.",
-                                         "InvalidParameterError");
-      }
+
+    if (this._transceivers.some(
+          transceiver => transceiver.sender.track == track)) {
+      throw new this._win.DOMException("This track is already set on a sender.",
+                                       "InvalidAccessError");
+    }
+
+    let transceiver = this._transceivers.find(transceiver => {
+      return transceiver.sender.track == null &&
+             transceiver.getKind() == track.kind &&
+             !transceiver.stopped &&
+             !transceiver.hasBeenUsedToSend();
     });
-    this._impl.addTrack(track, stream);
-    let sender = this._win.RTCRtpSender._create(this._win,
-                                                new RTCRtpSender(this, track,
-                                                                 stream));
-    this._senders.push(sender);
-    return sender;
+
+    if (transceiver) {
+      transceiver.sender.setTrack(track);
+      transceiver.sender.setStreams([stream]);
+      if (transceiver.direction == "recvonly") {
+        transceiver.setDirectionInternal("sendrecv");
+      } else if (transceiver.direction == "inactive") {
+        transceiver.setDirectionInternal("sendonly");
+      }
+    } else {
+      transceiver = this._addTransceiverNoEvents(track, {
+        streams: [stream],
+        direction: "sendrecv"
+      });
+    }
+
+    transceiver.setAddTrackMagic();
+    transceiver.sync();
+    this.updateNegotiationNeeded();
+    return transceiver.sender;
   }
 
   removeTrack(sender) {
     this._checkClosed();
-    var i = this._senders.indexOf(sender);
-    if (i >= 0) {
-      this._senders.splice(i, 1);
-      this._impl.removeTrack(sender.track); // fires negotiation needed
+
+    sender.checkWasCreatedByPc(this.__DOM_IMPL__);
+
+    let transceiver =
+      this._transceivers.find(transceiver => transceiver.sender == sender);
+
+    // If the transceiver was removed due to rollback, let it slide.
+    if (!transceiver || !sender.track) {
+      return;
+    }
+
+    // TODO(bug 1401983): Move to TransceiverImpl?
+    this._impl.removeTrack(sender.track);
+
+    sender.setTrack(null);
+    if (transceiver.direction == "sendrecv") {
+      transceiver.setDirectionInternal("recvonly");
+    } else if (transceiver.direction == "sendonly") {
+      transceiver.setDirectionInternal("inactive");
     }
+
+    transceiver.sync();
+    this.updateNegotiationNeeded();
+  }
+
+  _addTransceiverNoEvents(sendTrackOrKind, init) {
+    let sendTrack = null;
+    let kind;
+    if (typeof(sendTrackOrKind) == "string") {
+      kind = sendTrackOrKind;
+      switch (kind) {
+        case "audio":
+        case "video":
+          break;
+        default:
+          throw new this._win.TypeError("Invalid media kind");
+      }
+    } else {
+      sendTrack = sendTrackOrKind;
+      kind = sendTrack.kind;
+    }
+
+    let transceiverImpl = this._impl.createTransceiverImpl(kind, sendTrack);
+    let transceiver = this._win.RTCRtpTransceiver._create(
+        this._win,
+        new RTCRtpTransceiver(this, transceiverImpl, init, kind, sendTrack));
+    transceiver.sync();
+    this._transceivers.push(transceiver);
+    return transceiver;
   }
 
-  _insertDTMF(sender, tones, duration, interToneGap) {
-    return this._impl.insertDTMF(sender.__DOM_IMPL__, tones, duration, interToneGap);
+  _onTransceiverNeeded(kind, transceiverImpl) {
+    let init = {direction: "recvonly"};
+    let transceiver = this._win.RTCRtpTransceiver._create(
+        this._win,
+        new RTCRtpTransceiver(this, transceiverImpl, init, kind, null));
+    transceiver.sync();
+    this._transceivers.push(transceiver);
+  }
+
+  addTransceiver(sendTrackOrKind, init) {
+    let transceiver = this._addTransceiverNoEvents(sendTrackOrKind, init);
+    this.updateNegotiationNeeded();
+    return transceiver;
+  }
+
+  _syncTransceivers() {
+    this._transceivers.forEach(transceiver => transceiver.sync());
+  }
+
+  updateNegotiationNeeded() {
+    if (this._closed || this.signalingState != "stable") {
+      return;
+    }
+
+    let negotiationNeeded = this._impl.checkNegotiationNeeded();
+    if (!negotiationNeeded) {
+      this._negotiationNeeded = false;
+      return;
+    }
+
+    if (this._negotiationNeeded) {
+      return;
+    }
+
+    this._negotiationNeeded = true;
+
+    this._queueTaskWithClosedCheck(() => {
+      if (this._negotiationNeeded) {
+        this.dispatchEvent(new this._win.Event("negotiationneeded"));
+      }
+    });
+  }
+
+  _getOrCreateStream(id) {
+    if (!this._receiveStreams.has(id)) {
+      let stream = new this._win.MediaStream();
+      stream.assignId(id);
+      // Legacy event, remove eventually
+      let ev = new this._win.MediaStreamEvent("addstream", { stream });
+      this.dispatchEvent(ev);
+      this._receiveStreams.set(id, stream);
+    }
+
+    return this._receiveStreams.get(id);
+  }
+
+  _insertDTMF(transceiverImpl, tones, duration, interToneGap) {
+    return this._impl.insertDTMF(transceiverImpl, tones, duration, interToneGap);
   }
 
   _getDTMFToneBuffer(sender) {
     return this._impl.getDTMFToneBuffer(sender.__DOM_IMPL__);
   }
 
-  async _replaceTrack(sender, withTrack) {
+  _replaceTrack(transceiverImpl, withTrack) {
     this._checkClosed();
-    return this._chain(() => new Promise((resolve, reject) => {
-      this._onReplaceTrackSender = sender;
-      this._onReplaceTrackWithTrack = withTrack;
-      this._onReplaceTrackSuccess = resolve;
-      this._onReplaceTrackFailure = reject;
-      this._impl.replaceTrack(sender.track, withTrack);
-    }));
-  }
-
-  _setParameters({ track }, parameters) {
-    if (!Services.prefs.getBoolPref("media.peerconnection.simulcast")) {
-      return;
-    }
-    // validate parameters input
-    var encodings = parameters.encodings || [];
-
-    encodings.reduce((uniqueRids, { rid, scaleResolutionDownBy }) => {
-      if (scaleResolutionDownBy < 1.0) {
-        throw new this._win.RangeError("scaleResolutionDownBy must be >= 1.0");
-      }
-      if (!rid && encodings.length > 1) {
-        throw new this._win.DOMException("Missing rid", "TypeError");
-      }
-      if (uniqueRids[rid]) {
-        throw new this._win.DOMException("Duplicate rid", "TypeError");
-      }
-      uniqueRids[rid] = true;
-      return uniqueRids;
-    }, {});
-
-    this._impl.setParameters(track, parameters);
-  }
-
-  _getParameters({ track }) {
-    if (!Services.prefs.getBoolPref("media.peerconnection.simulcast")) {
-      return null;
-    }
-    return this._impl.getParameters(track);
+    this._impl.replaceTrackNoRenegotiation(transceiverImpl, withTrack);
   }
 
   close() {
     if (this._closed) {
       return;
     }
     this._closed = true;
-    this._inClose = true;
     this.changeIceConnectionState("closed");
     this._localIdp.close();
     this._remoteIdp.close();
     this._impl.close();
-    this._inClose = false;
+    this._suppressEvents = true;
+    delete this._pc;
+    delete this._observer;
   }
 
   getLocalStreams() {
     this._checkClosed();
-    return this._impl.getLocalStreams();
+    let localStreams = new Set();
+    this._transceivers.forEach(transceiver => {
+      transceiver.sender.getStreams().forEach(stream => {
+        localStreams.add(stream);
+      });
+    });
+    return [...localStreams.values()];
   }
 
   getRemoteStreams() {
     this._checkClosed();
-    return this._impl.getRemoteStreams();
+    return [...this._receiveStreams.values()];
   }
 
   getSenders() {
-    return this._senders;
+    return this.getTransceivers().map(transceiver => transceiver.sender);
   }
 
   getReceivers() {
-    return this._receivers;
+    return this.getTransceivers().map(transceiver => transceiver.receiver);
   }
 
   mozAddRIDExtension(receiver, extensionId) {
     this._impl.addRIDExtension(receiver.track, extensionId);
   }
 
   mozAddRIDFilter(receiver, rid) {
     this._impl.addRIDFilter(receiver.track, rid);
@@ -1185,16 +1338,20 @@ class RTCPeerConnection {
   mozEnablePacketDump(level, type, sending) {
     this._impl.enablePacketDump(level, type, sending);
   }
 
   mozDisablePacketDump(level, type, sending) {
     this._impl.disablePacketDump(level, type, sending);
   }
 
+  getTransceivers() {
+    return this._transceivers;
+  }
+
   get localDescription() {
     this._checkClosed();
     let sdp = this._impl.localDescription;
     if (sdp.length == 0) {
       return null;
     }
     return new this._win.RTCSessionDescription({ type: this._localType, sdp });
   }
@@ -1322,19 +1479,27 @@ class RTCPeerConnection {
     if (maxPacketLifeTime) {
       type = Ci.IPeerConnection.kDataChannelPartialReliableTimed;
     } else if (maxRetransmits) {
       type = Ci.IPeerConnection.kDataChannelPartialReliableRexmit;
     } else {
       type = Ci.IPeerConnection.kDataChannelReliable;
     }
     // Synchronous since it doesn't block.
-    return this._impl.createDataChannel(label, protocol, type, ordered,
-                                        maxPacketLifeTime, maxRetransmits,
-                                        negotiated, id);
+    let dataChannel =
+      this._impl.createDataChannel(label, protocol, type, ordered,
+                                   maxPacketLifeTime, maxRetransmits,
+                                   negotiated, id);
+
+    // Spec says to only do this if this is the first DataChannel created,
+    // but the c++ code that does the "is negotiation needed" checking will
+    // only ever return true on the first one.
+    this.updateNegotiationNeeded();
+
+    return dataChannel;
   }
 }
 setupPrototype(RTCPeerConnection, {
   classID: PC_CID,
   contractID: PC_CONTRACT,
   QueryInterface: XPCOMUtils.generateQI([Ci.nsISupports,
                                          Ci.nsIDOMGlobalPropertyInitializer]),
   _actions: {
@@ -1390,20 +1555,26 @@ class PeerConnectionObserver {
     this._dompc._onCreateAnswerSuccess(sdp);
   }
 
   onCreateAnswerError(code, message) {
     this._dompc._onCreateAnswerFailure(this.newError(message, code));
   }
 
   onSetLocalDescriptionSuccess() {
+    this._dompc._syncTransceivers();
+    this._negotiationNeeded = false;
+    this._dompc.updateNegotiationNeeded();
     this._dompc._onSetLocalDescriptionSuccess();
   }
 
   onSetRemoteDescriptionSuccess() {
+    this._dompc._syncTransceivers();
+    this._negotiationNeeded = false;
+    this._dompc.updateNegotiationNeeded();
     this._dompc._onSetRemoteDescriptionSuccess();
   }
 
   onSetLocalDescriptionError(code, message) {
     this._localType = null;
     this._dompc._onSetLocalDescriptionFailure(this.newError(message, code));
   }
 
@@ -1430,20 +1601,16 @@ class PeerConnectionObserver {
     } else {
       candidate = null;
 
     }
     this.dispatchEvent(new win.RTCPeerConnectionIceEvent("icecandidate",
                                                          { candidate }));
   }
 
-  onNegotiationNeeded() {
-    this.dispatchEvent(new this._win.Event("negotiationneeded"));
-  }
-
   // This method is primarily responsible for updating iceConnectionState.
   // This state is defined in the WebRTC specification as follows:
   //
   // iceConnectionState:
   // -------------------
   //   new           Any of the RTCIceTransports are in the new state and none
   //                 of them are in the checking, failed or disconnected state.
   //
@@ -1553,81 +1720,85 @@ class PeerConnectionObserver {
                               pc._onGetStatsIsLegacy);
     pc._onGetStatsSuccess(webidlobj);
   }
 
   onGetStatsError(code, message) {
     this._dompc._onGetStatsFailure(this.newError(message, code));
   }
 
-  onAddStream(stream) {
-    let ev = new this._dompc._win.MediaStreamEvent("addstream", { stream });
-    this.dispatchEvent(ev);
-  }
-
   onRemoveStream(stream) {
     this.dispatchEvent(new this._dompc._win.MediaStreamEvent("removestream",
                                                              { stream }));
   }
 
-  onAddTrack(track, streams) {
+  _getTransceiverWithRecvTrack(webrtcTrackId) {
+    return this._dompc.getTransceivers().find(
+        transceiver => transceiver.remoteTrackIdIs(webrtcTrackId));
+  }
+
+  onTrack(webrtcTrackId, streamIds) {
     let pc = this._dompc;
-    let receiver = pc._win.RTCRtpReceiver._create(pc._win,
-                                                  new RTCRtpReceiver(pc,
-                                                                     track));
-    pc._receivers.push(receiver);
-    let ev = new pc._win.RTCTrackEvent("track", { receiver, track, streams });
+    let matchingTransceiver = this._getTransceiverWithRecvTrack(webrtcTrackId);
+
+    // Get or create MediaStreams, and add the new track to them.
+    let streams = streamIds.map(id => this._dompc._getOrCreateStream(id));
+
+    streams.forEach(stream => {
+      stream.addTrack(matchingTransceiver.receiver.track);
+      // Adding tracks from JS does not result in the stream getting
+      // onaddtrack, so we need to do that here. The mediacapture spec says
+      // this needs to be queued, also.
+      pc._queueTaskWithClosedCheck(() => {
+        stream.dispatchEvent(
+            new pc._win.MediaStreamTrackEvent(
+              "addtrack", { track: matchingTransceiver.receiver.track }));
+      });
+    });
+
+
+    let ev = new pc._win.RTCTrackEvent("track", {
+      receiver: matchingTransceiver.receiver,
+      track: matchingTransceiver.receiver.track,
+      streams,
+      transceiver: matchingTransceiver });
     this.dispatchEvent(ev);
 
     // Fire legacy event as well for a little bit.
-    ev = new pc._win.MediaStreamTrackEvent("addtrack", { track });
+    ev = new pc._win.MediaStreamTrackEvent("addtrack",
+        { track: matchingTransceiver.receiver.track });
     this.dispatchEvent(ev);
   }
 
-  onRemoveTrack(track) {
-    let pc = this._dompc;
-    let i = pc._receivers.findIndex(receiver => receiver.track == track);
-    if (i >= 0) {
-      pc._receivers.splice(i, 1);
-    }
-  }
-
-  onReplaceTrackSuccess() {
-    var pc = this._dompc;
-    pc._onReplaceTrackSender.track = pc._onReplaceTrackWithTrack;
-    pc._onReplaceTrackWithTrack = null;
-    pc._onReplaceTrackSender = null;
-    pc._onReplaceTrackSuccess();
-  }
-
-  onReplaceTrackError(code, message) {
-    var pc = this._dompc;
-    pc._onReplaceTrackWithTrack = null;
-    pc._onReplaceTrackSender = null;
-    pc._onReplaceTrackFailure(this.newError(message, code));
+  onTransceiverNeeded(kind, transceiverImpl) {
+    this._dompc._onTransceiverNeeded(kind, transceiverImpl);
   }
 
   notifyDataChannel(channel) {
     this.dispatchEvent(new this._dompc._win.RTCDataChannelEvent("datachannel",
                                                                 { channel }));
   }
 
-  onDTMFToneChange(trackId, tone) {
+  onDTMFToneChange(track, tone) {
     var pc = this._dompc;
-    var sender = pc._senders.find(({track}) => track.id == trackId);
+    var sender = pc.getSenders().find(sender => sender.track == track);
     sender.dtmf.dispatchEvent(new pc._win.RTCDTMFToneChangeEvent("tonechange",
                                                                  { tone }));
   }
 
   onPacket(level, type, sending, packet) {
     var pc = this._dompc;
     if (pc._onPacket) {
       pc._onPacket(level, type, sending, packet);
     }
   }
+
+  syncTransceivers() {
+    this._dompc._syncTransceivers();
+  }
 }
 setupPrototype(PeerConnectionObserver, {
   classID: PC_OBS_CID,
   contractID: PC_OBS_CONTRACT,
   QueryInterface: XPCOMUtils.generateQI([Ci.nsISupports,
                                          Ci.nsIDOMGlobalPropertyInitializer])
 });
 
@@ -1662,87 +1833,352 @@ class RTCDTMFSender {
   }
 
   set ontonechange(handler) {
     this.__DOM_IMPL__.setEventHandler("ontonechange", handler);
   }
 
   insertDTMF(tones, duration, interToneGap) {
     this._sender._pc._checkClosed();
-
-    if (this._sender._pc._senders.indexOf(this._sender.__DOM_IMPL__) == -1) {
-      throw new this._sender._pc._win.DOMException("RTCRtpSender is stopped",
-                                                   "InvalidStateError");
-    }
-
-    duration = Math.max(40, Math.min(duration, 6000));
-    if (interToneGap < 30) interToneGap = 30;
-
-    tones = tones.toUpperCase();
-
-    if (tones.match(/[^0-9A-D#*,]/)) {
-      throw new this._sender._pc._win.DOMException("Invalid DTMF characters",
-                                                   "InvalidCharacterError");
-    }
-
-    this._sender._pc._insertDTMF(this._sender, tones, duration, interToneGap);
+    this._sender._transceiver.insertDTMF(tones, duration, interToneGap);
   }
 }
 setupPrototype(RTCDTMFSender, {
   classID: PC_DTMF_SENDER_CID,
   contractID: PC_DTMF_SENDER_CONTRACT,
   QueryInterface: XPCOMUtils.generateQI([Ci.nsISupports])
 });
 
 class RTCRtpSender {
-  constructor(pc, track, stream) {
-    let dtmf = pc._win.RTCDTMFSender._create(pc._win, new RTCDTMFSender(this));
-    Object.assign(this, { _pc: pc, track, _stream: stream, dtmf });
+  constructor(pc, transceiverImpl, transceiver, track, streams) {
+    let dtmf = pc._win.RTCDTMFSender._create(
+        pc._win, new RTCDTMFSender(this));
+
+    Object.assign(this, {
+      _pc: pc,
+      _transceiverImpl: transceiverImpl,
+      _transceiver: transceiver,
+      track,
+      _streams: streams,
+      dtmf });
   }
 
   replaceTrack(withTrack) {
-    return this._pc._async(() => this._pc._replaceTrack(this, withTrack));
+    // async functions in here return a chrome promise, which is not something
+    // content can use. This wraps that promise in something content can use.
+    return this._pc._win.Promise.resolve(this._replaceTrack(withTrack));
+  }
+
+  async _replaceTrack(withTrack) {
+    this._pc._checkClosed();
+
+    if (this._transceiver.stopped) {
+      throw new this._pc._win.DOMException(
+          "Cannot call replaceTrack when transceiver is stopped",
+          "InvalidStateError");
+    }
+
+    if (withTrack && (withTrack.kind != this._transceiver.getKind())) {
+      throw new this._pc._win.DOMException(
+          "Cannot replaceTrack with a different kind!",
+          "TypeError");
+    }
+
+    // Updates the track on the MediaPipeline; this is needed whether or not
+    // we've associated this transceiver, the spec language notwithstanding.
+    // Synchronous, and will throw on failure.
+    this._pc._replaceTrack(this._transceiverImpl, withTrack);
+
+    let setTrack = () => {
+      this.track = withTrack;
+      this._transceiver.sync();
+    };
+
+    // Spec is a little weird here; we only queue if the transceiver was
+    // associated, otherwise we update the track synchronously.
+    if (this._transceiver.mid == null) {
+      setTrack();
+    } else {
+      // We're supposed to queue a task if the transceiver is associated
+      await this._pc._queueTaskWithClosedCheck(setTrack);
+    }
   }
 
   setParameters(parameters) {
-    return this._pc._win.Promise.resolve()
-      .then(() => this._pc._setParameters(this, parameters));
+    return this._pc._win.Promise.resolve(this._setParameters(parameters));
+  }
+
+  async _setParameters(parameters) {
+    this._pc._checkClosed();
+
+    if (this._transceiver.stopped) {
+      throw new this._pc._win.DOMException(
+          "This sender's transceiver is stopped", "InvalidStateError");
+    }
+
+    if (!Services.prefs.getBoolPref("media.peerconnection.simulcast")) {
+      return;
+    }
+
+    parameters.encodings = parameters.encodings || [];
+
+    parameters.encodings.reduce((uniqueRids, { rid, scaleResolutionDownBy }) => {
+      if (scaleResolutionDownBy < 1.0) {
+        throw new this._pc._win.RangeError("scaleResolutionDownBy must be >= 1.0");
+      }
+      if (!rid && parameters.encodings.length > 1) {
+        throw new this._pc._win.DOMException("Missing rid", "TypeError");
+      }
+      if (uniqueRids[rid]) {
+        throw new this._pc._win.DOMException("Duplicate rid", "TypeError");
+      }
+      uniqueRids[rid] = true;
+      return uniqueRids;
+    }, {});
+
+    // TODO(bug 1401592): transaction ids, timing changes
+
+    await this._pc._queueTaskWithClosedCheck(() => {
+      this.parameters = parameters;
+      this._transceiver.sync();
+    });
   }
 
   getParameters() {
-    return this._pc._getParameters(this);
+    // TODO(bug 1401592): transaction ids
+
+    // All the other stuff that the spec says to update is handled when
+    // transceivers are synced.
+    return this.parameters;
+  }
+
+  setStreams(streams) {
+    this._streams = streams;
+  }
+
+  getStreams() {
+    return this._streams;
+  }
+
+  setTrack(track) {
+    this.track = track;
   }
 
   getStats() {
     return this._pc._async(
       async () => this._pc._getStats(this.track));
   }
+
+  checkWasCreatedByPc(pc) {
+    if (pc != this._pc.__DOM_IMPL__) {
+      throw new this._pc._win.DOMException(
+          "This sender was not created by this PeerConnection",
+          "InvalidAccessError");
+    }
+  }
 }
 setupPrototype(RTCRtpSender, {
   classID: PC_SENDER_CID,
   contractID: PC_SENDER_CONTRACT,
   QueryInterface: XPCOMUtils.generateQI([Ci.nsISupports])
 });
 
 class RTCRtpReceiver {
-  constructor(pc, track) {
-    Object.assign(this, { _pc: pc, track });
+  constructor(pc, transceiverImpl) {
+    // The receive track is obtained directly from the TransceiverImpl here
+    Object.assign(this,
+        {
+          _pc: pc,
+          _transceiverImpl: transceiverImpl,
+          track: transceiverImpl.getReceiveTrack()
+        });
   }
 
+  // TODO(bug 1401983): Create a getStats binding on TransceiverImpl, and use
+  // that here.
   getStats() {
     return this._pc._async(
       async () => this._pc.getStats(this.track));
   }
 }
 setupPrototype(RTCRtpReceiver, {
   classID: PC_RECEIVER_CID,
   contractID: PC_RECEIVER_CONTRACT,
   QueryInterface: XPCOMUtils.generateQI([Ci.nsISupports])
 });
 
+class RTCRtpTransceiver {
+  constructor(pc, transceiverImpl, init, kind, sendTrack) {
+    let receiver = pc._win.RTCRtpReceiver._create(
+        pc._win, new RTCRtpReceiver(pc, transceiverImpl, kind));
+    let streams = (init && init.streams) || [];
+    let sender = pc._win.RTCRtpSender._create(
+        pc._win, new RTCRtpSender(pc, transceiverImpl, this, sendTrack, streams));
+
+    let direction = (init && init.direction) || "sendrecv";
+    Object.assign(this,
+        {
+          _pc: pc,
+          mid: null,
+          sender,
+          receiver,
+          stopped: false,
+          _direction: direction,
+          currentDirection: null,
+          _remoteTrackId: null,
+          addTrackMagic: false,
+          _hasBeenUsedToSend: false,
+          // cache the media kind so it is available without consulting tracks
+          _kind: kind,
+          _transceiverImpl: transceiverImpl
+        });
+  }
+
+  set direction(direction) {
+    this._pc._checkClosed();
+
+    if (this.stopped) {
+      throw new this._pc._win.DOMException("Transceiver is stopped!",
+                                           "InvalidStateError");
+    }
+
+    if (this._direction == direction) {
+      return;
+    }
+
+    this._direction = direction;
+    this.sync();
+    this._pc.updateNegotiationNeeded();
+  }
+
+  get direction() {
+    return this._direction;
+  }
+
+  setDirectionInternal(direction) {
+    this._direction = direction;
+  }
+
+  stop() {
+    if (this.stopped) {
+      return;
+    }
+
+    this._pc._checkClosed();
+
+    this.setStopped();
+    this.sync();
+    this._pc.updateNegotiationNeeded();
+  }
+
+  setStopped() {
+    this.stopped = true;
+    this.currentDirection = null;
+  }
+
+  remove() {
+    var index = this._pc._transceivers.indexOf(this.__DOM_IMPL__);
+    if (index != -1) {
+      this._pc._transceivers.splice(index, 1);
+    }
+  }
+
+  getKind() {
+    return this._kind;
+  }
+
+  hasBeenUsedToSend() {
+    return this._hasBeenUsedToSend;
+  }
+
+  setRemoteTrackId(webrtcTrackId) {
+    this._remoteTrackId = webrtcTrackId;
+  }
+
+  remoteTrackIdIs(webrtcTrackId) {
+    return this._remoteTrackId == webrtcTrackId;
+  }
+
+  getRemoteTrackId() {
+    return this._remoteTrackId;
+  }
+
+  setAddTrackMagic() {
+    this.addTrackMagic = true;
+  }
+
+  sync() {
+    if (this._syncing) {
+      throw new DOMException("Reentrant sync! This is a bug!", "InternalError");
+    }
+    this._syncing = true;
+    this._transceiverImpl.syncWithJS(this.__DOM_IMPL__);
+    this._syncing = false;
+  }
+
+  // Used by _transceiverImpl.syncWithJS, don't call sync again!
+  setCurrentDirection(direction) {
+    if (this.stopped) {
+      return;
+    }
+
+    switch (direction) {
+      case "sendrecv":
+      case "sendonly":
+        this._hasBeenUsedToSend = true;
+        break;
+      default:
+    }
+
+    this.currentDirection = direction;
+  }
+
+  // Used by _transceiverImpl.syncWithJS, don't call sync again!
+  setMid(mid) {
+    this.mid = mid;
+  }
+
+  // Used by _transceiverImpl.syncWithJS, don't call sync again!
+  unsetMid() {
+    this.mid = null;
+  }
+
+  insertDTMF(tones, duration, interToneGap) {
+    if (this.stopped) {
+      throw new this._pc._win.DOMException("Transceiver is stopped!",
+                                           "InvalidStateError");
+    }
+
+    if (!this.sender.track) {
+      throw new this._pc._win.DOMException("RTCRtpSender has no track",
+                                           "InvalidStateError");
+    }
+
+    duration = Math.max(40, Math.min(duration, 6000));
+    if (interToneGap < 30) interToneGap = 30;
+
+    tones = tones.toUpperCase();
+
+    if (tones.match(/[^0-9A-D#*,]/)) {
+      throw new this._pc._win.DOMException("Invalid DTMF characters",
+                                           "InvalidCharacterError");
+    }
+
+    // TODO (bug 1401983): Move this API to TransceiverImpl so we don't need the
+    // extra hops through RTCPeerConnection and PeerConnectionImpl
+    this._pc._insertDTMF(this._transceiverImpl, tones, duration, interToneGap);
+  }
+}
+
+setupPrototype(RTCRtpTransceiver, {
+  classID: PC_TRANSCEIVER_CID,
+  contractID: PC_TRANSCEIVER_CONTRACT,
+  QueryInterface: XPCOMUtils.generateQI([Ci.nsISupports])
+});
+
 class CreateOfferRequest {
   constructor(windowID, innerWindowID, callID, isSecure) {
     Object.assign(this, { windowID, innerWindowID, callID, isSecure });
   }
 }
 setupPrototype(CreateOfferRequest, {
   classID: PC_COREQUEST_CID,
   contractID: PC_COREQUEST_CONTRACT,
@@ -1753,12 +2189,13 @@ this.NSGetFactory = XPCOMUtils.generateN
   [GlobalPCList,
    RTCDTMFSender,
    RTCIceCandidate,
    RTCSessionDescription,
    RTCPeerConnection,
    RTCPeerConnectionStatic,
    RTCRtpReceiver,
    RTCRtpSender,
+   RTCRtpTransceiver,
    RTCStatsReport,
    PeerConnectionObserver,
    CreateOfferRequest]
 );
--- a/dom/media/mp3/MP3Demuxer.cpp
+++ b/dom/media/mp3/MP3Demuxer.cpp
@@ -536,17 +536,17 @@ MP3TrackDemuxer::FindNextFrame()
       MP3LOG("FindNext() EOS or exceeded maxSkippeableBytes without a frame");
       // This is not a valid MPEG audio stream or we've reached EOS, give up.
       break;
     }
 
     BufferReader reader(buffer, read);
     uint32_t bytesToSkip = 0;
     auto res = mParser.Parse(&reader, &bytesToSkip);
-    foundFrame = res.isOk() ? res.unwrap() : false;
+    foundFrame = res.unwrapOr(false);
     frameHeaderOffset =
       mOffset + reader.Offset() - FrameParser::FrameHeader::SIZE;
 
     // If we've found neither an MPEG frame header nor an ID3v2 tag,
     // the reader shouldn't have any bytes remaining.
     MOZ_ASSERT(foundFrame || bytesToSkip || !reader.Remaining());
 
     if (foundFrame && mParser.FirstFrame().Length() &&
--- a/dom/media/platforms/agnostic/bytestreams/H264.cpp
+++ b/dom/media/platforms/agnostic/bytestreams/H264.cpp
@@ -806,20 +806,20 @@ H264::GetFrameType(const mozilla::MediaR
 
   int nalLenSize = ((*aSample->mExtraData)[4] & 3) + 1;
 
   BufferReader reader(aSample->Data(), aSample->Size());
 
   while (reader.Remaining() >= nalLenSize) {
     uint32_t nalLen = 0;
     switch (nalLenSize) {
-      case 1: Unused << reader.ReadU8().map([&] (uint8_t x) mutable { return nalLen = x; }); break;
-      case 2: Unused << reader.ReadU16().map([&] (uint16_t x) mutable { return nalLen = x; }); break;
-      case 3: Unused << reader.ReadU24().map([&] (uint32_t x) mutable { return nalLen = x; }); break;
-      case 4: Unused << reader.ReadU32().map([&] (uint32_t x) mutable { return nalLen = x; }); break;
+      case 1: nalLen = reader.ReadU8().unwrapOr(0); break;
+      case 2: nalLen = reader.ReadU16().unwrapOr(0); break;
+      case 3: nalLen = reader.ReadU24().unwrapOr(0); break;
+      case 4: nalLen = reader.ReadU32().unwrapOr(0); break;
     }
     if (!nalLen) {
       continue;
     }
     const uint8_t* p = reader.Read(nalLen);
     if (!p) {
       return FrameType::INVALID;
     }
--- a/dom/media/tests/mochitest/head.js
+++ b/dom/media/tests/mochitest/head.js
@@ -314,16 +314,17 @@ function setupEnvironment() {
 
   var defaultMochitestPrefs = {
     'set': [
       ['media.peerconnection.enabled', true],
       ['media.peerconnection.identity.enabled', true],
       ['media.peerconnection.identity.timeout', 120000],
       ['media.peerconnection.ice.stun_client_maximum_transmits', 14],
       ['media.peerconnection.ice.trickle_grace_period', 30000],
+      ['media.peerconnection.remoteTrackId.enabled', true],
       ['media.navigator.permission.disabled', true],
       ['media.navigator.streams.fake', FAKE_ENABLED],
       ['media.getusermedia.screensharing.enabled', true],
       ['media.getusermedia.audiocapture.enabled', true],
       ['media.recorder.audio_node.enabled', true]
     ]
   };
 
--- a/dom/media/tests/mochitest/mochitest.ini
+++ b/dom/media/tests/mochitest/mochitest.ini
@@ -93,16 +93,18 @@ skip-if = toolkit == 'android' # no scre
 [test_getUserMedia_trackCloneCleanup.html]
 [test_getUserMedia_trackEnded.html]
 [test_getUserMedia_peerIdentity.html]
 [test_peerConnection_addIceCandidate.html]
 [test_peerConnection_addtrack_removetrack_events.html]
 skip-if = android_version == '18' # android(Bug 1189784, timeouts on 4.3 emulator)
 [test_peerConnection_audioCodecs.html]
 skip-if = (android_version == '18') # android(Bug 1189784, timeouts on 4.3 emulator)
+[test_peerConnection_transceivers.html]
+skip-if = (android_version == '18') # android(Bug 1189784, timeouts on 4.3 emulator)
 [test_peerConnection_basicAudio.html]
 skip-if = (android_version == '18') # android(Bug 1189784, timeouts on 4.3 emulator)
 [test_peerConnection_checkPacketDumpHook.html]
 skip-if = (android_version == '18') # android(Bug 1189784, timeouts on 4.3 emulator)
 [test_peerConnection_basicAudioNATSrflx.html]
 skip-if = toolkit == 'android' || (os == 'linux' && (debug || asan)) # websockets don't work on android (bug 1266217), linux hang (bug 1339568)
 [test_peerConnection_basicAudioNATRelay.html]
 skip-if = toolkit == 'android' || (os == 'linux' && (debug || asan)) # websockets don't work on android (bug 1266217), linux hang (bug 1339568)
@@ -123,16 +125,18 @@ skip-if = (android_version == '18') # an
 [test_peerConnection_basicAudioVideoCombined.html]
 skip-if = toolkit == 'android'  # Bug 1189784
 [test_peerConnection_basicAudioVideoNoBundle.html]
 skip-if = (android_version == '18') # android(Bug 1189784, timeouts on 4.3 emulator)
 [test_peerConnection_basicAudioVideoNoBundleNoRtcpMux.html]
 skip-if = android_version == '18' # android(Bug 1189784, timeouts on 4.3 emulator)
 [test_peerConnection_basicAudioVideoNoRtcpMux.html]
 skip-if = android_version == '18' # android(Bug 1189784, timeouts on 4.3 emulator)
+[test_peerConnection_basicAudioVideoTransceivers.html]
+skip-if = android_version == '18' # android(Bug 1189784, timeouts on 4.3 emulator)
 [test_peerConnection_basicVideo.html]
 skip-if = (android_version == '18' && debug) # android(Bug 1189784, timeouts on 4.3 emulator)
 [test_peerConnection_basicVideoVerifyRtpHeaderExtensions.html]
 skip-if = (android_version == '18') # android(Bug 1189784, timeouts on 4.3 emulator)
 [test_peerConnection_basicScreenshare.html]
 # frequent timeouts/crashes on e10s (bug 1048455)
 skip-if = toolkit == 'android' # no screenshare on android
 [test_peerConnection_basicWindowshare.html]
--- a/dom/media/tests/mochitest/pc.js
+++ b/dom/media/tests/mochitest/pc.js
@@ -145,17 +145,17 @@ PeerConnectionTest.prototype.closePC = f
       Promise.all(pc._pc.getReceivers()
         .filter(receiver => receiver.track.readyState == "live")
         .map(receiver => {
           info("Waiting for track " + receiver.track.id + " (" +
                receiver.track.kind + ") to end.");
           return haveEvent(receiver.track, "ended", wait(50000))
             .then(event => {
               is(event.target, receiver.track, "Event target should be the correct track");
-              info("ended fired for track " + receiver.track.id);
+              info(pc + " ended fired for track " + receiver.track.id);
             }, e => e ? Promise.reject(e)
                       : ok(false, "ended never fired for track " +
                                     receiver.track.id));
         }))
     ]);
     pc.close();
     return promise;
   };
@@ -756,18 +756,20 @@ function PeerConnectionWrapper(label, co
   this._local_ice_candidates = [];
   this._remote_ice_candidates = [];
   this.localRequiresTrickleIce = false;
   this.remoteRequiresTrickleIce = false;
   this.localMediaElements = [];
   this.remoteMediaElements = [];
   this.audioElementsOnly = false;
 
+  this._sendStreams = [];
+
   this.expectedLocalTrackInfoById = {};
-  this.expectedRemoteTrackInfoById = {};
+  this.expectedSignalledTrackInfoById = {};
   this.observedRemoteTrackInfoById = {};
 
   this.disableRtpCountChecking = false;
 
   this.iceConnectedResolve;
   this.iceConnectedReject;
   this.iceConnected = new Promise((resolve, reject) => {
     this.iceConnectedResolve = resolve;
@@ -870,36 +872,70 @@ PeerConnectionWrapper.prototype = {
   get iceConnectionState() {
     return this._pc.iceConnectionState;
   },
 
   setIdentityProvider: function(provider, protocol, identity) {
     this._pc.setIdentityProvider(provider, protocol, identity);
   },
 
+  elementPrefix : direction =>
+  {
+    return [this.label, direction].join('_');
+  },
+
+  getMediaElementForTrack : function (track, direction)
+  {
+    var prefix = this.elementPrefix(direction);
+    return getMediaElementForTrack(track, prefix);
+  },
+
+  createMediaElementForTrack : function(track, direction)
+  {
+    var prefix = this.elementPrefix(direction);
+    return createMediaElementForTrack(track, prefix);
+  },
+
   ensureMediaElement : function(track, direction) {
-    const idPrefix = [this.label, direction].join('_');
-    var element = getMediaElementForTrack(track, idPrefix);
-
+    var prefix = this.elementPrefix(direction);
+    var element = this.getMediaElementForTrack(track, direction);
     if (!element) {
-      element = createMediaElementForTrack(track, idPrefix);
+      element = this.createMediaElementForTrack(track, direction);
       if (direction == "local") {
         this.localMediaElements.push(element);
       } else if (direction == "remote") {
         this.remoteMediaElements.push(element);
       }
     }
 
     // We do this regardless, because sometimes we end up with a new stream with
     // an old id (ie; the rollback tests cause the same stream to be added
     // twice)
     element.srcObject = new MediaStream([track]);
     element.play();
   },
 
+  addSendStream : function(stream)
+  {
+    // The PeerConnection will not necessarily know about this stream
+    // automatically, because replaceTrack is not told about any streams the
+    // new track might be associated with. Only content really knows.
+    this._sendStreams.push(stream);
+  },
+
+  getStreamForSendTrack : function(track)
+  {
+    return this._sendStreams.find(str => str.getTrackById(track.id));
+  },
+
+  getStreamForRecvTrack : function(track)
+  {
+    return this._pc.getRemoteStreams().find(s => !!s.getTrackById(track.id));
+  },
+
   /**
    * Attaches a local track to this RTCPeerConnection using
    * RTCPeerConnection.addTrack().
    *
    * Also creates a media element playing a MediaStream containing all
    * tracks that have been added to `stream` using `attachLocalTrack()`.
    *
    * @param {MediaStreamTrack} track
@@ -916,110 +952,147 @@ PeerConnectionWrapper.prototype = {
 
     ok(track.id, "track has id");
     ok(track.kind, "track has kind");
     ok(stream.id, "stream has id");
     this.expectedLocalTrackInfoById[track.id] = {
       type: track.kind,
       streamId: stream.id,
     };
+    this.expectedSignalledTrackInfoById[track.id] =
+      this.expectedLocalTrackInfoById[track.id];
+
+    this.addSendStream(stream);
 
     // This will create one media element per track, which might not be how
     // we set up things with the RTCPeerConnection. It's the only way
     // we can ensure all sent tracks are flowing however.
     this.ensureMediaElement(track, "local");
 
     return this.observedNegotiationNeeded;
   },
 
   /**
    * Callback when we get local media. Also an appropriate HTML media element
    * will be created and added to the content node.
    *
    * @param {MediaStream} stream
    *        Media stream to handle
    */
-  attachLocalStream : function(stream) {
+  attachLocalStream : function(stream, useAddTransceiver) {
     info("Got local media stream: (" + stream.id + ")");
 
     this.expectNegotiationNeeded();
+    if (useAddTransceiver) {
+      info("Using addTransceiver (on PC).");
+      stream.getTracks().forEach(track => {
+        var transceiver = this._pc.addTransceiver(track, {streams: [stream]});
+        is(transceiver.sender.track, track, "addTransceiver returns sender");
+      });
+    }
     // In order to test both the addStream and addTrack APIs, we do half one
     // way, half the other, at random.
-    if (Math.random() < 0.5) {
+    else if (Math.random() < 0.5) {
       info("Using addStream.");
       this._pc.addStream(stream);
       ok(this._pc.getSenders().find(sender => sender.track == stream.getTracks()[0]),
          "addStream returns sender");
     } else {
       info("Using addTrack (on PC).");
       stream.getTracks().forEach(track => {
         var sender = this._pc.addTrack(track, stream);
         is(sender.track, track, "addTrack returns sender");
       });
     }
 
+    this.addSendStream(stream);
+
     stream.getTracks().forEach(track => {
       ok(track.id, "track has id");
       ok(track.kind, "track has kind");
       this.expectedLocalTrackInfoById[track.id] = {
           type: track.kind,
           streamId: stream.id
         };
+      this.expectedSignalledTrackInfoById[track.id] =
+        this.expectedLocalTrackInfoById[track.id];
       this.ensureMediaElement(track, "local");
     });
+
+    return this.observedNegotiationNeeded;
   },
 
   removeSender : function(index) {
     var sender = this._pc.getSenders()[index];
     delete this.expectedLocalTrackInfoById[sender.track.id];
     this.expectNegotiationNeeded();
     this._pc.removeTrack(sender);
     return this.observedNegotiationNeeded;
   },
 
-  senderReplaceTrack : function(index, withTrack, withStreamId) {
-    var sender = this._pc.getSenders()[index];
+  senderReplaceTrack : function(sender, withTrack, stream) {
     delete this.expectedLocalTrackInfoById[sender.track.id];
     this.expectedLocalTrackInfoById[withTrack.id] = {
         type: withTrack.kind,
-        streamId: withStreamId
+        streamId: stream.id
       };
+    this.addSendStream(stream);
+    this.ensureMediaElement(withTrack, 'local');
     return sender.replaceTrack(withTrack);
   },
 
+  getUserMedia : async function(constraints) {
+    var stream = await getUserMedia(constraints);
+    if (constraints.audio) {
+      stream.getAudioTracks().forEach(track => {
+        info(this + " gUM local stream " + stream.id +
+          " with audio track " + track.id);
+      });
+    }
+    if (constraints.video) {
+      stream.getVideoTracks().forEach(track => {
+        info(this + " gUM local stream " + stream.id +
+          " with video track " + track.id);
+      });
+    }
+    return stream;
+  },
+
   /**
    * Requests all the media streams as specified in the constrains property.
    *
    * @param {array} constraintsList
    *        Array of constraints for GUM calls
    */
   getAllUserMedia : function(constraintsList) {
     if (constraintsList.length === 0) {
       info("Skipping GUM: no UserMedia requested");
       return Promise.resolve();
     }
 
     info("Get " + constraintsList.length + " local streams");
-    return Promise.all(constraintsList.map(constraints => {
-      return getUserMedia(constraints).then(stream => {
-        if (constraints.audio) {
-          stream.getAudioTracks().forEach(track => {
-            info(this + " gUM local stream " + stream.id +
-              " with audio track " + track.id);
-          });
-        }
-        if (constraints.video) {
-          stream.getVideoTracks().forEach(track => {
-            info(this + " gUM local stream " + stream.id +
-              " with video track " + track.id);
-          });
-        }
-        return this.attachLocalStream(stream);
-      });
-    }));
+    return Promise.all(
+      constraintsList.map(constraints => this.getUserMedia(constraints))
+    );
+  },
+
+  getAllUserMediaAndAddStreams : async function(constraintsList) {
+    var streams = await this.getAllUserMedia(constraintsList);
+    if (!streams) {
+      return;
+    }
+    return Promise.all(streams.map(stream => this.attachLocalStream(stream)));
+  },
+
+  getAllUserMediaAndAddTransceivers : async function(constraintsList) {
+    var streams = await this.getAllUserMedia(constraintsList);
+    if (!streams) {
+      return;
+    }
+    return Promise.all(streams.map(stream => this.attachLocalStream(stream, true)));
   },
 
   /**
    * Create a new data channel instance.  Also creates a promise called
    * `this.nextDataChannel` that resolves when the next data channel arrives.
    */
   expectDataChannel: function(message) {
     this.nextDataChannel = new Promise(resolve => {
@@ -1159,44 +1232,65 @@ PeerConnectionWrapper.prototype = {
     });
   },
 
   /**
    * Checks whether a given track is expected, has not been observed yet, and
    * is of the correct type. Then, moves the track from
    * |expectedTrackInfoById| to |observedTrackInfoById|.
    */
-  checkTrackIsExpected : function(track,
+  checkTrackIsExpected : function(trackId,
+                                  kind,
                                   expectedTrackInfoById,
                                   observedTrackInfoById) {
-    ok(expectedTrackInfoById[track.id], "track id " + track.id + " was expected");
-    ok(!observedTrackInfoById[track.id], "track id " + track.id + " was not yet observed");
-    var observedKind = track.kind;
-    var expectedKind = expectedTrackInfoById[track.id].type;
+    ok(expectedTrackInfoById[trackId], "track id " + trackId + " was expected");
+    ok(!observedTrackInfoById[trackId], "track id " + trackId + " was not yet observed");
+    var observedKind = kind;
+    var expectedKind = expectedTrackInfoById[trackId].type;
     is(observedKind, expectedKind,
-        "track id " + track.id + " was of kind " +
+        "track id " + trackId + " was of kind " +
         observedKind + ", which matches " + expectedKind);
-    observedTrackInfoById[track.id] = expectedTrackInfoById[track.id];
+    observedTrackInfoById[trackId] = expectedTrackInfoById[trackId];
   },
 
   isTrackOnPC: function(track) {
-    return this._pc.getRemoteStreams().some(s => !!s.getTrackById(track.id));
+    return !!this.getStreamForRecvTrack(track);
   },
 
   allExpectedTracksAreObserved: function(expected, observed) {
     return Object.keys(expected).every(trackId => observed[trackId]);
   },
 
+  getWebrtcTrackId: function(receiveTrack) {
+    let matchingTransceiver = this._pc.getTransceivers().find(
+        transceiver => transceiver.receiver.track == receiveTrack);
+    if (!matchingTransceiver) {
+      return null;
+    }
+
+    return matchingTransceiver.getRemoteTrackId();
+  },
+
   setupTrackEventHandler: function() {
     this._pc.addEventListener('track', event => {
-      info(this + ": 'ontrack' event fired for " + JSON.stringify(event.track));
+      info(this + ": 'ontrack' event fired for " + event.track.id +
+                  "(SDP msid is " + this.getWebrtcTrackId(event.track) +
+                  ")");
 
-      this.checkTrackIsExpected(event.track,
-                                this.expectedRemoteTrackInfoById,
-                                this.observedRemoteTrackInfoById);
+      // TODO(bug 1403238): Checking for remote tracks needs to be completely
+      // reworked, because with the latest spec the identifiers aren't the same
+      // as they are on the other end. Ultimately, what we need to check is
+      // whether the _transceivers_ are in line with what is expected, and
+      // whether the callbacks are consistent with the transceivers.
+      let trackId = this.getWebrtcTrackId(event.track);
+      ok(!this.observedRemoteTrackInfoById[trackId],
+         "track id " + trackId + " was not yet observed");
+      this.observedRemoteTrackInfoById[trackId] = {
+        type: event.track.kind
+      };
       ok(this.isTrackOnPC(event.track), "Found track " + event.track.id);
 
       this.ensureMediaElement(event.track, 'remote');
     });
   },
 
   /**
    * Either adds a given ICE candidate right away or stores it to be added
@@ -1319,53 +1413,47 @@ PeerConnectionWrapper.prototype = {
       candidateHandler(this.label, anEvent.candidate);
     };
   },
 
   checkLocalMediaTracks : function() {
     var observed = {};
     info(this + " Checking local tracks " + JSON.stringify(this.expectedLocalTrackInfoById));
     this._pc.getSenders().forEach(sender => {
-      this.checkTrackIsExpected(sender.track, this.expectedLocalTrackInfoById, observed);
+      if (sender.track) {
+        this.checkTrackIsExpected(sender.track.id,
+                                  sender.track.kind,
+                                  this.expectedLocalTrackInfoById,
+                                  observed);
+      }
     });
 
     Object.keys(this.expectedLocalTrackInfoById).forEach(
         id => ok(observed[id], this + " local id " + id + " was observed"));
   },
 
   /**
    * Checks that we are getting the media tracks we expect.
    */
   checkMediaTracks : function() {
     this.checkLocalMediaTracks();
-
-    info(this + " Checking remote tracks " +
-         JSON.stringify(this.expectedRemoteTrackInfoById));
-
-    ok(this.allExpectedTracksAreObserved(this.expectedRemoteTrackInfoById,
-                                         this.observedRemoteTrackInfoById),
-       "All expected tracks have been observed"
-       + "\nexpected: " + JSON.stringify(this.expectedRemoteTrackInfoById)
-       + "\nobserved: " + JSON.stringify(this.observedRemoteTrackInfoById));
   },
 
   checkMsids: function() {
     var checkSdpForMsids = (desc, expectedTrackInfo, side) => {
       Object.keys(expectedTrackInfo).forEach(trackId => {
         var streamId = expectedTrackInfo[trackId].streamId;
         ok(desc.sdp.match(new RegExp("a=msid:" + streamId + " " + trackId)),
            this + ": " + side + " SDP contains stream " + streamId +
            " and track " + trackId );
       });
     };
 
-    checkSdpForMsids(this.localDescription, this.expectedLocalTrackInfoById,
+    checkSdpForMsids(this.localDescription, this.expectedSignalledTrackInfoById,
                      "local");
-    checkSdpForMsids(this.remoteDescription, this.expectedRemoteTrackInfoById,
-                     "remote");
   },
 
   markRemoteTracksAsNegotiated: function() {
     Object.values(this.observedRemoteTrackInfoById).forEach(
         trackInfo => trackInfo.negotiated = true);
   },
 
   rollbackRemoteTracksIfNotNegotiated: function() {
@@ -1456,32 +1544,66 @@ PeerConnectionWrapper.prototype = {
         return stats;
       }
       await wait(retryInterval);
     }
     throw new Error("Timeout checking for stats for track " + track.id
                     + " after at least" + timeout + "ms");
   },
 
+  getExpectedActiveReceiveTracks : function() {
+    return this._pc.getTransceivers()
+      .filter(t => {
+        return !t.stopped &&
+               t.currentDirection &&
+               (t.currentDirection != "inactive") &&
+               (t.currentDirection != "sendonly");
+      })
+      .map(t => {
+        info("Found transceiver that should be receiving RTP: mid=" + t.mid +
+             " currentDirection=" + t.currentDirection + " kind=" +
+             t.receiver.track.kind + " track-id=" + t.receiver.track.id);
+        return t.receiver.track;
+      });
+  },
+
+  getExpectedSendTracks : function() {
+    return Object.keys(this.expectedLocalTrackInfoById)
+              .map(id => this.findSendTrackByWebrtcId(id));
+  },
+
+  findReceiveTrackByWebrtcId : function(webrtcId) {
+    return this._pc.getReceivers().map(receiver => receiver.track)
+              .find(track => this.getWebrtcTrackId(track) == webrtcId);
+  },
+
+  // Send tracks use the same identifiers that go in the signaling
+  findSendTrackByWebrtcId : function(webrtcId) {
+    return this._pc.getSenders().map(sender => sender.track)
+              .filter(track => track) // strip out null
+              .find(track => track.id == webrtcId);
+  },
+
   /**
    * Wait for presence of video flow on all media elements and rtp flow on
    * all sending and receiving track involved in this test.
    *
    * @returns {Promise}
    *        A promise that resolves when media flows for all elements and tracks
    */
   waitForMediaFlow : function() {
     return Promise.all([].concat(
       this.localMediaElements.map(element => this.waitForMediaElementFlow(element)),
-      Object.keys(this.expectedRemoteTrackInfoById)
-          .map(id => this.remoteMediaElements
-              .find(e => e.srcObject.getTracks().some(t => t.id == id)))
-          .map(e => this.waitForMediaElementFlow(e)),
-      this._pc.getSenders().map(sender => this.waitForRtpFlow(sender.track)),
-      this._pc.getReceivers().map(receiver => this.waitForRtpFlow(receiver.track))));
+      this.remoteMediaElements.filter(elem =>
+          this.getExpectedActiveReceiveTracks()
+            .some(track => elem.srcObject.getTracks().some(t => t == track))
+        )
+        .map(elem => this.waitForMediaElementFlow(elem)),
+      this.getExpectedActiveReceiveTracks().map(track => this.waitForRtpFlow(track)),
+      this.getExpectedSendTracks().map(track => this.waitForRtpFlow(track))));
   },
 
   async waitForSyncedRtcp() {
     // Ensures that RTCP is present
     let ensureSyncedRtcp = async () => {
       let report = await this._pc.getStats();
       for (let [k, v] of report) {
         if (v.type.endsWith("bound-rtp") && !v.remoteId) {
@@ -1517,69 +1639,100 @@ PeerConnectionWrapper.prototype = {
       await wait(waitPeriod);
     }
     throw Error("Waiting for synced RTCP timed out after at least "
                 + maxTime + "ms");
   },
 
   /**
    * Check that correct audio (typically a flat tone) is flowing to this
-   * PeerConnection. Uses WebAudio AnalyserNodes to compare input and output
-   * audio data in the frequency domain.
+   * PeerConnection for each transceiver that should be receiving. Uses
+   * WebAudio AnalyserNodes to compare input and output audio data in the
+   * frequency domain.
    *
    * @param {object} from
    *        A PeerConnectionWrapper whose audio RTPSender we use as source for
    *        the audio flow check.
    * @returns {Promise}
-   *        A promise that resolves when we're receiving the tone from |from|.
+   *        A promise that resolves when we're receiving the tone/s from |from|.
    */
   checkReceivingToneFrom : async function(audiocontext, from,
       cancel = wait(60000, new Error("Tone not detected"))) {
-    let inputElem = from.localMediaElements[0];
+    let localTransceivers = this._pc.getTransceivers()
+      .filter(t => t.mid)
+      .filter(t => t.receiver.track.kind == "audio")
+      .sort((t1, t2) => t1.mid < t2.mid);
+    let remoteTransceivers = from._pc.getTransceivers()
+      .filter(t => t.mid)
+      .filter(t => t.receiver.track.kind == "audio")
+      .sort((t1, t2) => t1.mid < t2.mid);
 
-    // As input we use the stream of |from|'s first available audio sender.
-    let inputSenderTracks = from._pc.getSenders().map(sn => sn.track);
-    let inputAudioStream = from._pc.getLocalStreams()
-      .find(s => inputSenderTracks.some(t => t.kind == "audio" && s.getTrackById(t.id)));
-    let inputAnalyser = new AudioStreamAnalyser(audiocontext, inputAudioStream);
+    is(localTransceivers.length, remoteTransceivers.length,
+       "Same number of associated audio transceivers on remote and local.");
 
-    // It would have been nice to have a working getReceivers() here, but until
-    // we do, let's use what remote streams we have.
-    let outputAudioStream = this._pc.getRemoteStreams()
-      .find(s => s.getAudioTracks().length > 0);
-    let outputAnalyser = new AudioStreamAnalyser(audiocontext, outputAudioStream);
+    for (let i = 0; i < localTransceivers.length; i++) {
+      is(localTransceivers[i].mid, remoteTransceivers[i].mid,
+         "Transceivers at index " + i + " have the same mid.");
 
-    let error = null;
-    cancel.then(e => error = e);
+      if (!remoteTransceivers[i].sender.track) {
+        continue;
+      }
 
-    let indexOfMax = data => 
-      data.reduce((max, val, i) => (val >= data[max]) ? i : max, 0);
-
-    await outputAnalyser.waitForAnalysisSuccess(() => {
-      if (error) {
-        throw error;
+      if (remoteTransceivers[i].currentDirection == "recvonly" ||
+          remoteTransceivers[i].currentDirection == "inactive") {
+        continue;
       }
 
-      let inputData = inputAnalyser.getByteFrequencyData();
-      let outputData = outputAnalyser.getByteFrequencyData();
+      let sendTrack = remoteTransceivers[i].sender.track;
+      let inputElem = from.getMediaElementForTrack(sendTrack, "local");
+      ok(inputElem,
+         "Remote wrapper should have a media element for track id " +
+         sendTrack.id);
+      let inputAudioStream = from.getStreamForSendTrack(sendTrack);
+      ok(inputAudioStream,
+         "Remote wrapper should have a stream for track id " + sendTrack.id);
+      let inputAnalyser =
+        new AudioStreamAnalyser(audiocontext, inputAudioStream);
+
+      let recvTrack = localTransceivers[i].receiver.track;
+      let outputAudioStream = this.getStreamForRecvTrack(recvTrack);
+      ok(outputAudioStream,
+         "Local wrapper should have a stream for track id " + recvTrack.id);
+      let outputAnalyser =
+        new AudioStreamAnalyser(audiocontext, outputAudioStream);
+
+      let error = null;
+      cancel.then(e => error = e);
+
+      let indexOfMax = data =>
+        data.reduce((max, val, i) => (val >= data[max]) ? i : max, 0);
 
-      let inputMax = indexOfMax(inputData);
-      let outputMax = indexOfMax(outputData);
-      info(`Comparing maxima; input[${inputMax}] = ${inputData[inputMax]},`
-        + ` output[${outputMax}] = ${outputData[outputMax]}`);
-      if (!inputData[inputMax] || !outputData[outputMax]) {
-        return false;
-      }
+      await outputAnalyser.waitForAnalysisSuccess(() => {
+        if (error) {
+          throw error;
+        }
+
+        let inputData = inputAnalyser.getByteFrequencyData();
+        let outputData = outputAnalyser.getByteFrequencyData();
 
-      // When the input and output maxima are within reasonable distance (2% of
-      // total length, which means ~10 for length 512) from each other, we can
-      // be sure that the input tone has made it through the peer connection.
-      info(`input data length: ${inputData.length}`);
-      return Math.abs(inputMax - outputMax) < (inputData.length * 0.02);
-    });
+        let inputMax = indexOfMax(inputData);
+        let outputMax = indexOfMax(outputData);
+        info(`Comparing maxima; input[${inputMax}] = ${inputData[inputMax]},`
+          + ` output[${outputMax}] = ${outputData[outputMax]}`);
+        if (!inputData[inputMax] || !outputData[outputMax]) {
+          return false;
+        }
+
+        // When the input and output maxima are within reasonable distance (2% of
+        // total length, which means ~10 for length 512) from each other, we can
+        // be sure that the input tone has made it through the peer connection.
+        info(`input data length: ${inputData.length}`);
+        return Math.abs(inputMax - outputMax) < (inputData.length * 0.02);
+      });
+    }
   },
 
   /**
    * Get stats from the "legacy" getStats callback interface
    */
   getStatsLegacy : function(selector, onSuccess, onFail) {
     let wrapper = stats => {
       info(this + ": Got legacy stats: " + JSON.stringify(stats));
@@ -1617,16 +1770,17 @@ PeerConnectionWrapper.prototype = {
    *        The stats to check from this PeerConnectionWrapper
    */
   checkStats : function(stats, twoMachines) {
     const isWinXP = navigator.userAgent.indexOf("Windows NT 5.1") != -1;
 
     // Use spec way of enumerating stats
     var counters = {};
     for (let [key, res] of stats) {
+      info("Checking stats for " + key + " : " + res);
       // validate stats
       ok(res.id == key, "Coherent stats id");
       var nowish = Date.now() + 1000;        // TODO: clock drift observed
       var minimum = this.whenCreated - 1000; // on Windows XP (Bug 979649)
       if (isWinXP) {
         todo(false, "Can't reliably test rtcp timestamps on WinXP (Bug 979649)");
 
       } else if (false) { // Bug 1325430 - timestamps aren't working properly in update 49
@@ -1650,21 +1804,27 @@ PeerConnectionWrapper.prototype = {
       if (res.isRemote) {
         continue;
       }
       counters[res.type] = (counters[res.type] || 0) + 1;
 
       switch (res.type) {
         case "inbound-rtp":
         case "outbound-rtp": {
-          // ssrc is a 32 bit number returned as a string by spec
-          ok(res.ssrc.length > 0, "Ssrc has length");
-          ok(res.ssrc.length < 11, "Ssrc not lengthy");
-          ok(!/[^0-9]/.test(res.ssrc), "Ssrc numeric");
-          ok(parseInt(res.ssrc) < Math.pow(2,32), "Ssrc within limits");
+          // Inbound tracks won't have an ssrc if RTP is not flowing.
+          // (eg; negotiated inactive)
+          ok(res.ssrc || res.type == "inbound-rtp", "Outbound RTP stats has an ssrc.");
+
+          if (res.ssrc) {
+            // ssrc is a 32 bit number returned as a string by spec
+            ok(res.ssrc.length > 0, "Ssrc has length");
+            ok(res.ssrc.length < 11, "Ssrc not lengthy");
+            ok(!/[^0-9]/.test(res.ssrc), "Ssrc numeric");
+            ok(parseInt(res.ssrc) < Math.pow(2,32), "Ssrc within limits");
+          }
 
           if (res.type == "outbound-rtp") {
             ok(res.packetsSent !== undefined, "Rtp packetsSent");
             // We assume minimum payload to be 1 byte (guess from RFC 3550)
             ok(res.bytesSent >= res.packetsSent, "Rtp bytesSent");
           } else {
             ok(res.packetsReceived !== undefined, "Rtp packetsReceived");
             ok(res.bytesReceived >= res.packetsReceived, "Rtp bytesReceived");
@@ -1729,17 +1889,22 @@ PeerConnectionWrapper.prototype = {
       var res = stats[key];
       var type = legacyToSpecMapping[res.type] || res.type;
       if (!res.isRemote) {
         counters2[type] = (counters2[type] || 0) + 1;
       }
     }
     is(JSON.stringify(counters), JSON.stringify(counters2),
        "Spec and legacy variant of RTCStatsReport enumeration agree");
-    var nin = Object.keys(this.expectedRemoteTrackInfoById).length;
+    var nin = this._pc.getTransceivers()
+      .filter(t => {
+        return !t.stopped &&
+               (t.currentDirection != "inactive") &&
+               (t.currentDirection != "sendonly");
+      }).length;
     var nout = Object.keys(this.expectedLocalTrackInfoById).length;
     var ndata = this.dataChannels.length;
 
     // TODO(Bug 957145): Restore stronger inbound-rtp test once Bug 948249 is fixed
     //is((counters["inbound-rtp"] || 0), nin, "Have " + nin + " inbound-rtp stat(s)");
     ok((counters["inbound-rtp"] || 0) >= nin, "Have at least " + nin + " inbound-rtp stat(s) *");
 
     is(counters["outbound-rtp"] || 0, nout, "Have " + nout + " outbound-rtp stat(s)");
@@ -1805,49 +1970,46 @@ PeerConnectionWrapper.prototype = {
 
   /**
    * Compares amount of established ICE connection according to ICE candidate
    * pairs in the stats reporting with the expected amount of connection based
    * on the constraints.
    *
    * @param {object} stats
    *        The stats to check for ICE candidate pairs
-   * @param {object} counters
-   *        The counters for media and data tracks based on constraints
    * @param {object} testOptions
    *        The test options object from the PeerConnectionTest
    */
-  checkStatsIceConnections : function(stats,
-      offerConstraintsList, offerOptions, testOptions) {
+  checkStatsIceConnections : function(stats, testOptions) {
     var numIceConnections = 0;
     stats.forEach(stat => {
       if ((stat.type === "candidate-pair") && stat.selected) {
         numIceConnections += 1;
       }
     });
     info("ICE connections according to stats: " + numIceConnections);
     isnot(numIceConnections, 0, "Number of ICE connections according to stats is not zero");
     if (testOptions.bundle) {
       if (testOptions.rtcpmux) {
         is(numIceConnections, 1, "stats reports exactly 1 ICE connection");
       } else {
         is(numIceConnections, 2, "stats report exactly 2 ICE connections for media and RTCP");
       }
     } else {
-      // This code assumes that no media sections have been rejected due to
-      // codec mismatch or other unrecoverable negotiation failures.
-      var numAudioTracks =
-          sdputils.countTracksInConstraint('audio', offerConstraintsList) ||
-          ((offerOptions && offerOptions.offerToReceiveAudio) ? 1 : 0);
+      var numAudioTransceivers =
+        this._pc.getTransceivers().filter((transceiver) => {
+          return (!transceiver.stopped) && transceiver.receiver.track.kind == "audio";
+        }).length;
 
-      var numVideoTracks =
-          sdputils.countTracksInConstraint('video', offerConstraintsList) ||
-          ((offerOptions && offerOptions.offerToReceiveVideo) ? 1 : 0);
+      var numVideoTransceivers =
+        this._pc.getTransceivers().filter((transceiver) => {
+          return (!transceiver.stopped) && transceiver.receiver.track.kind == "video";
+        }).length;
 
-      var numExpectedTransports = numAudioTracks + numVideoTracks;
+      var numExpectedTransports = numAudioTransceivers + numVideoTransceivers;
       if (!testOptions.rtcpmux) {
         numExpectedTransports *= 2;
       }
 
       if (this.dataChannels.length) {
         ++numExpectedTransports;
       }
 
--- a/dom/media/tests/mochitest/templates.js
+++ b/dom/media/tests/mochitest/templates.js
@@ -78,18 +78,17 @@ function waitForAnIceCandidate(pc) {
   }).then(() => {
     ok(pc._local_ice_candidates.length > 0,
        pc + " received local trickle ICE candidates");
     isnot(pc._pc.iceGatheringState, GATH_NEW,
           pc + " ICE gathering state is not 'new'");
   });
 }
 
-function checkTrackStats(pc, rtpSenderOrReceiver, outbound) {
-  var track = rtpSenderOrReceiver.track;
+function checkTrackStats(pc, track, outbound) {
   var audio = (track.kind == "audio");
   var msg = pc + " stats " + (outbound ? "outbound " : "inbound ") +
       (audio ? "audio" : "video") + " rtp track id " + track.id;
   return pc.getStats(track).then(stats => {
     ok(pc.hasStat(stats, {
       type: outbound ? "outbound-rtp" : "inbound-rtp",
       isRemote: false,
       mediaType: audio ? "audio" : "video"
@@ -101,18 +100,18 @@ function checkTrackStats(pc, rtpSenderOr
     ok(!pc.hasStat(stats, {
       mediaType: audio ? "video" : "audio"
     }), msg + " - did not find extra stats with wrong media type");
   });
 }
 
 var checkAllTrackStats = pc => {
   return Promise.all([].concat(
-    pc._pc.getSenders().map(sender => checkTrackStats(pc, sender, true)),
-    pc._pc.getReceivers().map(receiver => checkTrackStats(pc, receiver, false))));
+    pc.getExpectedActiveReceiveTracks().map(track => checkTrackStats(pc, track, false)),
+    pc.getExpectedSendTracks().map(track => checkTrackStats(pc, track, true))));
 }
 
 // Commands run once at the beginning of each test, even when performing a
 // renegotiation test.
 var commandsPeerConnectionInitial = [
   function PC_SETUP_SIGNALING_CLIENT(test) {
     if (test.testOptions.steeplechase) {
       test.setupSignalingClient();
@@ -178,21 +177,21 @@ var commandsPeerConnectionInitial = [
   function PC_REMOTE_CHECK_INITIAL_CAN_TRICKLE_SYNC(test) {
     is(test.pcRemote._pc.canTrickleIceCandidates, null,
        "Remote trickle status should start out unknown");
   },
 ];
 
 var commandsGetUserMedia = [
   function PC_LOCAL_GUM(test) {
-    return test.pcLocal.getAllUserMedia(test.pcLocal.constraints);
+    return test.pcLocal.getAllUserMediaAndAddStreams(test.pcLocal.constraints);
   },
 
   function PC_REMOTE_GUM(test) {
-    return test.pcRemote.getAllUserMedia(test.pcRemote.constraints);
+    return test.pcRemote.getAllUserMediaAndAddStreams(test.pcRemote.constraints);
   },
 ];
 
 var commandsPeerConnectionOfferAnswer = [
   function PC_LOCAL_SETUP_ICE_HANDLER(test) {
     test.pcLocal.setupIceCandidateHandler(test);
   },
 
@@ -209,42 +208,16 @@ var commandsPeerConnectionOfferAnswer = 
 
   function PC_REMOTE_STEEPLECHASE_SIGNAL_EXPECTED_LOCAL_TRACKS(test) {
     if (test.testOptions.steeplechase) {
       send_message({"type": "remote_expected_tracks",
                     "expected_tracks": test.pcRemote.expectedLocalTrackInfoById});
     }
   },
 
-  function PC_LOCAL_GET_EXPECTED_REMOTE_TRACKS(test) {
-    if (test.testOptions.steeplechase) {
-      return test.getSignalingMessage("remote_expected_tracks").then(
-          message => {
-            test.pcLocal.expectedRemoteTrackInfoById = message.expected_tracks;
-          });
-    }
-
-    // Deep copy, as similar to steeplechase as possible
-    test.pcLocal.expectedRemoteTrackInfoById =
-      JSON.parse(JSON.stringify(test.pcRemote.expectedLocalTrackInfoById));
-  },
-
-  function PC_REMOTE_GET_EXPECTED_REMOTE_TRACKS(test) {
-    if (test.testOptions.steeplechase) {
-      return test.getSignalingMessage("local_expected_tracks").then(
-          message => {
-            test.pcRemote.expectedRemoteTrackInfoById = message.expected_tracks;
-          });
-    }
-
-    // Deep copy, as similar to steeplechase as possible
-    test.pcRemote.expectedRemoteTrackInfoById =
-      JSON.parse(JSON.stringify(test.pcLocal.expectedLocalTrackInfoById));
-  },
-
   function PC_LOCAL_CREATE_OFFER(test) {
     return test.createOffer(test.pcLocal).then(offer => {
       is(test.pcLocal.signalingState, STABLE,
          "Local create offer does not change signaling state");
     });
   },
 
   function PC_LOCAL_STEEPLECHASE_SIGNAL_OFFER(test) {
@@ -430,29 +403,23 @@ var commandsPeerConnectionOfferAnswer = 
     return test.pcRemote.getStats().then(stats => {
       test.pcRemote.checkStatsIceConnectionType(stats,
           test.testOptions.expectedRemoteCandidateType);
     });
   },
 
   function PC_LOCAL_CHECK_ICE_CONNECTIONS(test) {
     return test.pcLocal.getStats().then(stats => {
-      test.pcLocal.checkStatsIceConnections(stats,
-                                            test._offer_constraints,
-                                            test._offer_options,
-                                            test.testOptions);
+      test.pcLocal.checkStatsIceConnections(stats, test.testOptions);
     });
   },
 
   function PC_REMOTE_CHECK_ICE_CONNECTIONS(test) {
     return test.pcRemote.getStats().then(stats => {
-      test.pcRemote.checkStatsIceConnections(stats,
-                                             test._offer_constraints,
-                                             test._offer_options,
-                                             test.testOptions);
+      test.pcRemote.checkStatsIceConnections(stats, test.testOptions);
     });
   },
 
   function PC_LOCAL_CHECK_MSID(test) {
     return test.pcLocal.checkMsids();
   },
   function PC_REMOTE_CHECK_MSID(test) {
     return test.pcRemote.checkMsids();
--- a/dom/media/tests/mochitest/test_peerConnection_addSecondAudioStream.html
+++ b/dom/media/tests/mochitest/test_peerConnection_addSecondAudioStream.html
@@ -13,17 +13,17 @@
 
   runNetworkTest(function (options) {
     const test = new PeerConnectionTest(options);
     addRenegotiation(test.chain,
       [
         function PC_LOCAL_ADD_SECOND_STREAM(test) {
           test.setMediaConstraints([{audio: true}, {audio: true}],
                                    [{audio: true}]);
-          return test.pcLocal.getAllUserMedia([{audio: true}]);
+          return test.pcLocal.getAllUserMediaAndAddStreams([{audio: true}]);
         },
       ],
       [
         function PC_REMOTE_CHECK_ADDED_TRACK(test) {
           // We test both tracks to avoid an ordering problem
           is(test.pcRemote._pc.getReceivers().length, 2,
              "pcRemote should have two receivers");
           return Promise.all(test.pcRemote._pc.getReceivers().map(r => {
--- a/dom/media/tests/mochitest/test_peerConnection_addSecondAudioStreamNoBundle.html
+++ b/dom/media/tests/mochitest/test_peerConnection_addSecondAudioStreamNoBundle.html
@@ -17,17 +17,17 @@
     addRenegotiation(test.chain,
       [
         function PC_LOCAL_ADD_SECOND_STREAM(test) {
           test.setMediaConstraints([{audio: true}, {audio: true}],
                                    [{audio: true}]);
           // Since this is a NoBundle variant, adding a track will cause us to
           // go back to checking.
           test.pcLocal.expectIceChecking();
-          return test.pcLocal.getAllUserMedia([{audio: true}]);
+          return test.pcLocal.getAllUserMediaAndAddStreams([{audio: true}]);
         },
         function PC_REMOTE_EXPECT_ICE_CHECKING(test) {
           test.pcRemote.expectIceChecking();
         },
       ],
       [
         function PC_REMOTE_CHECK_ADDED_TRACK(test) {
           // We test both tracks to avoid an ordering problem
--- a/dom/media/tests/mochitest/test_peerConnection_addSecondVideoStream.html
+++ b/dom/media/tests/mochitest/test_peerConnection_addSecondVideoStream.html
@@ -16,17 +16,17 @@
     const test = new PeerConnectionTest(options);
     addRenegotiation(test.chain,
       [
         function PC_LOCAL_ADD_SECOND_STREAM(test) {
           test.setMediaConstraints([{video: true}, {video: true}],
                                    [{video: true}]);
           // Use fake:true here since the native fake device on linux doesn't
           // change color as needed by checkVideoPlaying() below.
-          return test.pcLocal.getAllUserMedia([{video: true, fake: true}]);
+          return test.pcLocal.getAllUserMediaAndAddStreams([{video: true, fake: true}]);
         },
       ],
       [
         function PC_REMOTE_CHECK_VIDEO_FLOW(test) {
           const h = new VideoStreamHelper();
           is(test.pcRemote.remoteMediaElements.length, 2,
              "Should have two remote media elements after renegotiation");
           return Promise.all(test.pcRemote.remoteMediaElements.map(video =>
--- a/dom/media/tests/mochitest/test_peerConnection_addSecondVideoStreamNoBundle.html
+++ b/dom/media/tests/mochitest/test_peerConnection_addSecondVideoStreamNoBundle.html
@@ -20,17 +20,17 @@
         function PC_LOCAL_ADD_SECOND_STREAM(test) {
           test.setMediaConstraints([{video: true}, {video: true}],
                                    [{video: true}]);
           // Since this is a NoBundle variant, adding a track will cause us to
           // go back to checking.
           test.pcLocal.expectIceChecking();
           // Use fake:true here since the native fake device on linux doesn't
           // change color as needed by checkVideoPlaying() below.
-          return test.pcLocal.getAllUserMedia([{video: true, fake: true}]);
+          return test.pcLocal.getAllUserMediaAndAddStreams([{video: true, fake: true}]);
         },
         function PC_REMOTE_EXPECT_ICE_CHECKING(test) {
           test.pcRemote.expectIceChecking();
         },
       ],
       [
         function PC_REMOTE_CHECK_VIDEO_FLOW(test) {
           const h = new VideoStreamHelper();
--- a/dom/media/tests/mochitest/test_peerConnection_addtrack_removetrack_events.html
+++ b/dom/media/tests/mochitest/test_peerConnection_addtrack_removetrack_events.html
@@ -15,53 +15,54 @@ createHTML({
 
 runNetworkTest(function (options) {
   let test = new PeerConnectionTest(options);
   let eventsPromise;
   addRenegotiation(test.chain,
     [
       function PC_LOCAL_SWAP_VIDEO_TRACKS(test) {
         return getUserMedia({video: true}).then(stream => {
+          var videoTransceiver = test.pcLocal._pc.getTransceivers()[1];
+          is(videoTransceiver.currentDirection, "sendonly",
+             "Video transceiver's current direction is sendonly");
+          is(videoTransceiver.direction, "sendrecv",
+             "Video transceiver's desired direction is sendrecv");
+
           const localStream = test.pcLocal._pc.getLocalStreams()[0];
           ok(localStream, "Should have local stream");
 
           const remoteStream = test.pcRemote._pc.getRemoteStreams()[0];
           ok(remoteStream, "Should have remote stream");
 
           const newTrack = stream.getTracks()[0];
 
           const videoSenderIndex =
             test.pcLocal._pc.getSenders().findIndex(s => s.track.kind == "video");
           isnot(videoSenderIndex, -1, "Should have video sender");
 
           test.pcLocal.removeSender(videoSenderIndex);
+          is(videoTransceiver.direction, "recvonly",
+             "Video transceiver should be recvonly after removeTrack");
           test.pcLocal.attachLocalTrack(stream.getTracks()[0], localStream);
+          is(videoTransceiver.direction, "recvonly",
+             "Video transceiver should be recvonly after addTrack");
 
-          const addTrackPromise = haveEvent(remoteStream, "addtrack",
-              wait(50000, new Error("No addtrack event")))
+          eventsPromise = haveEvent(remoteStream, "addtrack",
+              wait(50000, new Error("No addtrack event for " + newTrack.id)))
             .then(trackEvent => {
               ok(trackEvent instanceof MediaStreamTrackEvent,
                  "Expected event to be instance of MediaStreamTrackEvent");
               is(trackEvent.type, "addtrack",
                  "Expected addtrack event type");
-              is(trackEvent.track.id, newTrack.id, "Expected track in event");
+              is(test.pcRemote.getWebrtcTrackId(trackEvent.track), newTrack.id, "Expected track in event");
               is(trackEvent.track.readyState, "live",
                  "added track should be live");
             })
             .then(() => haveNoEvent(remoteStream, "addtrack"));
 
-          const remoteTrack = test.pcRemote._pc.getReceivers()
-              .map(r => r.track)
-              .find(t => t.kind == "video");
-          ok(remoteTrack, "Should have received remote track");
-          const endedPromise = haveEvent(remoteTrack, "ended",
-              wait(50000, new Error("No ended event")));
-
-          eventsPromise = Promise.all([addTrackPromise, endedPromise]);
-
           remoteStream.addEventListener("removetrack",
                                         function onRemovetrack(trackEvent) {
             ok(false, "UA shouldn't raise 'removetrack' when receiving peer connection");
           })
         });
       },
     ],
     [
--- a/dom/media/tests/mochitest/test_peerConnection_answererAddSecondAudioStream.html
+++ b/dom/media/tests/mochitest/test_peerConnection_answererAddSecondAudioStream.html
@@ -14,17 +14,17 @@
   var test;
   runNetworkTest(function (options) {
     test = new PeerConnectionTest(options);
     addRenegotiationAnswerer(test.chain,
       [
         function PC_LOCAL_ADD_SECOND_STREAM(test) {
           test.setMediaConstraints([{audio: true}, {audio: true}],
                                    [{audio: true}]);
-          return test.pcLocal.getAllUserMedia([{audio: true}]);
+          return test.pcLocal.getAllUserMediaAndAddStreams([{audio: true}]);
         },
       ]
     );
 
     test.setMediaConstraints([{audio: true}], [{audio: true}]);
     test.run();
   });
 </script>
copy from dom/media/tests/mochitest/test_peerConnection_basicAudioVideo.html
copy to dom/media/tests/mochitest/test_peerConnection_basicAudioVideoTransceivers.html
--- a/dom/media/tests/mochitest/test_peerConnection_basicAudioVideo.html
+++ b/dom/media/tests/mochitest/test_peerConnection_basicAudioVideoTransceivers.html
@@ -2,23 +2,30 @@
 <html>
 <head>
   <script type="application/javascript" src="pc.js"></script>
 </head>
 <body>
 <pre id="test">
 <script type="application/javascript">
   createHTML({
-    bug: "796890",
-    title: "Basic audio/video (separate) peer connection"
+    bug: "1290948",
+    title: "Basic audio/video with addTransceiver"
   });
 
   var test;
   runNetworkTest(function (options) {
     test = new PeerConnectionTest(options);
     test.setMediaConstraints([{audio: true}, {video: true}],
                              [{audio: true}, {video: true}]);
+    test.chain.replace("PC_LOCAL_GUM",
+      [
+        function PC_LOCAL_GUM_TRANSCEIVERS(test) {
+          return test.pcLocal.getAllUserMediaAndAddTransceivers(test.pcLocal.constraints);
+        }
+      ]);
+
     test.run();
   });
 </script>
 </pre>
 </body>
 </html>
--- a/dom/media/tests/mochitest/test_peerConnection_bug1064223.html
+++ b/dom/media/tests/mochitest/test_peerConnection_bug1064223.html
@@ -11,17 +11,17 @@
     title: "CreateOffer fails without streams or modern RTCOfferOptions"
   });
 
   runNetworkTest(function () {
     var pc = new mozRTCPeerConnection();
     var options = { mandatory: { OfferToReceiveVideo: true } }; // obsolete
 
     pc.createOffer(options).then(() => ok(false, "createOffer must fail"),
-                                 e => is(e.name, "InternalError",
+                                 e => is(e.name, "InvalidStateError",
                                          "createOffer must fail"))
     .catch(e => ok(false, e.message))
     .then(() => {
       pc.close();
       networkTestFinished();
     })
     .catch(e => ok(false, e.message));
   });
--- a/dom/media/tests/mochitest/test_peerConnection_constructedStream.html
+++ b/dom/media/tests/mochitest/test_peerConnection_constructedStream.html
@@ -45,17 +45,17 @@ runNetworkTest(() => {
     ok(receivedStream, "We should receive a stream with with the sent stream's id (" + sentStreamId + ")");
     if (!receivedStream) {
       return;
     }
 
     is(receivedStream.getTracks().length, sentTracks.length,
        "Should receive same number of tracks as were sent");
     sentTracks.forEach(t =>
-      ok(receivedStream.getTracks().find(t2 => t.id == t2.id),
+      ok(receivedStream.getTracks().find(t2 => t.id == test.pcRemote.getWebrtcTrackId(t2)),
          "The sent track (" + t.id + ") should exist on the receive side"));
   };
 
   test.chain.append([
     function PC_REMOTE_CHECK_RECEIVED_CONSTRUCTED_STREAM() {
       checkSentTracksReceived(constructedStream.id, constructedStream.getTracks());
     },
     function PC_REMOTE_CHECK_RECEIVED_DUMMY_STREAM() {
--- a/dom/media/tests/mochitest/test_peerConnection_localReofferRollback.html
+++ b/dom/media/tests/mochitest/test_peerConnection_localReofferRollback.html
@@ -13,17 +13,17 @@
 
   var test;
   runNetworkTest(function (options) {
     test = new PeerConnectionTest(options);
     addRenegotiation(test.chain, [
         function PC_LOCAL_ADD_SECOND_STREAM(test) {
           test.setMediaConstraints([{audio: true}, {audio: true}],
                                    [{audio: true}]);
-          return test.pcLocal.getAllUserMedia([{audio: true}]);
+          return test.pcLocal.getAllUserMediaAndAddStreams([{audio: true}]);
         },
 
         function PC_REMOTE_SETUP_ICE_HANDLER(test) {
           test.pcRemote.setupIceCandidateHandler(test);
           if (test.testOptions.steeplechase) {
             test.pcRemote.endOfTrickleIce.then(() => {
               send_message({"type": "end_of_trickle_ice"});
             });
@@ -32,16 +32,19 @@
 
         function PC_REMOTE_CREATE_AND_SET_OFFER(test) {
           return test.createOffer(test.pcRemote).then(offer => {
             return test.setLocalDescription(test.pcRemote, offer, HAVE_LOCAL_OFFER);
           });
         },
 
         function PC_REMOTE_ROLLBACK(test) {
+          // the negotiationNeeded slot should have been true both before and
+          // after this SLD, so the event should fire again.
+          test.pcRemote.expectNegotiationNeeded();
           return test.setLocalDescription(test.pcRemote,
                                           { type: "rollback", sdp: "" },
                                           STABLE);
         },
 
         // Rolling back should shut down gathering
         function PC_REMOTE_WAIT_FOR_END_OF_TRICKLE(test) {
           return test.pcRemote.endOfTrickleIce;
--- a/dom/media/tests/mochitest/test_peerConnection_localRollback.html
+++ b/dom/media/tests/mochitest/test_peerConnection_localRollback.html
@@ -18,16 +18,19 @@
     test.chain.insertBefore('PC_LOCAL_CREATE_OFFER', [
         function PC_REMOTE_CREATE_AND_SET_OFFER(test) {
           return test.createOffer(test.pcRemote).then(offer => {
             return test.setLocalDescription(test.pcRemote, offer, HAVE_LOCAL_OFFER);
           });
         },
 
         function PC_REMOTE_ROLLBACK(test) {
+          // the negotiationNeeded slot should have been true both before and
+          // after this SLD, so the event should fire again.
+          test.pcRemote.expectNegotiationNeeded();
           return test.setLocalDescription(test.pcRemote,
                                           { type: "rollback", sdp: "" },
                                           STABLE);
         },
 
         // Rolling back should shut down gathering
         function PC_REMOTE_WAIT_FOR_END_OF_TRICKLE(test) {
           return test.pcRemote.endOfTrickleIce;
--- a/dom/media/tests/mochitest/test_peerConnection_remoteReofferRollback.html
+++ b/dom/media/tests/mochitest/test_peerConnection_remoteReofferRollback.html
@@ -14,17 +14,17 @@
   var test;
   runNetworkTest(function (options) {
     test = new PeerConnectionTest(options);
     addRenegotiation(test.chain,
       [
         function PC_LOCAL_ADD_SECOND_STREAM(test) {
           test.setMediaConstraints([{audio: true}, {audio: true}],
                                    [{audio: true}]);
-          return test.pcLocal.getAllUserMedia([{audio: true}]);
+          return test.pcLocal.getAllUserMediaAndAddStreams([{audio: true}]);
         },
       ]
     );
     test.chain.replaceAfter('PC_REMOTE_SET_REMOTE_DESCRIPTION',
       [
         function PC_LOCAL_SETUP_ICE_HANDLER(test) {
           test.pcLocal.setupIceCandidateHandler(test);
           if (test.testOptions.steeplechase) {
--- a/dom/media/tests/mochitest/test_peerConnection_removeAudioTrack.html
+++ b/dom/media/tests/mochitest/test_peerConnection_removeAudioTrack.html
@@ -32,20 +32,21 @@
         },
         function PC_LOCAL_REMOVE_AUDIO_TRACK(test) {
           test.setOfferOptions({ offerToReceiveAudio: true });
           return test.pcLocal.removeSender(0);
         },
       ],
       [
         function PC_REMOTE_CHECK_FLOW_STOPPED(test) {
-          is(test.pcRemote._pc.getReceivers().length, 0,
-             "pcRemote should have no more receivers");
-          is(receivedTrack.readyState, "ended",
-             "The received track should have ended");
+          // Simply removing a track is not enough to cause it to be
+          // signaled as ended. Spec may change though.
+          // TODO: One last check of the spec is in order
+          is(receivedTrack.readyState, "live",
+             "The received track should not have ended");
 
           return analyser.waitForAnalysisSuccess(arr => arr[freq] < 50);
         },
       ]
     );
 
     test.setMediaConstraints([{audio: true}], [{audio: true}]);
     test.run();
--- a/dom/media/tests/mochitest/test_peerConnection_removeThenAddAudioTrack.html
+++ b/dom/media/tests/mochitest/test_peerConnection_removeThenAddAudioTrack.html
@@ -24,31 +24,40 @@
         function PC_LOCAL_REMOVE_AUDIO_TRACK(test) {
           return test.pcLocal.removeSender(0);
         },
         function PC_LOCAL_ADD_AUDIO_TRACK(test) {
           // The new track's pipeline will start with a packet count of
           // 0, but the remote side will keep its old pipeline and packet
           // count.
           test.pcLocal.disableRtpCountChecking = true;
-          return test.pcLocal.getAllUserMedia([{audio: true}]);
+          return test.pcLocal.getAllUserMediaAndAddStreams([{audio: true}]);
         },
       ],
       [
         function PC_REMOTE_CHECK_ADDED_TRACK(test) {
-          is(test.pcRemote._pc.getReceivers().length, 1,
-              "pcRemote should still have one receiver");
-          const track = test.pcRemote._pc.getReceivers()[0].track;
-          isnot(originalTrack.id, track.id, "Receiver should have changed");
+          is(test.pcRemote._pc.getTransceivers().length, 2,
+              "pcRemote should have two transceivers");
+          const track = test.pcRemote._pc.getTransceivers()[1].receiver.track;
 
           const analyser = new AudioStreamAnalyser(
               new AudioContext(), new MediaStream([track]));
           const freq = analyser.binIndexForFrequency(TEST_AUDIO_FREQ);
           return analyser.waitForAnalysisSuccess(arr => arr[freq] > 200);
         },
+        function PC_REMOTE_CHECK_REMOVED_TRACK(test) {
+          is(test.pcRemote._pc.getTransceivers().length, 2,
+              "pcRemote should have two transceivers");
+          const track = test.pcRemote._pc.getTransceivers()[0].receiver.track;
+
+          const analyser = new AudioStreamAnalyser(
+              new AudioContext(), new MediaStream([track]));
+          const freq = analyser.binIndexForFrequency(TEST_AUDIO_FREQ);
+          return analyser.waitForAnalysisSuccess(arr => arr[freq] < 50);
+        }
       ]
     );
 
     test.setMediaConstraints([{audio: true}], [{audio: true}]);
     test.run();
   });
 </script>
 </pre>
--- a/dom/media/tests/mochitest/test_peerConnection_removeThenAddAudioTrackNoBundle.html
+++ b/dom/media/tests/mochitest/test_peerConnection_removeThenAddAudioTrackNoBundle.html
@@ -7,16 +7,18 @@
 <pre id="test">
 <script type="application/javascript">
   createHTML({
     bug: "1017888",
     title: "Renegotiation: remove then add audio track"
   });
 
   runNetworkTest(function (options) {
+    options = options || { };
+    options.bundle = false;
     const test = new PeerConnectionTest(options);
     let originalTrack;
     addRenegotiation(test.chain,
       [
         function PC_REMOTE_FIND_RECEIVER(test) {
           is(test.pcRemote._pc.getReceivers().length, 1,
              "pcRemote should have one receiver");
           originalTrack = test.pcRemote._pc.getReceivers()[0].track;
@@ -24,31 +26,46 @@
         function PC_LOCAL_REMOVE_AUDIO_TRACK(test) {
           // The new track's pipeline will start with a packet count of
           // 0, but the remote side will keep its old pipeline and packet
           // count.
           test.pcLocal.disableRtpCountChecking = true;
           return test.pcLocal.removeSender(0);
         },
         function PC_LOCAL_ADD_AUDIO_TRACK(test) {
-          return test.pcLocal.getAllUserMedia([{audio: true}]);
+          return test.pcLocal.getAllUserMediaAndAddStreams([{audio: true}]);
+        },
+        function PC_LOCAL_EXPECT_ICE_CHECKING(test) {
+          test.pcLocal.expectIceChecking();
+        },
+        function PC_REMOTE_EXPECT_ICE_CHECKING(test) {
+          test.pcRemote.expectIceChecking();
         },
       ],
       [
         function PC_REMOTE_CHECK_ADDED_TRACK(test) {
-          is(test.pcRemote._pc.getReceivers().length, 1,
-              "pcRemote should still have one receiver");
-          const track = test.pcRemote._pc.getReceivers()[0].track;
-          isnot(originalTrack.id, track.id, "Receiver should have changed");
+          is(test.pcRemote._pc.getTransceivers().length, 2,
+              "pcRemote should have two transceivers");
+          const track = test.pcRemote._pc.getTransceivers()[1].receiver.track;
 
           const analyser = new AudioStreamAnalyser(
               new AudioContext(), new MediaStream([track]));
           const freq = analyser.binIndexForFrequency(TEST_AUDIO_FREQ);
           return analyser.waitForAnalysisSuccess(arr => arr[freq] > 200);
         },
+        function PC_REMOTE_CHECK_REMOVED_TRACK(test) {
+          is(test.pcRemote._pc.getTransceivers().length, 2,
+              "pcRemote should have two transceivers");
+          const track = test.pcRemote._pc.getTransceivers()[0].receiver.track;
+
+          const analyser = new AudioStreamAnalyser(
+              new AudioContext(), new MediaStream([track]));
+          const freq = analyser.binIndexForFrequency(TEST_AUDIO_FREQ);
+          return analyser.waitForAnalysisSuccess(arr => arr[freq] < 50);
+        }
       ]
     );
 
     test.chain.insertAfterEach('PC_LOCAL_CREATE_OFFER',
                                PC_LOCAL_REMOVE_BUNDLE_FROM_OFFER);
 
     test.setMediaConstraints([{audio: true}], [{audio: true}]);
     test.run();
--- a/dom/media/tests/mochitest/test_peerConnection_removeThenAddVideoTrack.html
+++ b/dom/media/tests/mochitest/test_peerConnection_removeThenAddVideoTrack.html
@@ -28,33 +28,38 @@
           // 0, but the remote side will keep its old pipeline and packet
           // count.
           test.pcLocal.disableRtpCountChecking = true;
           return test.pcLocal.removeSender(0);
         },
         function PC_LOCAL_ADD_VIDEO_TRACK(test) {
           // Use fake:true here since the native fake device on linux doesn't
           // change color as needed by checkVideoPlaying() below.
-          return test.pcLocal.getAllUserMedia([{video: true, fake: true}]);
+          return test.pcLocal.getAllUserMediaAndAddStreams([{video: true, fake: true}]);
         },
       ],
       [
         function PC_REMOTE_CHECK_ADDED_TRACK(test) {
-          is(test.pcRemote._pc.getReceivers().length, 1,
-              "pcRemote should still have one receiver");
-          const track = test.pcRemote._pc.getReceivers()[0].track;
-          isnot(originalTrack.id, track.id, "Receiver should have changed");
+          is(test.pcRemote._pc.getTransceivers().length, 2,
+              "pcRemote should have two transceivers");
+          const track = test.pcRemote._pc.getTransceivers()[1].receiver.track;
 
-          const vOriginal = test.pcRemote.remoteMediaElements.find(
-              elem => elem.id.includes(originalTrack.id));
           const vAdded = test.pcRemote.remoteMediaElements.find(
               elem => elem.id.includes(track.id));
-          ok(vOriginal.ended, "Original video element should have ended");
           return helper.checkVideoPlaying(vAdded);
         },
+        function PC_REMOTE_CHECK_REMOVED_TRACK(test) {
+          is(test.pcRemote._pc.getTransceivers().length, 2,
+              "pcRemote should have two transceivers");
+          const track = test.pcRemote._pc.getTransceivers()[0].receiver.track;
+
+          const vAdded = test.pcRemote.remoteMediaElements.find(
+              elem => elem.id.includes(track.id));
+          return helper.checkVideoPaused(vAdded, 10, 10, 16, 5000);
+        }
       ]
     );
 
     test.setMediaConstraints([{video: true}], [{video: true}]);
     test.run();
   });
 </script>
 </pre>
--- a/dom/media/tests/mochitest/test_peerConnection_removeThenAddVideoTrackNoBundle.html
+++ b/dom/media/tests/mochitest/test_peerConnection_removeThenAddVideoTrackNoBundle.html
@@ -8,16 +8,18 @@
 <pre id="test">
 <script type="application/javascript">
   createHTML({
     bug: "1017888",
     title: "Renegotiation: remove then add video track, no bundle"
   });
 
   runNetworkTest(function (options) {
+    options = options || { };
+    options.bundle = false;
     const test = new PeerConnectionTest(options);
     const helper = new VideoStreamHelper();
     var originalTrack;
     addRenegotiation(test.chain,
       [
         function PC_REMOTE_FIND_RECEIVER(test) {
           is(test.pcRemote._pc.getReceivers().length, 1,
              "pcRemote should have one receiver");
@@ -28,33 +30,44 @@
           // 0, but the remote side will keep its old pipeline and packet
           // count.
           test.pcLocal.disableRtpCountChecking = true;
           return test.pcLocal.removeSender(0);
         },
         function PC_LOCAL_ADD_VIDEO_TRACK(test) {
           // Use fake:true here since the native fake device on linux doesn't
           // change color as needed by checkVideoPlaying() below.
-          return test.pcLocal.getAllUserMedia([{video: true, fake: true}]);
+          return test.pcLocal.getAllUserMediaAndAddStreams([{video: true, fake: true}]);
+        },
+        function PC_LOCAL_EXPECT_ICE_CHECKING(test) {
+          test.pcLocal.expectIceChecking();
+        },
+        function PC_REMOTE_EXPECT_ICE_CHECKING(test) {
+          test.pcRemote.expectIceChecking();
         },
       ],
       [
         function PC_REMOTE_CHECK_ADDED_TRACK(test) {
-          is(test.pcRemote._pc.getReceivers().length, 1,
-              "pcRemote should still have one receiver");
-          const track = test.pcRemote._pc.getReceivers()[0].track;
-          isnot(originalTrack.id, track.id, "Receiver should have changed");
+          is(test.pcRemote._pc.getTransceivers().length, 2,
+              "pcRemote should have two transceivers");
+          const track = test.pcRemote._pc.getTransceivers()[1].receiver.track;
 
-          const vOriginal = test.pcRemote.remoteMediaElements.find(
-              elem => elem.id.includes(originalTrack.id));
           const vAdded = test.pcRemote.remoteMediaElements.find(
               elem => elem.id.includes(track.id));
-          ok(vOriginal.ended, "Original video element should have ended");
           return helper.checkVideoPlaying(vAdded);
         },
+        function PC_REMOTE_CHECK_REMOVED_TRACK(test) {
+          is(test.pcRemote._pc.getTransceivers().length, 2,
+              "pcRemote should have two transceivers");
+          const track = test.pcRemote._pc.getTransceivers()[0].receiver.track;
+
+          const vAdded = test.pcRemote.remoteMediaElements.find(
+              elem => elem.id.includes(track.id));
+          return helper.checkVideoPaused(vAdded, 10, 10, 16, 5000);
+        },
       ]
     );
 
     test.chain.insertAfterEach('PC_LOCAL_CREATE_OFFER',
                                PC_LOCAL_REMOVE_BUNDLE_FROM_OFFER);
 
     test.setMediaConstraints([{video: true}], [{video: true}]);
     test.run();
--- a/dom/media/tests/mochitest/test_peerConnection_removeVideoTrack.html
+++ b/dom/media/tests/mochitest/test_peerConnection_removeVideoTrack.html
@@ -1,12 +1,13 @@
 <!DOCTYPE HTML>
 <html>
 <head>
   <script type="application/javascript" src="pc.js"></script>
+  <script type="application/javascript" src="/tests/dom/canvas/test/captureStream_common.js"></script>
 </head>
 <body>
 <pre id="test">
 <script type="application/javascript">
   createHTML({
     bug: "1017888",
     title: "Renegotiation: remove video track"
   });
@@ -31,22 +32,24 @@
         function PC_LOCAL_REMOVE_VIDEO_TRACK(test) {
           test.setOfferOptions({ offerToReceiveVideo: true });
           test.setMediaConstraints([], [{video: true}]);
           return test.pcLocal.removeSender(0);
         },
       ],
       [
         function PC_REMOTE_CHECK_FLOW_STOPPED(test) {
-          is(test.pcRemote._pc.getReceivers().length, 0,
-             "pcRemote should have no more receivers");
-          is(receivedTrack.readyState, "ended",
-             "The received track should have ended");
-          is(element.ended, true,
-             "Element playing the removed track should have ended");
+          is(test.pcRemote._pc.getTransceivers().length, 1,
+              "pcRemote should have one transceiver");
+          const track = test.pcRemote._pc.getTransceivers()[0].receiver.track;
+
+          const vAdded = test.pcRemote.remoteMediaElements.find(
+              elem => elem.id.includes(track.id));
+          const helper = new VideoStreamHelper();
+          return helper.checkVideoPaused(vAdded, 10, 10, 16, 5000);
         },
       ]
     );
 
     test.setMediaConstraints([{video: true}], [{video: true}]);
     test.run();
   });
 </script>
--- a/dom/media/tests/mochitest/test_peerConnection_replaceTrack.html
+++ b/dom/media/tests/mochitest/test_peerConnection_replaceTrack.html
@@ -42,29 +42,31 @@
     return navigator.mediaDevices.getUserMedia({video:true, audio:true})
       .then(newStream => {
         window.grip = newStream;
         newTrack = newStream.getVideoTracks()[0];
         audiotrack = newStream.getAudioTracks()[0];
         isnot(newTrack, sender.track, "replacing with a different track");
         ok(!pc.getLocalStreams().some(s => s == newStream),
            "from a different stream");
-        return sender.replaceTrack(newTrack);
+        // Use wrapper function, since it updates expected tracks
+        return wrapper.senderReplaceTrack(sender, newTrack, newStream);
       })
       .then(() => {
         is(pc.getSenders().length, oldSenderCount, "same sender count");
         is(sender.track, newTrack, "sender.track has been replaced");
         ok(!pc.getSenders().map(sn => sn.track).some(t => t == oldTrack),
            "old track not among senders");
-        ok(pc.getLocalStreams().some(s => s.getTracks()
+        // Spec does not say we add this new track to any stream
+        ok(!pc.getLocalStreams().some(s => s.getTracks()
                                            .some(t => t == sender.track)),
-           "track exists among pc's local streams");
+           "track does not exist among pc's local streams");
         return sender.replaceTrack(audiotrack)
           .then(() => ok(false, "replacing with different kind should fail"),
-                e => is(e.name, "IncompatibleMediaStreamTrackError",
+                e => is(e.name, "TypeError",
                         "replacing with different kind should fail"));
       });
   }
 
   runNetworkTest(function () {
     test = new PeerConnectionTest();
     test.audioCtx = new AudioContext();
     test.setMediaConstraints([{video: true, audio: true}], [{video: true}]);
@@ -125,53 +127,61 @@
         // (440Hz for loopback devices, 1kHz for fake tracks).
         sourceNode.frequency.value = 2000;
         sourceNode.start();
 
         var destNode = test.audioCtx.createMediaStreamDestination();
         sourceNode.connect(destNode);
         var newTrack = destNode.stream.getAudioTracks()[0];
 
-        return sender.replaceTrack(newTrack)
+        return test.pcLocal.senderReplaceTrack(
+            sender, newTrack, destNode.stream)
           .then(() => {
             is(pc.getSenders().length, oldSenderCount, "same sender count");
             ok(!pc.getSenders().some(sn => sn.track == oldTrack),
                "Replaced track should be removed from senders");
-            ok(allLocalStreamsHaveSender(pc),
-               "Shouldn't have any streams without a corresponding sender");
+            // TODO: Should the PC remove local streams that no longer have
+            // any associated senders? getLocalStreams() is no longer in the
+            // spec, so the behavior here is arbitrary.
             is(sender.track, newTrack, "sender.track has been replaced");
-            ok(pc.getLocalStreams().some(s => s.getTracks()
+            // Spec does not say we add this new track to any stream
+            ok(!pc.getLocalStreams().some(s => s.getTracks()
                                                .some(t => t == sender.track)),
+               "track does not exist among pc's local streams");
           });
       }
     ]);
     test.chain.append([
       function PC_LOCAL_CHECK_WEBAUDIO_FLOW_PRESENT(test) {
         return test.pcRemote.checkReceivingToneFrom(test.audioCtx, test.pcLocal);
       }
     ]);
     test.chain.append([
       function PC_LOCAL_INVALID_ADD_VIDEOTRACKS(test) {
-        var stream = test.pcLocal._pc.getLocalStreams()[0];
-        var track = stream.getVideoTracks()[0];
-        try {
-          test.pcLocal._pc.addTrack(track, stream);
-          ok(false, "addTrack existing track should fail");
-        } catch (e) {
-          is(e.name, "InvalidParameterError",
-             "addTrack existing track should fail");
-        }
-        try {
-          test.pcLocal._pc.addTrack(track, stream);
-          ok(false, "addTrack existing track should fail");
-        } catch (e) {
-          is(e.name, "InvalidParameterError",
-             "addTrack existing track should fail");
-        }
+        let videoTransceivers = test.pcLocal._pc.getTransceivers()
+          .filter(transceiver => {
+            return !transceiver.stopped &&
+                   transceiver.receiver.track.kind == "video" &&
+                   transceiver.sender.track;
+          });
+
+        ok(videoTransceivers.length,
+           "There is at least one non-stopped video transceiver with a track.");
+
+        videoTransceivers.forEach(transceiver => {
+            var stream = test.pcLocal._pc.getLocalStreams()[0];
+            var track = transceiver.sender.track;
+            try {
+              test.pcLocal._pc.addTrack(track, stream);
+              ok(false, "addTrack existing track should fail");
+            } catch (e) {
+              is(e.name, "InvalidAccessError",
+                 "addTrack existing track should fail");
+            }
+          });
       }
     ]);
     test.run();
   });
 </script>
 </pre>
 </body>
 </html>
--- a/dom/media/tests/mochitest/test_peerConnection_replaceVideoThenRenegotiate.html
+++ b/dom/media/tests/mochitest/test_peerConnection_replaceVideoThenRenegotiate.html
@@ -31,60 +31,38 @@
     ]);
     addRenegotiation(test.chain,
       [
         function PC_LOCAL_REPLACE_VIDEO_TRACK_THEN_ADD_SECOND_STREAM(test) {
           emitter1.stop();
           emitter2.start();
           const newstream = emitter2.stream();
           const newtrack = newstream.getVideoTracks()[0];
-          return test.pcLocal.senderReplaceTrack(0, newtrack, newstream.id)
+          var sender = test.pcLocal._pc.getSenders()[0];
+          return test.pcLocal.senderReplaceTrack(sender, newtrack, newstream)
             .then(() => {
               test.setMediaConstraints([{video: true}, {video: true}],
                                        [{video: true}]);
-              // Use fake:true here since the native fake device on linux
-              // doesn't change color as needed by checkVideoPlaying() below.
-              return test.pcLocal.getAllUserMedia([{video: true, fake: true}]);
             });
         },
       ],
       [
-        function PC_REMOTE_CHECK_ORIGINAL_TRACK_ENDED(test) {
+        function PC_REMOTE_CHECK_ORIGINAL_TRACK_NOT_ENDED(test) {
+          is(test.pcRemote._pc.getTransceivers().length, 1,
+              "pcRemote should have one transceiver");
+          const track = test.pcRemote._pc.getTransceivers()[0].receiver.track;
+
           const vremote = test.pcRemote.remoteMediaElements.find(
-              elem => elem.id.includes(emitter1.stream().getTracks()[0].id));
-          if (!vremote) {
-            return Promise.reject(new Error("Couldn't find video element"));
-          }
-          ok(vremote.ended, "Original track should have ended after renegotiation");
-        },
-        function PC_REMOTE_CHECK_REPLACED_TRACK_FLOW(test) {
-          const vremote = test.pcRemote.remoteMediaElements.find(
-              elem => elem.id.includes(test.pcLocal._pc.getSenders()[0].track.id));
+              elem => elem.id.includes(track.id));
           if (!vremote) {
             return Promise.reject(new Error("Couldn't find video element"));
           }
-          return addFinallyToPromise(helper.checkVideoPlaying(vremote))
-            .finally(() => emitter2.stop())
-            .then(() => {
-              const px = helper._helper.getPixel(vremote, 10, 10);
-              const isBlue = helper._helper.isPixel(
-                  px, CaptureStreamTestHelper.prototype.blue, 5);
-              const isGrey = helper._helper.isPixel(
-                  px, CaptureStreamTestHelper.prototype.grey, 5);
-              ok(isBlue || isGrey, "replaced track should be blue or grey");
-            });
-        },
-        function PC_REMOTE_CHECK_ADDED_TRACK_FLOW(test) {
-          const vremote = test.pcRemote.remoteMediaElements.find(
-              elem => elem.id.includes(test.pcLocal._pc.getSenders()[1].track.id));
-          if (!vremote) {
-            return Promise.reject(new Error("Couldn't find video element"));
-          }
+          ok(!vremote.ended, "Original track should not have ended after renegotiation (replaceTrack is not signalled!)");
           return helper.checkVideoPlaying(vremote);
-        },
+        }
       ]
     );
 
     test.run();
    });
   });
 
 </script>
--- a/dom/media/tests/mochitest/test_peerConnection_scaleResolution.html
+++ b/dom/media/tests/mochitest/test_peerConnection_scaleResolution.html
@@ -13,73 +13,73 @@
   });
 
   const pushPrefs = (...p) => SpecialPowers.pushPrefEnv({set: p});
 
   var mustRejectWith = (msg, reason, f) =>
     f().then(() => ok(false, msg),
              e => is(e.name, reason, msg));
 
-  function testScale(codec) {
+  async function testScale(codec) {
     var pc1 = new RTCPeerConnection();
     var pc2 = new RTCPeerConnection();
 
     var add = (pc, can, failed) => can && pc.addIceCandidate(can).catch(failed);
     pc1.onicecandidate = e => add(pc2, e.candidate, generateErrorCallback());
     pc2.onicecandidate = e => add(pc1, e.candidate, generateErrorCallback());
 
     info("testing scaling with " + codec);
 
-    pc1.onnegotiationneeded = e =>
-      pc1.createOffer()
-      .then(d => pc1.setLocalDescription(codec == "VP8"
-        ? d
-        : (d.sdp = sdputils.removeAllButPayloadType(d.sdp, 126), d)))
-      .then(() => pc2.setRemoteDescription(pc1.localDescription))
-      .then(() => pc2.createAnswer()).then(d => pc2.setLocalDescription(d))
-      .then(() => pc1.setRemoteDescription(pc2.localDescription))
-      .catch(generateErrorCallback());
+    let stream = await navigator.mediaDevices.getUserMedia({ video: true });
+
+    var v1 = createMediaElement('video', 'v1');
+    var v2 = createMediaElement('video', 'v2');
+
+    var ontrackfired = new Promise(resolve => pc2.ontrack = e => resolve(e));
+    var v2loadedmetadata = new Promise(resolve => v2.onloadedmetadata = resolve);
+
+    is(v2.currentTime, 0, "v2.currentTime is zero at outset");
 
-    return navigator.mediaDevices.getUserMedia({ video: true })
-    .then(stream => {
-      var v1 = createMediaElement('video', 'v1');
-      var v2 = createMediaElement('video', 'v2');
+    v1.srcObject = stream;
+    var sender = pc1.addTrack(stream.getVideoTracks()[0], stream);
 
-      is(v2.currentTime, 0, "v2.currentTime is zero at outset");
+    await mustRejectWith(
+        "Invalid scaleResolutionDownBy must reject", "RangeError",
+        () => sender.setParameters(
+            { encodings:[{ scaleResolutionDownBy: 0.5 } ] })
+    );
 
-      v1.srcObject = stream;
-      var sender = pc1.addTrack(stream.getVideoTracks()[0], stream);
+    await sender.setParameters({ encodings: [{ maxBitrate: 60000,
+                                               scaleResolutionDownBy: 2 }] });
 
-      return mustRejectWith("Invalid scaleResolutionDownBy must reject", "RangeError",
-                            () => sender.setParameters({ encodings:
-                                                       [{ scaleResolutionDownBy: 0.5 } ] }))
-      .then(() => sender.setParameters({ encodings: [{ maxBitrate: 60000,
-                                                       scaleResolutionDownBy: 2 }] }))
-      .then(() => new Promise(resolve => pc2.ontrack = e => resolve(e)))
-      .then(e => v2.srcObject = e.streams[0])
-      .then(() => new Promise(resolve => v2.onloadedmetadata = resolve))
-      .then(() => waitUntil(() => v2.currentTime > 0 && v2.srcObject.currentTime > 0))
-      .then(() => ok(v2.currentTime > 0, "v2.currentTime is moving (" + v2.currentTime + ")"))
-      .then(() => wait(3000)) // TODO: Bug 1248154
-      .then(() => {
-        ok(v1.videoWidth > 0, "source width is positive");
-        ok(v1.videoHeight > 0, "source height is positive");
-        if (v2.videoWidth == 640 && v2.videoHeight == 480) { // TODO: Bug 1248154
-          info("Skipping test due to Bug 1248154");
-        } else {
-          is(v2.videoWidth, v1.videoWidth / 2, "sink is half the width of source");
-          is(v2.videoHeight, v1.videoHeight / 2, "sink is half the height of source");
-        }
-      })
-      .then(() => {
-        stream.getTracks().forEach(track => track.stop());
-        v1.srcObject = v2.srcObject = null;
-      })
-    })
-    .catch(generateErrorCallback());
+    let offer = await pc1.createOffer();
+    if (codec == "VP8") {
+      offer.sdp = sdputils.removeAllButPayloadType(offer.sdp, 126);
+    }
+    await pc1.setLocalDescription(offer);
+    await pc2.setRemoteDescription(pc1.localDescription);
+
+    let answer = await pc2.createAnswer();
+    await pc2.setLocalDescription(answer);
+    await pc1.setRemoteDescription(pc2.localDescription);
+    let trackevent = await ontrackfired;
+
+    v2.srcObject = trackevent.streams[0];
+
+    await v2loadedmetadata;
+
+    await waitUntil(() => v2.currentTime > 0 && v2.srcObject.currentTime > 0);
+    ok(v2.currentTime > 0, "v2.currentTime is moving (" + v2.currentTime + ")");
+
+    ok(v1.videoWidth > 0, "source width is positive");
+    ok(v1.videoHeight > 0, "source height is positive");
+    is(v2.videoWidth, v1.videoWidth / 2, "sink is half the width of source");
+    is(v2.videoHeight, v1.videoHeight / 2, "sink is half the height of source");
+    stream.getTracks().forEach(track => track.stop());
+    v1.srcObject = v2.srcObject = null;
   }
 
   pushPrefs(['media.peerconnection.video.lock_scaling', true]).then(() => {
     if (!navigator.appVersion.includes("Android")) {
       runNetworkTest(() => testScale("VP8").then(() => testScale("H264"))
                     .then(networkTestFinished));
     } else {
       // No support for H.264 on Android in automation, see Bug 1355786
--- a/dom/media/tests/mochitest/test_peerConnection_setParameters.html
+++ b/dom/media/tests/mochitest/test_peerConnection_setParameters.html
@@ -12,20 +12,21 @@ createHTML({
   visible: true
 });
 
 function parameterstest(pc) {
   ok(pc.getSenders().length > 0, "have senders");
   var sender = pc.getSenders()[0];
 
   var testParameters = (params, errorName, errorMsg) => {
+    info("Trying to set " + JSON.stringify(params));
 
     var validateParameters = (a, b) => {
       var validateEncoding = (a, b) => {
-        is(a.rid, b.rid || "", "same rid");
+        is(a.rid, b.rid, "same rid");
         is(a.maxBitrate, b.maxBitrate, "same maxBitrate");
         is(a.scaleResolutionDownBy, b.scaleResolutionDownBy,
            "same scaleResolutionDownBy");
       };
       is(a.encodings.length, (b.encodings || []).length, "same encodings");
       a.encodings.forEach((en, i) => validateEncoding(en, b.encodings[i]));
     };
 
new file mode 100644
--- /dev/null
+++ b/dom/media/tests/mochitest/test_peerConnection_transceivers.html
@@ -0,0 +1,1707 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+  <script type="application/javascript" src="pc.js"></script>
+</head>
+<body>
+<pre id="test">
+<script type="application/javascript">
+  createHTML({
+    bug: "1290948",
+    title: "Transceivers API tests"
+  });
+
+  let checkThrows = async (func, exceptionName, description) => {
+    try {
+      await func();
+      ok(false, description + " throws " + exceptionName);
+    } catch (e) {
+      is(e.name, exceptionName, description + " throws " + exceptionName);
+    }
+  };
+
+  let stopTracks = (...streams) => {
+    streams.forEach(stream => stream.getTracks().forEach(track => track.stop()));
+  };
+
+  let setRemoteDescriptionReturnTrackEvents = async (pc, desc) => {
+    let trackEvents = [];
+    let listener = e => trackEvents.push(e);
+    pc.addEventListener("track", listener);
+    await pc.setRemoteDescription(desc);
+    pc.removeEventListener("track", listener);
+
+    // basic sanity-check, simplifies testing elsewhere
+    for (let e of trackEvents) {
+      ok(e.track, "Track is set on event");
+      ok(e.receiver, "Receiver is set on event");
+      ok(e.transceiver, "Transceiver is set on event");
+      ok(e.streams, "Streams is set on event");
+      is(e.receiver, e.transceiver.receiver, "Receiver belongs to transceiver");
+      is(e.track, e.receiver.track, "Track belongs to receiver");
+    }
+
+    return trackEvents;
+  };
+
+  let trickle = (pc1, pc2) => {
+    pc1.onicecandidate = async e => {
+      info("Adding ICE candidate: " + JSON.stringify(e.candidate));
+      try {
+        await pc2.addIceCandidate(e.candidate);
+      } catch(e) {
+        ok(false, "addIceCandidate threw error: " + e.name);
+      }
+    };
+  };
+
+  let iceConnected = pc => {
+    info("Waiting for ICE connected...");
+    return new Promise((resolve, reject) => {
+      let iceCheck = () => {
+        if (pc.iceConnectionState == "connected") {
+          ok(true, "ICE connected");
+          resolve();
+        }
+
+        if (pc.iceConnectionState == "failed") {
+          ok(false, "ICE failed");
+          reject();
+        }
+      };
+
+      iceCheck();
+      pc.oniceconnectionstatechange = iceCheck;
+    });
+  };
+
+  let negotiationNeeded = pc => {
+    return new Promise(resolve => pc.onnegotiationneeded = resolve);
+  };
+
+  let logExpected = expected => {
+    info("(expected " + JSON.stringify(expected) + ")");
+  };
+
+  let hasProps = (observed, expected) => {
+
+    if (observed === expected) {
+      return true;
+    }
+
+    // If we are expecting an array, iterate over it
+    if (Array.isArray(expected)) {
+      if (!Array.isArray(observed)) {
+        ok(false, "Expected an array, but didn't get one.");
+        logExpected(expected);
+        return false;
+      }
+
+      if (observed.length !== expected.length) {
+        ok(false, "Expected array to be " + expected.length + " long, but it was " + observed.length + " long instead");
+        logExpected(expected);
+        return false;
+      }
+
+      for (let i = 0; i < expected.length; i++) {
+        if (!hasProps(observed[i], expected[i])) {
+          logExpected(expected);
+          return false;
+        }
+      }
+
+      return true;
+    }
+
+    // If we are expecting an object, check its props
+    if (typeof expected === "object" && expected !== null) {
+      let propsWeCareAbout = Object.getOwnPropertyNames(expected);
+      for (let i in propsWeCareAbout) {
+        let prop = propsWeCareAbout[i];
+        if (!hasProps(observed[prop], expected[prop])) {
+          logExpected(expected);
+          return false;
+        }
+      }
+
+      return true;
+    }
+
+    ok(false, "Expected (" + JSON.stringify(expected) + ") did not match " +
+              "observed (" + JSON.stringify(observed) + ")");
+    return false;
+  };
+
+  let checkAddTransceiverNoTrack = async () => {
+    let pc = new RTCPeerConnection();
+    hasProps(pc.getTransceivers(), []);
+
+    pc.addTransceiver("audio");
+    pc.addTransceiver("video");
+
+    // NOTE: the w3c spec doesn't say anything about transceiver order, so this
+    // may not necessarily be the same order we see on other browsers.
+    hasProps(pc.getTransceivers(),
+      [
+        {
+          receiver: {track: {kind: "audio", readyState: "live"}},
+          sender: {track: null},
+          direction: "sendrecv",
+          mid: null,
+          currentDirection: null,
+          stopped: false
+        },
+        {
+          receiver: {track: {kind: "video", readyState: "live"}},
+          sender: {track: null},
+          direction: "sendrecv",
+          mid: null,
+          currentDirection: null,
+          stopped: false
+        }
+      ]);
+
+    pc.close();
+  };
+
+  let checkAddTransceiverWithTrack = async () => {
+    let pc = new RTCPeerConnection();
+
+    let stream = await getUserMedia({audio: true, video: true});
+    let audio = stream.getAudioTracks()[0];
+    let video = stream.getVideoTracks()[0];
+
+    pc.addTransceiver(audio);
+    pc.addTransceiver(video);
+
+    hasProps(pc.getTransceivers(),
+      [
+        {
+          receiver: {track: {kind: "audio"}},
+          sender: {track: audio},
+          direction: "sendrecv",
+          mid: null,
+          currentDirection: null,
+          stopped: false
+        },
+        {
+          receiver: {track: {kind: "video"}},
+          sender: {track: video},
+          direction: "sendrecv",
+          mid: null,
+          currentDirection: null,
+          stopped: false
+        }
+      ]);
+
+    pc.close();
+    stopTracks(stream);
+  };
+
+  let checkAddTransceiverWithAddTrack = async () => {
+    let pc = new RTCPeerConnection();
+
+    let stream = await getUserMedia({audio: true, video: true});
+    let audio = stream.getAudioTracks()[0];
+    let video = stream.getVideoTracks()[0];
+
+    pc.addTrack(audio, stream);
+    pc.addTrack(video, stream);
+
+    hasProps(pc.getTransceivers(),
+      [
+        {
+          receiver: {track: {kind: "audio"}},
+          sender: {track: audio},
+          direction: "sendrecv",
+          mid: null,
+          currentDirection: null,
+          stopped: false
+        },
+        {
+          receiver: {track: {kind: "video"}},
+          sender: {track: video},
+          direction: "sendrecv",
+          mid: null,
+          currentDirection: null,
+          stopped: false
+        }
+      ]);
+
+    pc.close();
+    stopTracks(stream);
+  };
+
+  let checkAddTransceiverWithDirection = async () => {
+    let pc = new RTCPeerConnection();
+
+    pc.addTransceiver("audio", {direction: "recvonly"});
+    pc.addTransceiver("video", {direction: "recvonly"});
+
+    hasProps(pc.getTransceivers(),
+      [
+        {
+          receiver: {track: {kind: "audio"}},
+          sender: {track: null},
+          direction: "recvonly",
+          mid: null,
+          currentDirection: null,
+          stopped: false
+        },
+        {
+          receiver: {track: {kind: "video"}},
+          sender: {track: null},
+          direction: "recvonly",
+          mid: null,
+          currentDirection: null,
+          stopped: false
+        }
+      ]);
+
+    pc.close();
+  };
+
+  let checkAddTransceiverWithStream = async () => {
+    let pc = new RTCPeerConnection();
+
+    let audioStream = await getUserMedia({audio: true});
+    let videoStream = await getUserMedia({video: true});
+    let audio = audioStream.getAudioTracks()[0];
+    let video = videoStream.getVideoTracks()[0];
+
+    pc.addTransceiver(audio, {streams: [audioStream]});
+    pc.addTransceiver(video, {streams: [videoStream]});
+
+    hasProps(pc.getTransceivers(),
+      [
+        {
+          receiver: {track: {kind: "audio"}},
+          sender: {track: audio},
+          direction: "sendrecv",
+          mid: null,
+          currentDirection: null,
+          stopped: false
+        },
+        {
+          receiver: {track: {kind: "video"}},
+          sender: {track: video},
+          direction: "sendrecv",
+          mid: null,
+          currentDirection: null,
+          stopped: false
+        }
+      ]);
+
+    let offer = await pc.createOffer();
+    ok(offer.sdp.includes("a=msid:" + audioStream.id + " " + audio.id),
+      "offer contains the expected audio msid");
+    ok(offer.sdp.includes("a=msid:" + videoStream.id + " " + video.id),
+      "offer contains the expected video msid");
+
+    pc.close();
+    stopTracks(audioStream, videoStream);
+  };
+
+  let checkAddTransceiverWithOfferToReceive = async kinds => {
+    let pc = new RTCPeerConnection();
+
+    let options = {};
+
+    for (let kind of kinds) {
+      if (kind == "audio") {
+        options.offerToReceiveAudio = true;
+      } else if (kind == "video") {
+        options.offerToReceiveVideo = true;
+      }
+    }
+
+    let offer = await pc.createOffer(options);
+
+    let expected = [];
+
+    // NOTE: The ordering here is not laid out in the spec at all, this is
+    // firefox specific.
+    if (options.offerToReceiveVideo) {
+      expected.push(
+        {
+          receiver: {track: {kind: "video"}},
+          sender: {track: null},
+          direction: "recvonly",
+          mid: null,
+          currentDirection: null,
+          stopped: false
+        });
+    }
+
+    if (options.offerToReceiveAudio) {
+      expected.push(
+        {
+          receiver: {track: {kind: "audio"}},
+          sender: {track: null},
+          direction: "recvonly",
+          mid: null,
+          currentDirection: null,
+          stopped: false
+        });
+    }
+
+    hasProps(pc.getTransceivers(), expected);
+
+    pc.close();
+  };
+
+  let checkAddTransceiverWithSetRemoteOfferSending = async () => {
+    let pc1 = new RTCPeerConnection();
+    let pc2 = new RTCPeerConnection();
+
+    let stream = await getUserMedia({audio: true});
+    let track = stream.getAudioTracks()[0];
+    pc1.addTransceiver(track, {streams: [stream]});
+
+    let offer = await pc1.createOffer();
+
+    let trackEvents = await setRemoteDescriptionReturnTrackEvents(pc2, offer);
+    hasProps(trackEvents,
+      [
+        {
+          track: pc2.getTransceivers()[0].receiver.track,
+          streams: [{id: stream.id}]
+        }
+      ]);
+
+    hasProps(pc2.getTransceivers(),
+      [
+        {
+          receiver: {track: {kind: "audio"}},
+          sender: {track: null},
+          direction: "recvonly",
+          mid: "sdparta_0", // Firefox-specific
+          currentDirection: null,
+          stopped: false
+        }
+      ]);
+
+    pc1.close();
+    pc2.close();
+    stopTracks(stream);
+  };
+
+  let checkAddTransceiverWithSetRemoteOfferNoSend = async () => {
+    let pc1 = new RTCPeerConnection();
+    let pc2 = new RTCPeerConnection();
+
+    let stream = await getUserMedia({audio: true});
+    let track = stream.getAudioTracks()[0];
+    pc1.addTransceiver(track);
+    pc1.getTransceivers()[0].direction = "recvonly";
+
+    let offer = await pc1.createOffer();
+    let trackEvents = await setRemoteDescriptionReturnTrackEvents(pc2, offer);
+    hasProps(trackEvents, []);
+
+    hasProps(pc2.getTransceivers(),
+      [
+        {
+          receiver: {track: {kind: "audio"}},
+          sender: {track: null},
+          // rtcweb-jsep says this is recvonly, w3c-webrtc does not...
+          direction: "recvonly",
+          mid: "sdparta_0", // Firefox-specific
+          currentDirection: null,
+          stopped: false
+        }
+      ]);
+
+    pc1.close();
+    pc2.close();
+    stopTracks(stream);
+  };
+
+  let checkAddTransceiverBadKind = async () => {
+    let pc = new RTCPeerConnection();
+    try {
+      pc.addTransceiver("foo");
+      ok(false, 'addTransceiver("foo") throws');
+    }
+    catch (e) {
+      // NOTE(review): `catch (e if ...)` is non-standard SpiderMonkey-only
+      // syntax slated for removal; a standard catch with an instanceof check
+      // preserves the same pass/fail behavior.
+      ok(e instanceof TypeError, 'addTransceiver("foo") throws a TypeError');
+    }
+
+    hasProps(pc.getTransceivers(), []);
+
+    pc.close();
+  };
+
+  // A transceiver created by addTransceiver (no addTrack involved) must not
+  // auto-pair with an m-section from a remote offer; SRD creates a second,
+  // separate transceiver instead.
+  let checkAddTransceiverNoTrackDoesntPair = async () => {
+    let pc1 = new RTCPeerConnection();
+    let pc2 = new RTCPeerConnection();
+    pc1.addTransceiver("audio");
+    pc2.addTransceiver("audio");
+
+    let offer = await pc1.createOffer();
+    let trackEvents = await setRemoteDescriptionReturnTrackEvents(pc2, offer);
+    hasProps(trackEvents,
+      [
+        {
+          track: pc2.getTransceivers()[1].receiver.track,
+          streams: []
+        }
+      ]);
+
+    hasProps(pc2.getTransceivers(),
+      [
+        {mid: null}, // no addTrack magic, doesn't auto-pair
+        {mid: "sdparta_0"} // Created by SRD
+      ]);
+
+    pc1.close();
+    pc2.close();
+  };
+
+  // Same as the no-track case, but the answerer's transceiver was created by
+  // addTransceiver(track): giving it a track still must not make it pair with
+  // the remote offer's m-section.
+  let checkAddTransceiverWithTrackDoesntPair = async () => {
+    let pc1 = new RTCPeerConnection();
+    let pc2 = new RTCPeerConnection();
+    pc1.addTransceiver("audio");
+
+    let stream = await getUserMedia({audio: true});
+    let track = stream.getAudioTracks()[0];
+    pc2.addTransceiver(track);
+
+    let offer = await pc1.createOffer();
+    let trackEvents = await setRemoteDescriptionReturnTrackEvents(pc2, offer);
+    hasProps(trackEvents,
+      [
+        {
+          track: pc2.getTransceivers()[1].receiver.track,
+          streams: []
+        }
+      ]);
+
+    hasProps(pc2.getTransceivers(),
+      [
+        {mid: null, sender: {track}},
+        {mid: "sdparta_0", sender: {track: null}} // Created by SRD
+      ]);
+
+    pc1.close();
+    pc2.close();
+    stopTracks(stream);
+  };
+
+  // Giving a transceiver a track via sender.replaceTrack() (as opposed to
+  // addTrack) must not make it eligible for pairing with a remote offer's
+  // m-section; SRD still creates a fresh transceiver.
+  let checkAddTransceiverThenReplaceTrackDoesntPair = async () => {
+    let pc1 = new RTCPeerConnection();
+    let pc2 = new RTCPeerConnection();
+    pc1.addTransceiver("audio");
+    pc2.addTransceiver("audio");
+
+    let stream = await getUserMedia({audio: true});
+    let track = stream.getAudioTracks()[0];
+    pc2.getTransceivers()[0].sender.replaceTrack(track);
+
+    let offer = await pc1.createOffer();
+    let trackEvents = await setRemoteDescriptionReturnTrackEvents(pc2, offer);
+    hasProps(trackEvents,
+      [
+        {
+          track: pc2.getTransceivers()[1].receiver.track,
+          streams: []
+        }
+      ]);
+
+    hasProps(pc2.getTransceivers(),
+      [
+        {mid: null, sender: {track}},
+        {mid: "sdparta_0", sender: {track: null}} // Created by SRD
+      ]);
+
+    pc1.close();
+    pc2.close();
+    stopTracks(stream);
+  };
+
+  // addTrack() on a PC that already has an unpaired "audio" transceiver must
+  // reuse that transceiver, which then DOES pair with the remote offer's
+  // m-section (only one transceiver total, with the mid assigned).
+  let checkAddTransceiverThenAddTrackPairs = async () => {
+    let pc1 = new RTCPeerConnection();
+    let pc2 = new RTCPeerConnection();
+    pc1.addTransceiver("audio");
+    pc2.addTransceiver("audio");
+
+    let stream = await getUserMedia({audio: true});
+    let track = stream.getAudioTracks()[0];
+    pc2.addTrack(track, stream);
+
+    let offer = await pc1.createOffer();
+    let trackEvents = await setRemoteDescriptionReturnTrackEvents(pc2, offer);
+    hasProps(trackEvents,
+      [
+        {
+          track: pc2.getTransceivers()[0].receiver.track,
+          streams: []
+        }
+      ]);
+
+    hasProps(pc2.getTransceivers(),
+      [
+        {mid: "sdparta_0", sender: {track}}
+      ]);
+
+    pc1.close();
+    pc2.close();
+    stopTracks(stream);
+  };
+
+  // Baseline pairing case: a transceiver created by addTrack() pairs with the
+  // m-section from the remote offer (single transceiver, mid assigned by SRD).
+  let checkAddTrackPairs = async () => {
+    let pc1 = new RTCPeerConnection();
+    let pc2 = new RTCPeerConnection();
+    pc1.addTransceiver("audio");
+
+    let stream = await getUserMedia({audio: true});
+    let track = stream.getAudioTracks()[0];
+    pc2.addTrack(track, stream);
+
+    let offer = await pc1.createOffer();
+    let trackEvents = await setRemoteDescriptionReturnTrackEvents(pc2, offer);
+    hasProps(trackEvents,
+      [
+        {
+          track: pc2.getTransceivers()[0].receiver.track,
+          streams: []
+        }
+      ]);
+
+    hasProps(pc2.getTransceivers(),
+      [
+        {mid: "sdparta_0", sender: {track}}
+      ]);
+
+    pc1.close();
+    pc2.close();
+    stopTracks(stream);
+  };
+
+  // replaceTrack(null) after addTrack() must not disqualify the transceiver
+  // from pairing with the remote offer's m-section.
+  // NOTE(review): the final hasProps still expects sender: {track}; this
+  // documents that replaceTrack(null)'s effect is not observed here —
+  // presumably because the promise isn't awaited before SRD.
+  let checkReplaceTrackNullDoesntPreventPairing = async () => {
+    let pc1 = new RTCPeerConnection();
+    let pc2 = new RTCPeerConnection();
+    pc1.addTransceiver("audio");
+
+    let stream = await getUserMedia({audio: true});
+    let track = stream.getAudioTracks()[0];
+    pc2.addTrack(track, stream);
+    pc2.getTransceivers()[0].sender.replaceTrack(null);
+
+    let offer = await pc1.createOffer();
+    let trackEvents = await setRemoteDescriptionReturnTrackEvents(pc2, offer);
+    hasProps(trackEvents,
+      [
+        {
+          track: pc2.getTransceivers()[0].receiver.track,
+          streams: []
+        }
+      ]);
+
+    hasProps(pc2.getTransceivers(),
+      [
+        {mid: "sdparta_0", sender: {track}}
+      ]);
+
+    pc1.close();
+    pc2.close();
+    stopTracks(stream);
+  };
+
+  // The direction attribute must immediately reflect each assigned value
+  // ("sendonly"/"recvonly"/"inactive"/"sendrecv") without any negotiation.
+  let checkSetDirection = async () => {
+    let pc = new RTCPeerConnection();
+    pc.addTransceiver("audio");
+
+    pc.getTransceivers()[0].direction = "sendonly";
+    hasProps(pc.getTransceivers(),[{direction: "sendonly"}]);
+    pc.getTransceivers()[0].direction = "recvonly";
+    hasProps(pc.getTransceivers(),[{direction: "recvonly"}]);
+    pc.getTransceivers()[0].direction = "inactive";
+    hasProps(pc.getTransceivers(),[{direction: "inactive"}]);
+    pc.getTransceivers()[0].direction = "sendrecv";
+    hasProps(pc.getTransceivers(),[{direction: "sendrecv"}]);
+
+    pc.close();
+  };
+
+  // Walks three full offer/answer exchanges and asserts exactly when
+  // currentDirection transitions: it stays null through createOffer/SLD(offer)
+  // /SRD(offer)/createAnswer, and only updates when the answer is applied
+  // (SLD on the answerer, SRD on the offerer). Sequence: sendrecv both ways,
+  // then pc2 goes sendonly, then back to sendrecv.
+  let checkCurrentDirection = async () => {
+    let pc1 = new RTCPeerConnection();
+    let pc2 = new RTCPeerConnection();
+
+    let stream = await getUserMedia({audio: true});
+    let track = stream.getAudioTracks()[0];
+    pc1.addTrack(track, stream);
+    pc2.addTrack(track, stream);
+    hasProps(pc1.getTransceivers(), [{currentDirection: null}]);
+
+    let offer = await pc1.createOffer();
+    hasProps(pc1.getTransceivers(), [{currentDirection: null}]);
+
+    await pc1.setLocalDescription(offer);
+    hasProps(pc1.getTransceivers(), [{currentDirection: null}]);
+
+    let trackEvents = await setRemoteDescriptionReturnTrackEvents(pc2, offer);
+    hasProps(trackEvents,
+      [
+        {
+          track: pc2.getTransceivers()[0].receiver.track,
+          streams: [{id: stream.id}]
+        }
+      ]);
+
+    hasProps(pc2.getTransceivers(), [{currentDirection: null}]);
+
+    let answer = await pc2.createAnswer();
+    hasProps(pc2.getTransceivers(), [{currentDirection: null}]);
+
+    // Applying the answer is what commits the negotiated direction.
+    await pc2.setLocalDescription(answer);
+    hasProps(pc2.getTransceivers(), [{currentDirection: "sendrecv"}]);
+
+    trackEvents = await setRemoteDescriptionReturnTrackEvents(pc1, answer);
+    hasProps(trackEvents,
+      [
+        {
+          track: pc1.getTransceivers()[0].receiver.track,
+          streams: [{id: stream.id}]
+        }
+      ]);
+
+    hasProps(pc1.getTransceivers(), [{currentDirection: "sendrecv"}]);
+
+    // Renegotiate with pc2 as the offerer, now sendonly.
+    pc2.getTransceivers()[0].direction = "sendonly";
+
+    offer = await pc2.createOffer();
+    hasProps(pc2.getTransceivers(), [{currentDirection: "sendrecv"}]);
+
+    await pc2.setLocalDescription(offer);
+    hasProps(pc2.getTransceivers(), [{currentDirection: "sendrecv"}]);
+
+    trackEvents = await setRemoteDescriptionReturnTrackEvents(pc1, offer);
+    hasProps(trackEvents, []);
+
+    hasProps(pc1.getTransceivers(), [{currentDirection: "sendrecv"}]);
+
+    answer = await pc1.createAnswer();
+    hasProps(pc1.getTransceivers(), [{currentDirection: "sendrecv"}]);
+
+    // TODO(bug 1400363): Check onmute/muted
+    await pc1.setLocalDescription(answer);
+    hasProps(pc1.getTransceivers(), [{currentDirection: "recvonly"}]);
+
+    trackEvents = await setRemoteDescriptionReturnTrackEvents(pc2, answer);
+    hasProps(trackEvents, []);
+
+    hasProps(pc2.getTransceivers(), [{currentDirection: "sendonly"}]);
+
+    // And back to sendrecv; pc1 should get a track event again.
+    pc2.getTransceivers()[0].direction = "sendrecv";
+
+    offer = await pc2.createOffer();
+    hasProps(pc2.getTransceivers(), [{currentDirection: "sendonly"}]);
+
+    await pc2.setLocalDescription(offer);
+    hasProps(pc2.getTransceivers(), [{currentDirection: "sendonly"}]);
+
+    trackEvents = await setRemoteDescriptionReturnTrackEvents(pc1, offer);
+    hasProps(trackEvents, []);
+
+    hasProps(pc1.getTransceivers(), [{currentDirection: "recvonly"}]);
+
+    answer = await pc1.createAnswer();
+    hasProps(pc1.getTransceivers(), [{currentDirection: "recvonly"}]);
+
+    // TODO(bug 1400363): Check onunmute/muted
+    await pc1.setLocalDescription(answer);
+    hasProps(pc1.getTransceivers(), [{currentDirection: "sendrecv"}]);
+
+    trackEvents = await setRemoteDescriptionReturnTrackEvents(pc2, answer);
+    hasProps(trackEvents,
+      [
+        {
+          track: pc2.getTransceivers()[0].receiver.track,
+          streams: [{id: stream.id}]
+        }
+      ]);
+
+    hasProps(pc2.getTransceivers(), [{currentDirection: "sendrecv"}]);
+
+    pc1.close();
+    pc2.close();
+    stopTracks(stream);
+  };
+
+  // A "sendrecv" transceiver with no sender track must still negotiate: the
+  // answerer gets a track event (with no streams, since nothing is actually
+  // sent with a stream), and ICE must connect in both directions.
+  let checkSendrecvWithNoSendTrack = async () => {
+    let pc1 = new RTCPeerConnection();
+    let pc2 = new RTCPeerConnection();
+
+    let stream = await getUserMedia({audio: true});
+    let track = stream.getAudioTracks()[0];
+    pc1.addTransceiver("audio");
+    pc1.getTransceivers()[0].direction = "sendrecv";
+    pc2.addTrack(track, stream);
+
+    let offer = await pc1.createOffer();
+
+    let trackEvents = await setRemoteDescriptionReturnTrackEvents(pc2, offer);
+    hasProps(trackEvents,
+      [
+        {
+          track: pc2.getTransceivers()[0].receiver.track,
+          streams: []
+        }
+      ]);
+
+    trickle(pc1, pc2);
+    await pc1.setLocalDescription(offer);
+
+    let answer = await pc2.createAnswer();
+    trackEvents = await setRemoteDescriptionReturnTrackEvents(pc1, answer);
+    // Spec language doesn't say anything about checking whether the transceiver
+    // is stopped here.
+    hasProps(trackEvents,
+      [
+        {
+          track: pc1.getTransceivers()[0].receiver.track,
+          streams: [{id: stream.id}]
+        }
+      ]);
+
+    trickle(pc2, pc1);
+    await pc2.setLocalDescription(answer);
+
+    await iceConnected(pc1);
+    await iceConnected(pc2);
+
+    pc1.close();
+    pc2.close();
+    stopTracks(stream);
+  };
+
+  let checkStop = async () => {
+    let pc1 = new RTCPeerConnection();
+    let stream = await getUserMedia({audio: true});
+    let track = stream.getAudioTracks()[0];
+    pc1.addTrack(track, stream);
+
+    let offer = await pc1.createOffer();
+    await pc1.setLocalDescription(offer);
+
+    let pc2 = new RTCPeerConnection();
+    await pc2.setRemoteDescription(offer);
+
+    pc2.addTrack(track, stream);
+
+    let answer = await pc2.createAnswer();
+    await pc2.setLocalDescription(answer);
+    await pc1.setRemoteDescription(answer);
+
+    let stoppedTransceiver = pc1.getTransceivers()[0];
+    let onended = new Promise(resolve => {
+      stoppedTransceiver.receiver.track.onended = resolve;
+    });
+    stoppedTransceiver.stop();
+
+    await onended;
+
+    hasProps(pc1.getTransceivers(),
+      [
+        {
+          sender: {track: {kind: "audio"}},
+          receiver: {track: {kind: "audio", readyState: "ended"}},
+          stopped: true,
+          mid: "sdparta_0", // Firefox-specific
+          currentDirection: null,
+          direction: "sendrecv"
+        }
+      ]);
+
+    let transceiver = pc1.getTransceivers()[0];
+
+    checkThrows(() => transceiver.sender.setParameters(
+                        transceiver.sender.getParameters()),
+                "InvalidStateError", "setParameters on stopped transceiver");
+
+    let stream2 = await getUserMedia({audio: true});
+    let track2 = stream.getAudioTracks()[0];
+    checkThrows(() => transceiver.sender.replaceTrack(track2),
+                "InvalidStateError", "replaceTrack on stopped transceiver");
+
+    checkThrows(() => transceiver.direction = "sendrecv",
+                "InvalidStateError", "setDirection on stopped transceiver");
+
+    checkThrows(() => transceiver.sender.dtmf.insertDTMF("111"),
+                "InvalidStateError", "insertDTMF on stopped transceiver");
+
+    // Shouldn't throw
+    stoppedTransceiver.stop();
+
+    offer = await pc1.createOffer();
+    await pc1.setLocalDescription(offer);
+
+    stoppedTransceiver = pc2.getTransceivers()[0];
+    onended = new Promise(resolve => {
+      stoppedTransceiver.receiver.track.onended = resolve;
+    });
+
+    await pc2.setRemoteDescription(offer);
+
+    await onended;
+
+    hasProps(pc2.getTransceivers(),
+      [
+        {
+          sender: {track: {kind: "audio"}},
+          receiver: {track: {kind: "audio", readyState: "ended"}},
+          stopped: true,
+          mid: null,
+          currentDirection: null,
+          direction: "sendrecv"
+        }
+      ]);
+
+    // Shouldn't throw either
+    stoppedTransceiver.stop();
+
+    pc1.close();
+    pc2.close();
+
+    // Still shouldn't throw
+    stoppedTransceiver.stop();
+
+    stopTracks(stream);
+  };
+
+  // stop() called after createOffer but before SLD: the already-created offer
+  // still negotiates (track event fires, mid assigned), the transceiver stays
+  // stopped, and the next full renegotiation rejects the m-section (mid back
+  // to null on both ends).
+  let checkStopAfterCreateOffer = async () => {
+    let pc1 = new RTCPeerConnection();
+    let pc2 = new RTCPeerConnection();
+
+    let stream = await getUserMedia({audio: true});
+    let track = stream.getAudioTracks()[0];
+    pc1.addTrack(track, stream);
+    pc2.addTrack(track, stream);
+
+    let offer = await pc1.createOffer();
+
+    // Stop between createOffer and setLocalDescription.
+    pc1.getTransceivers()[0].stop();
+
+    await pc2.setRemoteDescription(offer)
+    trickle(pc1, pc2);
+    await pc1.setLocalDescription(offer);
+
+    let answer = await pc2.createAnswer();
+    let trackEvents = await setRemoteDescriptionReturnTrackEvents(pc1, answer);
+    // Spec language doesn't say anything about checking whether the transceiver
+    // is stopped here.
+    hasProps(trackEvents,
+      [
+        {
+          track: pc1.getTransceivers()[0].receiver.track,
+          streams: [{id: stream.id}]
+        }
+      ]);
+
+    hasProps(pc1.getTransceivers(),
+      [
+        {
+          stopped: true,
+          mid: "sdparta_0"
+        }
+      ]);
+
+    trickle(pc2, pc1);
+    await pc2.setLocalDescription(answer);
+
+    await negotiationNeeded(pc1);
+    await iceConnected(pc1);
+    await iceConnected(pc2);
+
+    // Renegotiate; the stop should now take effect on the wire.
+    offer = await pc1.createOffer();
+    await pc1.setLocalDescription(offer);
+    await pc2.setRemoteDescription(offer);
+    answer = await pc2.createAnswer();
+    await pc2.setLocalDescription(answer);
+    await pc1.setRemoteDescription(answer);
+
+    hasProps(pc1.getTransceivers(),
+      [
+        {
+          stopped: true,
+          mid: null
+        }
+      ]);
+
+    hasProps(pc2.getTransceivers(),
+      [
+        {
+          stopped: true,
+          mid: null
+        }
+      ]);
+
+    pc1.close();
+    pc2.close();
+    stopTracks(stream);
+  };
+
+  // stop() called after SLD(offer): the in-flight negotiation completes
+  // normally (track event, mid assigned), and only the follow-up
+  // renegotiation rejects the m-section on both ends.
+  let checkStopAfterSetLocalOffer = async () => {
+    let pc1 = new RTCPeerConnection();
+    let pc2 = new RTCPeerConnection();
+
+    let stream = await getUserMedia({audio: true});
+    let track = stream.getAudioTracks()[0];
+    pc1.addTrack(track, stream);
+    pc2.addTrack(track, stream);
+
+    let offer = await pc1.createOffer();
+
+    await pc2.setRemoteDescription(offer)
+    trickle(pc1, pc2);
+    await pc1.setLocalDescription(offer);
+
+    // Stop after the local offer is already applied.
+    pc1.getTransceivers()[0].stop();
+
+    let answer = await pc2.createAnswer();
+    let trackEvents = await setRemoteDescriptionReturnTrackEvents(pc1, answer);
+    // Spec language doesn't say anything about checking whether the transceiver
+    // is stopped here.
+    hasProps(trackEvents,
+      [
+        {
+          track: pc1.getTransceivers()[0].receiver.track,
+          streams: [{id: stream.id}]
+        }
+      ]);
+
+    hasProps(pc1.getTransceivers(),
+      [
+        {
+          stopped: true,
+          mid: "sdparta_0"
+        }
+      ]);
+
+    trickle(pc2, pc1);
+    await pc2.setLocalDescription(answer);
+
+    await negotiationNeeded(pc1);
+    await iceConnected(pc1);
+    await iceConnected(pc2);
+
+    // Renegotiate; the stop now lands in SDP.
+    offer = await pc1.createOffer();
+    await pc1.setLocalDescription(offer);
+    await pc2.setRemoteDescription(offer);
+    answer = await pc2.createAnswer();
+    await pc2.setLocalDescription(answer);
+    await pc1.setRemoteDescription(answer);
+
+    hasProps(pc1.getTransceivers(),
+      [
+        {
+          stopped: true,
+          mid: null
+        }
+      ]);
+
+    hasProps(pc2.getTransceivers(),
+      [
+        {
+          stopped: true,
+          mid: null
+        }
+      ]);
+
+    pc1.close();
+    pc2.close();
+    stopTracks(stream);
+  };
+
+  // stop() on the ANSWERER after SRD(offer) but before createAnswer: the stop
+  // makes it into the answer, so the offerer sees no track event and ends up
+  // with a stopped, mid-less transceiver in a single round.
+  let checkStopAfterSetRemoteOffer = async () => {
+    let pc1 = new RTCPeerConnection();
+    let pc2 = new RTCPeerConnection();
+
+    let stream = await getUserMedia({audio: true});
+    let track = stream.getAudioTracks()[0];
+    pc1.addTrack(track, stream);
+    pc2.addTrack(track, stream);
+
+    let offer = await pc1.createOffer();
+
+    await pc2.setRemoteDescription(offer)
+    await pc1.setLocalDescription(offer);
+
+    // Stop on _answerer_side now. Should take effect in answer.
+    pc2.getTransceivers()[0].stop();
+
+    let answer = await pc2.createAnswer();
+    let trackEvents = await setRemoteDescriptionReturnTrackEvents(pc1, answer);
+    hasProps(trackEvents, []);
+
+    hasProps(pc1.getTransceivers(),
+      [
+        {
+          stopped: true,
+          mid: null
+        }
+      ]);
+
+    await pc2.setLocalDescription(answer);
+
+    pc1.close();
+    pc2.close();
+    stopTracks(stream);
+  };
+
+  // stop() on the answerer after createAnswer: too late to affect that
+  // answer, so the first round completes (track event on offerer, ICE
+  // connects) and the rejection only happens in the next renegotiation.
+  let checkStopAfterCreateAnswer = async () => {
+    let pc1 = new RTCPeerConnection();
+    let pc2 = new RTCPeerConnection();
+
+    let stream = await getUserMedia({audio: true});
+    let track = stream.getAudioTracks()[0];
+    pc1.addTrack(track, stream);
+    pc2.addTrack(track, stream);
+
+    let offer = await pc1.createOffer();
+
+    await pc2.setRemoteDescription(offer)
+    trickle(pc1, pc2);
+    await pc1.setLocalDescription(offer);
+
+    let answer = await pc2.createAnswer();
+
+    // Too late for this to go in the answer. ICE should succeed.
+    pc2.getTransceivers()[0].stop();
+
+    let trackEvents = await setRemoteDescriptionReturnTrackEvents(pc1, answer);
+    hasProps(trackEvents,
+      [
+        {
+          track: pc1.getTransceivers()[0].receiver.track,
+          streams: [{id: stream.id}]
+        }
+      ]);
+
+    hasProps(pc2.getTransceivers(),
+      [
+        {
+          stopped: true,
+          mid: "sdparta_0"
+        }
+      ]);
+
+    trickle(pc2, pc1);
+    await pc2.setLocalDescription(answer);
+
+    await negotiationNeeded(pc2);
+    await iceConnected(pc1);
+    await iceConnected(pc2);
+
+    // Renegotiate; now the m-section gets rejected on both ends.
+    offer = await pc1.createOffer();
+    await pc1.setLocalDescription(offer);
+    await pc2.setRemoteDescription(offer);
+    answer = await pc2.createAnswer();
+    await pc2.setLocalDescription(answer);
+    await pc1.setRemoteDescription(answer);
+
+    hasProps(pc1.getTransceivers(),
+      [
+        {
+          stopped: true,
+          mid: null
+        }
+      ]);
+
+    hasProps(pc2.getTransceivers(),
+      [
+        {
+          stopped: true,
+          mid: null
+        }
+      ]);
+
+    pc1.close();
+    pc2.close();
+    stopTracks(stream);
+  };
+
+  // stop() on the answerer after the whole first negotiation has finished
+  // (SLD(answer) applied): ICE still succeeds, negotiationneeded fires, and
+  // the next renegotiation rejects the m-section on both ends.
+  let checkStopAfterSetLocalAnswer = async () => {
+    let pc1 = new RTCPeerConnection();
+    let pc2 = new RTCPeerConnection();
+
+    let stream = await getUserMedia({audio: true});
+    let track = stream.getAudioTracks()[0];
+    pc1.addTrack(track, stream);
+    pc2.addTrack(track, stream);
+
+    let offer = await pc1.createOffer();
+
+    await pc2.setRemoteDescription(offer)
+    trickle(pc1, pc2);
+    await pc1.setLocalDescription(offer);
+
+    let answer = await pc2.createAnswer();
+
+    let trackEvents = await setRemoteDescriptionReturnTrackEvents(pc1, answer);
+    hasProps(trackEvents,
+      [
+        {
+          track: pc1.getTransceivers()[0].receiver.track,
+          streams: [{id: stream.id}]
+        }
+      ]);
+
+    trickle(pc2, pc1);
+    await pc2.setLocalDescription(answer);
+
+    // ICE should succeed.
+    pc2.getTransceivers()[0].stop();
+
+    hasProps(pc2.getTransceivers(),
+      [
+        {
+          stopped: true,
+          mid: "sdparta_0"
+        }
+      ]);
+
+    await negotiationNeeded(pc2);
+    await iceConnected(pc1);
+    await iceConnected(pc2);
+
+    // Renegotiate; the stop now lands in SDP for both sides.
+    offer = await pc1.createOffer();
+    await pc1.setLocalDescription(offer);
+    await pc2.setRemoteDescription(offer);
+    answer = await pc2.createAnswer();
+    await pc2.setLocalDescription(answer);
+    await pc1.setRemoteDescription(answer);
+
+    hasProps(pc1.getTransceivers(),
+      [
+        {
+          stopped: true,
+          mid: null
+        }
+      ]);
+
+    hasProps(pc2.getTransceivers(),
+      [
+        {
+          stopped: true,
+          mid: null
+        }
+      ]);
+
+    pc1.close();
+    pc2.close();
+    stopTracks(stream);
+  };
+
+  // Calling transceiver.stop() on an already-closed RTCPeerConnection must
+  // throw InvalidStateError.
+  let checkStopAfterClose = async () => {
+    let pc1 = new RTCPeerConnection();
+    let pc2 = new RTCPeerConnection();
+
+    let stream = await getUserMedia({audio: true});
+    let track = stream.getAudioTracks()[0];
+    pc1.addTrack(track, stream);
+    pc2.addTrack(track, stream);
+
+    let offer = await pc1.createOffer();
+    await pc2.setRemoteDescription(offer)
+    await pc1.setLocalDescription(offer);
+    let answer = await pc2.createAnswer();
+    await pc2.setLocalDescription(answer);
+    await pc1.setRemoteDescription(answer);
+
+    pc1.close();
+    pc2.close();
+    await checkThrows(() => pc1.getTransceivers()[0].stop(),
+                      "InvalidStateError",
+                      "Stopping a transceiver on a closed PC should throw.");
+    stopTracks(stream);
+  };
+
+  // Local rollback of an offer must clear the mid but must NOT undo
+  // direction changes, replaceTrack, or (second phase) stop().
+  let checkLocalRollback = async () => {
+    let pc = new RTCPeerConnection();
+
+    let stream = await getUserMedia({audio: true});
+    let track = stream.getAudioTracks()[0];
+    pc.addTrack(track, stream);
+
+    let offer = await pc.createOffer();
+    await pc.setLocalDescription(offer);
+
+    hasProps(pc.getTransceivers(),
+      [
+        {
+          receiver: {track: {kind: "audio"}},
+          sender: {track},
+          direction: "sendrecv",
+          mid: "sdparta_0", // Firefox-specific
+          currentDirection: null,
+          stopped: false
+        }
+      ]);
+
+    // Verify that rollback doesn't stomp things it should not
+    pc.getTransceivers()[0].direction = "sendonly";
+    let stream2 = await getUserMedia({audio: true});
+    let track2 = stream2.getAudioTracks()[0];
+    await pc.getTransceivers()[0].sender.replaceTrack(track2);
+
+    await pc.setLocalDescription({type: "rollback"});
+
+    // Only mid is rolled back; direction and replaced track survive.
+    hasProps(pc.getTransceivers(),
+      [
+        {
+          receiver: {track: {kind: "audio"}},
+          sender: {track: track2},
+          direction: "sendonly",
+          mid: null,
+          currentDirection: null,
+          stopped: false
+        }
+      ]);
+
+    // Make sure stop() isn't rolled back either.
+    offer = await pc.createOffer();
+    await pc.setLocalDescription(offer);
+    pc.getTransceivers()[0].stop();
+    await pc.setLocalDescription({type: "rollback"});
+
+    hasProps(pc.getTransceivers(), [{ stopped: true }]);
+
+    stopTracks(stream);
+    pc.close();
+  };
+
+  // Remote rollback semantics: an SRD-created transceiver is REMOVED (and
+  // left stopped, mid null, receive track ended) when the offer is rolled
+  // back — even if replaceTrack/setDirection touched it — unless addTrack
+  // touched it, in which case it survives with mid cleared. Also verifies
+  // that rollback of a remote track removal re-fires the track event, that
+  // signaling still works after repeated rollbacks, and that a remote
+  // stop() cannot be rolled back.
+  let checkRemoteRollback = async () => {
+    let pc1 = new RTCPeerConnection();
+
+    let stream = await getUserMedia({audio: true});
+    let track = stream.getAudioTracks()[0];
+    pc1.addTrack(track, stream);
+
+    let offer = await pc1.createOffer();
+
+    let pc2 = new RTCPeerConnection();
+    await pc2.setRemoteDescription(offer);
+
+    let removedTransceiver = pc2.getTransceivers()[0];
+
+    let onended = new Promise(resolve => {
+      removedTransceiver.receiver.track.onended = resolve;
+    });
+
+    await pc2.setRemoteDescription({type: "rollback"});
+
+    // Transceiver should be _gone_
+    hasProps(pc2.getTransceivers(), []);
+
+    hasProps(removedTransceiver,
+      {
+        stopped: true,
+        mid: null,
+        currentDirection: null
+      }
+    );
+
+    // Removal must end the orphaned receive track.
+    await onended;
+
+    hasProps(removedTransceiver,
+      {
+        receiver: {track: {readyState: "ended"}},
+        stopped: true,
+        mid: null,
+        currentDirection: null
+      }
+    );
+
+    // Setting the same offer again should do the same thing as before
+    await pc2.setRemoteDescription(offer);
+    hasProps(pc2.getTransceivers(),
+      [
+        {
+          receiver: {track: {kind: "audio"}},
+          sender: {track: null},
+          direction: "recvonly",
+          mid: "sdparta_0", // Firefox-specific
+          currentDirection: null,
+          stopped: false
+        }
+      ]);
+
+    // Give pc2 a track with replaceTrack
+    let stream2 = await getUserMedia({audio: true});
+    let track2 = stream2.getAudioTracks()[0];
+    await pc2.getTransceivers()[0].sender.replaceTrack(track2);
+    pc2.getTransceivers()[0].direction = "sendrecv";
+    hasProps(pc2.getTransceivers(),
+      [
+        {
+          receiver: {track: {kind: "audio"}},
+          sender: {track: track2},
+          direction: "sendrecv",
+          mid: "sdparta_0", // Firefox-specific
+          currentDirection: null,
+          stopped: false
+        }
+      ]);
+
+    await pc2.setRemoteDescription({type: "rollback"});
+
+    // Transceiver should be _gone_, again. replaceTrack doesn't prevent this,
+    // nor does setDirection.
+    hasProps(pc2.getTransceivers(), []);
+
+    // Setting the same offer for a _third_ time should do the same thing
+    await pc2.setRemoteDescription(offer);
+    hasProps(pc2.getTransceivers(),
+      [
+        {
+          receiver: {track: {kind: "audio"}},
+          sender: {track: null},
+          direction: "recvonly",
+          mid: "sdparta_0", // Firefox-specific
+          currentDirection: null,
+          stopped: false
+        }
+      ]);
+
+    // We should be able to add the same track again
+    pc2.addTrack(track2, stream2);
+    hasProps(pc2.getTransceivers(),
+      [
+        {
+          receiver: {track: {kind: "audio"}},
+          sender: {track: track2},
+          direction: "sendrecv",
+          mid: "sdparta_0", // Firefox-specific
+          currentDirection: null,
+          stopped: false
+        }
+      ]);
+
+    await pc2.setRemoteDescription({type: "rollback"});
+    // Transceiver should _not_ be gone this time, because addTrack touched it.
+    hasProps(pc2.getTransceivers(),
+      [
+        {
+          receiver: {track: {kind: "audio"}},
+          sender: {track: track2},
+          direction: "sendrecv",
+          mid: null,
+          currentDirection: null,
+          stopped: false
+        }
+      ]);
+
+    // Complete negotiation so we can test interactions with transceiver.stop()
+    await pc1.setLocalDescription(offer);
+
+    // After all this SRD/rollback, we should still get the track event
+    let trackEvents = await setRemoteDescriptionReturnTrackEvents(pc2, offer);
+    hasProps(trackEvents,
+      [
+        {
+          track: pc2.getTransceivers()[0].receiver.track,
+          streams: [{id: stream.id}]
+        }
+      ]);
+
+    let answer = await pc2.createAnswer();
+    await pc2.setLocalDescription(answer);
+
+    // Make sure all this rollback hasn't messed up the signaling
+    trackEvents = await setRemoteDescriptionReturnTrackEvents(pc1, answer);
+    hasProps(trackEvents,
+      [
+        {
+          track: pc1.getTransceivers()[0].receiver.track,
+          streams: [{id: stream2.id}]
+        }
+      ]);
+    hasProps(pc1.getTransceivers(),
+      [
+        {
+          receiver: {track: {kind: "audio"}},
+          sender: {track},
+          direction: "sendrecv",
+          mid: "sdparta_0",
+          currentDirection: "sendrecv",
+          stopped: false
+        }
+      ]);
+
+    // Don't bother waiting for ICE and such
+
+    // Check to see whether rolling back a remote track removal works
+    pc1.getTransceivers()[0].direction = "recvonly";
+    offer = await pc1.createOffer();
+
+    trackEvents = await setRemoteDescriptionReturnTrackEvents(pc2, offer);
+    hasProps(trackEvents, []);
+
+    // Rolling back the removal should re-fire the track event.
+    trackEvents =
+      await setRemoteDescriptionReturnTrackEvents(pc2, {type: "rollback"});
+    hasProps(trackEvents,
+      [
+        {
+          track: pc2.getTransceivers()[0].receiver.track,
+          streams: [{id: stream.id}]
+        }
+      ]);
+
+    // Check to see that stop() cannot be rolled back
+    pc1.getTransceivers()[0].stop();
+    offer = await pc1.createOffer();
+
+    await pc2.setRemoteDescription(offer);
+    hasProps(pc2.getTransceivers(),
+      [
+        {
+          receiver: {track: {kind: "audio"}},
+          sender: {track: track2},
+          direction: "sendrecv",
+          mid: null,
+          currentDirection: null,
+          stopped: true
+        }
+      ]);
+
+    // stop() cannot be rolled back!
+    await pc2.setRemoteDescription({type: "rollback"});
+    hasProps(pc2.getTransceivers(),
+      [
+        {
+          receiver: {track: {kind: "audio"}},
+          sender: {track: {kind: "audio"}},
+          direction: "sendrecv",
+          mid: null,
+          currentDirection: null,
+          stopped: true
+        }
+      ]);
+
+    pc1.close();
+    pc2.close();
+    stopTracks(stream, stream2);
+  };
+
+  let checkMsectionReuse = async () => {
+    // Use max-compat to make it easier to check for disabled m-sections
+    let pc1 = new RTCPeerConnection({ bundlePolicy: "max-compat" });
+    let pc2 = new RTCPeerConnection({ bundlePolicy: "max-compat" });
+
+    let stream = await getUserMedia({audio: true});
+    let track = stream.getAudioTracks()[0];
+    pc1.addTrack(track, stream);
+
+    let offer = await pc1.createOffer();
+    await pc1.setLocalDescription(offer);
+    await pc2.setRemoteDescription(offer);
+
+    // answerer stops transceiver to reject m-section
+    pc2.getTransceivers()[0].stop();
+
+    let answer = await pc2.createAnswer();
+    await pc2.setLocalDescription(answer);
+    await pc1.setRemoteDescription(answer);
+
+    hasProps(pc1.getTransceivers(),
+      [
+        {
+          mid: null,
+          currentDirection: null,
+          stopped: true
+        }
+      ]);
+
+    hasProps(pc2.getTransceivers(),
+      [
+        {
+          mid: null,
+          currentDirection: null,
+          stopped: true
+        }
+      ]);
+
+    // Check that m-section is reused on both ends
+    let stream2 = await getUserMedia({audio: true});
+    let track2 = stream2.getAudioTracks()[0];
+
+    pc1.addTrack(track2, stream2);
+    offer = await pc1.createOffer();
+    is(offer.sdp.match(/m=/g).length, 1, "Exactly one m-line in offer, because it was reused");
+    hasProps(pc1.getTransceivers(),
+      [
+        {
+          stopped: true
+        },
+        {
+          sender: {track: track2}
+        }
+      ]);
+
+
+    pc2.addTrack(track, stream);
+    offer = await pc2.createOffer();
+    is(offer.sdp.match(/m=/g).length, 1, "Exactly one m-line in offer, because it was reused");
+    hasProps(pc2.getTransceivers(),
+      [
+        {
+          stopped: true
+        },
+        {
+          sender: {track}
+        }
+      ]);
+
+    await pc2.setLocalDescription(offer);
+    await pc1.setRemoteDescription(offer);
+    answer = await pc1.createAnswer();
+    await pc1.setLocalDescription(answer);
+    await pc2.setRemoteDescription(answer);
+    hasProps(pc1.getTransceivers(),
+      [
+        {},
+        {
+          sender: {track: track2},
+          currentDirection: "sendrecv"
+        }
+      ]);
+
+    hasProps(pc2.getTransceivers(),
+      [
+        {},
+        {
+          sender: {track},
+          currentDirection: "sendrecv"
+        }
+      ]);
+
+    // stop the transceiver, and add a track. Verify that we don't reuse
+    // prematurely in our offer. (There should be one rejected m-section, and a
+    // new one for the new track)
+    pc1.getTransceivers()[1].stop();
+    let stream3 = await getUserMedia({audio: true});
+    let track3 = stream3.getAudioTracks()[0];
+    pc1.addTrack(track3, stream3);
+    offer = await pc1.createOffer();
+    is(offer.sdp.match(/m=/g).length, 2, "Exactly 2 m-lines in offer, because it is too early to reuse");
+    is(offer.sdp.match(/m=audio 0 /g).length, 1, "One m-line is rejected");
+
+    await pc1.setLocalDescription(offer);
+
+    let trackEvents = await setRemoteDescriptionReturnTrackEvents(pc2, offer);
+    hasProps(trackEvents,
+      [
+        {
+          track: pc2.getTransceivers()[2].receiver.track,
+          streams: [{id: stream3.id}]
+        }
+      ]);
+
+    answer = await pc2.createAnswer();
+    await pc2.setLocalDescription(answer);
+
+    trackEvents = await setRemoteDescriptionReturnTrackEvents(pc1, answer);
+    hasProps(trackEvents, []);
+
+    hasProps(pc2.getTransceivers(),
+      [
+        {},
+        {
+          stopped: true
+        },
+        {
+          mid: "sdparta_1", // Firefox-specific
+          sender: {track: null},
+          currentDirection: "recvonly"
+        }
+      ]);
+
+    pc2.addTrack(track3, stream3);
+    // There are two ways to handle this new track; reuse the recvonly
+    // transceiver created above, or create a new transceiver and reuse the
+    // disabled m-section. We're supposed to do the former.
+    offer = await pc2.createOffer();
+    is(offer.sdp.match(/m=/g).length, 2, "Exactly 2 m-lines in offer");
+    is(offer.sdp.match(/m=audio 0 /g).length, 1, "One m-line is rejected, because the other was used");
+
+    hasProps(pc2.getTransceivers(),
+      [
+        {},
+        {
+          stopped: true
+        },
+        {
+          mid: "sdparta_1", // Firefox-specific
+          sender: {track: track3},
+          currentDirection: "recvonly",
+          direction: "sendrecv"
+        }
+      ]);
+
+    // Add _another_ track; this should reuse the disabled m-section
+    let stream4 = await getUserMedia({audio: true});
+    let track4 = stream4.getAudioTracks()[0];
+    pc2.addTrack(track4, stream4);
+    offer = await pc2.createOffer();
+    await pc2.setLocalDescription(offer);
+    hasProps(pc2.getTransceivers(),
+      [
+        {}, {},
+        {
+          mid: "sdparta_1", // Firefox-specific
+        },
+        {
+          sender: {track: track4},
+          mid: "sdparta_0" // Firefox-specific
+        }
+      ]);
+    is(offer.sdp.match(/m=/g).length, 2, "Exactly 2 m-lines in offer, because m-section was reused");
+    is(offer.sdp.match(/m=audio 0 /g), null, "No rejected m-line, because it was reused");
+
+    pc1.close();
+    pc2.close();
+    stopTracks(stream, stream2, stream3, stream4);
+  };
+
+  runNetworkTest(async () => {
+    await checkAddTransceiverNoTrack();
+    await checkAddTransceiverWithTrack();
+    await checkAddTransceiverWithAddTrack();
+    await checkAddTransceiverWithDirection();
+    await checkAddTransceiverWithStream();
+    await checkAddTransceiverWithOfferToReceive(["audio"]);
+    await checkAddTransceiverWithOfferToReceive(["video"]);
+    await checkAddTransceiverWithOfferToReceive(["audio", "video"]);
+    await checkAddTransceiverWithSetRemoteOfferSending();
+    await checkAddTransceiverWithSetRemoteOfferNoSend();
+    await checkAddTransceiverBadKind();
+    await checkSetDirection();
+    await checkCurrentDirection();
+    await checkSendrecvWithNoSendTrack();
+    await checkAddTransceiverNoTrackDoesntPair();
+    await checkAddTransceiverWithTrackDoesntPair();
+    await checkAddTransceiverThenReplaceTrackDoesntPair();
+    await checkAddTransceiverThenAddTrackPairs();
+    await checkAddTrackPairs();
+    await checkReplaceTrackNullDoesntPreventPairing();
+    await checkStop();
+    await checkStopAfterCreateOffer();
+    await checkStopAfterSetLocalOffer();
+    await checkStopAfterSetRemoteOffer();
+    await checkStopAfterCreateAnswer();
+    await checkStopAfterSetLocalAnswer();
+    await checkStopAfterClose();
+    await checkLocalRollback();
+    await checkRemoteRollback();
+    await checkMsectionReuse();
+    return SimpleTest.finish();
+  });
+</script>
+</pre>
+</body>
+</html>
--- a/dom/media/tests/mochitest/test_peerConnection_twoAudioTracksInOneStream.html
+++ b/dom/media/tests/mochitest/test_peerConnection_twoAudioTracksInOneStream.html
@@ -14,35 +14,23 @@
   var test;
   runNetworkTest(function (options) {
     test = new PeerConnectionTest(options);
     test.chain.insertAfter("PC_REMOTE_GET_OFFER", [
         function PC_REMOTE_OVERRIDE_STREAM_IDS_IN_OFFER(test) {
           test._local_offer.sdp = test._local_offer.sdp.replace(
               /a=msid:[^\s]*/g,
               "a=msid:foo");
-        },
-        function PC_REMOTE_OVERRIDE_EXPECTED_STREAM_IDS(test) {
-          Object.keys(
-              test.pcRemote.expectedRemoteTrackInfoById).forEach(trackId => {
-                test.pcRemote.expectedRemoteTrackInfoById[trackId].streamId = "foo";
-              });
         }
     ]);
     test.chain.insertAfter("PC_LOCAL_GET_ANSWER", [
         function PC_LOCAL_OVERRIDE_STREAM_IDS_IN_ANSWER(test) {
           test._remote_answer.sdp = test._remote_answer.sdp.replace(
               /a=msid:[^\s]*/g,
               "a=msid:foo");
-        },
-        function PC_LOCAL_OVERRIDE_EXPECTED_STREAM_IDS(test) {
-          Object.keys(
-              test.pcLocal.expectedRemoteTrackInfoById).forEach(trackId => {
-                test.pcLocal.expectedRemoteTrackInfoById[trackId].streamId = "foo";
-              });
         }
     ]);
     test.setMediaConstraints([{audio: true}, {audio: true}],
                              [{audio: true}, {audio: true}]);
     test.run();
   });
 </script>
 </pre>
--- a/dom/media/tests/mochitest/test_peerConnection_twoVideoTracksInOneStream.html
+++ b/dom/media/tests/mochitest/test_peerConnection_twoVideoTracksInOneStream.html
@@ -14,35 +14,23 @@
   var test;
   runNetworkTest(function (options) {
     test = new PeerConnectionTest(options);
     test.chain.insertAfter("PC_REMOTE_GET_OFFER", [
         function PC_REMOTE_OVERRIDE_STREAM_IDS_IN_OFFER(test) {
           test._local_offer.sdp = test._local_offer.sdp.replace(
               /a=msid:[^\s]*/g,
               "a=msid:foo");
-        },
-        function PC_REMOTE_OVERRIDE_EXPECTED_STREAM_IDS(test) {
-          Object.keys(
-              test.pcRemote.expectedRemoteTrackInfoById).forEach(trackId => {
-                test.pcRemote.expectedRemoteTrackInfoById[trackId].streamId = "foo";
-              });
         }
     ]);
     test.chain.insertAfter("PC_LOCAL_GET_ANSWER", [
         function PC_LOCAL_OVERRIDE_STREAM_IDS_IN_ANSWER(test) {
           test._remote_answer.sdp = test._remote_answer.sdp.replace(
               /a=msid:[^\s]*/g,
               "a=msid:foo");
-        },
-        function PC_LOCAL_OVERRIDE_EXPECTED_STREAM_IDS(test) {
-          Object.keys(
-              test.pcLocal.expectedRemoteTrackInfoById).forEach(trackId => {
-                test.pcLocal.expectedRemoteTrackInfoById[trackId].streamId = "foo";
-              });
         }
     ]);
     test.setMediaConstraints([{video: true}, {video: true}],
                              [{video: true}, {video: true}]);
     test.run();
   });
 </script>
 </pre>
--- a/dom/media/tests/mochitest/test_peerConnection_verifyAudioAfterRenegotiation.html
+++ b/dom/media/tests/mochitest/test_peerConnection_verifyAudioAfterRenegotiation.html
@@ -43,17 +43,17 @@
       }
     ]);
 
     addRenegotiation(test.chain,
       [
         function PC_LOCAL_ADD_SECOND_STREAM(test) {
           test.setMediaConstraints([{audio: true}],
                                    []);
-          return test.pcLocal.getAllUserMedia([{audio: true}]);
+          return test.pcLocal.getAllUserMediaAndAddStreams([{audio: true}]);
         },
       ]
     );
 
     test.chain.append([
       function CHECK_ASSUMPTIONS2() {
         is(test.pcLocal.localMediaElements.length, 2,
            "pcLocal should have two media elements");
--- a/dom/media/tests/mochitest/test_peerConnection_verifyVideoAfterRenegotiation.html
+++ b/dom/media/tests/mochitest/test_peerConnection_verifyVideoAfterRenegotiation.html
@@ -73,17 +73,17 @@ runNetworkTest(() => {
 
   addRenegotiation(test.chain,
     [
       function PC_LOCAL_ADD_SECOND_STREAM(test) {
         canvas2 = h2.createAndAppendElement('canvas', 'source_canvas2');
         h2.drawColor(canvas2, h2.blue);
         stream2 = canvas2.captureStream(0);
 
-        // can't use test.pcLocal.getAllUserMedia([{video: true}]);
+        // can't use test.pcLocal.getAllUserMediaAndAddStreams([{video: true}]);
         // because it doesn't let us substitute the capture stream
         test.pcLocal.attachLocalStream(stream2);
       }
     ]
   );
 
   test.chain.append([
     function FIND_REMOTE2_VIDEO() {
--- a/dom/tests/mochitest/general/test_interfaces.js
+++ b/dom/tests/mochitest/general/test_interfaces.js
@@ -810,16 +810,18 @@ var interfaceNamesInGlobalScope =
     "RTCPeerConnection",
 // IMPORTANT: Do not change this list without review from a DOM peer!
     "RTCPeerConnectionIceEvent",
 // IMPORTANT: Do not change this list without review from a DOM peer!
     "RTCRtpReceiver",
 // IMPORTANT: Do not change this list without review from a DOM peer!
     "RTCRtpSender",
 // IMPORTANT: Do not change this list without review from a DOM peer!
+    "RTCRtpTransceiver",
+// IMPORTANT: Do not change this list without review from a DOM peer!
     "RTCSessionDescription",
 // IMPORTANT: Do not change this list without review from a DOM peer!
     "RTCStatsReport",
 // IMPORTANT: Do not change this list without review from a DOM peer!
     "RTCTrackEvent",
 // IMPORTANT: Do not change this list without review from a DOM peer!
     "Screen",
 // IMPORTANT: Do not change this list without review from a DOM peer!
--- a/dom/webidl/MediaStream.webidl
+++ b/dom/webidl/MediaStream.webidl
@@ -38,9 +38,14 @@ interface MediaStream : EventTarget {
     MediaStream                clone ();
     readonly    attribute boolean      active;
                 attribute EventHandler onaddtrack;
     //             attribute EventHandler onremovetrack;
     readonly attribute double currentTime;
 
     [ChromeOnly, Throws]
     static Promise<long> countUnderlyingStreams();
+
+    // Webrtc allows the remote side to name a stream whatever it wants, and we
+    // need to surface this to content.
+    [ChromeOnly]
+    void assignId(DOMString id);
 };
deleted file mode 100644
--- a/dom/webidl/MediaStreamList.webidl
+++ /dev/null
@@ -1,11 +0,0 @@
-/* -*- Mode: IDL; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/.
- */
-
-[ChromeOnly]
-interface MediaStreamList {
-  getter MediaStream? (unsigned long index);
-  readonly attribute unsigned long length;
-};
--- a/dom/webidl/PeerConnectionImpl.webidl
+++ b/dom/webidl/PeerConnectionImpl.webidl
@@ -36,38 +36,34 @@ interface PeerConnectionImpl  {
 
   /* Stats call, calls either |onGetStatsSuccess| or |onGetStatsError| on our
      observer. (see the |PeerConnectionObserver| interface) */
   [Throws]
   void getStats(MediaStreamTrack? selector);
 
   /* Adds the tracks created by GetUserMedia */
   [Throws]
-  void addTrack(MediaStreamTrack track, MediaStream... streams);
-  [Throws]
   void removeTrack(MediaStreamTrack track);
   [Throws]
-  void insertDTMF(RTCRtpSender sender, DOMString tones,
+  TransceiverImpl createTransceiverImpl(DOMString kind,
+                                        MediaStreamTrack? track);
+  [Throws]
+  boolean checkNegotiationNeeded();
+  [Throws]
+  void insertDTMF(TransceiverImpl transceiver, DOMString tones,
                   optional unsigned long duration = 100,
                   optional unsigned long interToneGap = 70);
   [Throws]
   DOMString getDTMFToneBuffer(RTCRtpSender sender);
   [Throws]
-  void replaceTrack(MediaStreamTrack thisTrack, MediaStreamTrack withTrack);
-  [Throws]
-  void setParameters(MediaStreamTrack track,
-                     optional RTCRtpParameters parameters);
-  [Throws]
-  RTCRtpParameters getParameters(MediaStreamTrack track);
+  void replaceTrackNoRenegotiation(TransceiverImpl transceiverImpl,
+                                   MediaStreamTrack? withTrack);
   [Throws]
   void closeStreams();
 
-  sequence<MediaStream> getLocalStreams();
-  sequence<MediaStream> getRemoteStreams();
-
   void addRIDExtension(MediaStreamTrack recvTrack, unsigned short extensionId);
   void addRIDFilter(MediaStreamTrack recvTrack, DOMString rid);
 
   void enablePacketDump(unsigned long level,
                         mozPacketDumpType type,
                         boolean sending);
 
   void disablePacketDump(unsigned long level,
--- a/dom/webidl/PeerConnectionObserver.webidl
+++ b/dom/webidl/PeerConnectionObserver.webidl
@@ -18,37 +18,37 @@ interface PeerConnectionObserver
   void onCreateAnswerError(unsigned long name, DOMString message);
   void onSetLocalDescriptionSuccess();
   void onSetRemoteDescriptionSuccess();
   void onSetLocalDescriptionError(unsigned long name, DOMString message);
   void onSetRemoteDescriptionError(unsigned long name, DOMString message);
   void onAddIceCandidateSuccess();
   void onAddIceCandidateError(unsigned long name, DOMString message);
   void onIceCandidate(unsigned short level, DOMString mid, DOMString candidate);
-  void onNegotiationNeeded();
 
   /* Stats callbacks */
   void onGetStatsSuccess(optional RTCStatsReportInternal report);
   void onGetStatsError(unsigned long name, DOMString message);
 
-  /* replaceTrack callbacks */
-  void onReplaceTrackSuccess();
-  void onReplaceTrackError(unsigned long name, DOMString message);
-
   /* Data channel callbacks */
   void notifyDataChannel(DataChannel channel);
 
   /* Notification of one of several types of state changed */
   void onStateChange(PCObserverStateType state);
 
   /* Changes to MediaStreamTracks */
-  void onAddStream(MediaStream stream);
   void onRemoveStream(MediaStream stream);
-  void onAddTrack(MediaStreamTrack track, sequence<MediaStream> streams);
-  void onRemoveTrack(MediaStreamTrack track);
+  void onTrack(DOMString webrtcTrackId, sequence<DOMString> streamIds);
+
+  /* Transceiver management; called when setRemoteDescription causes a
+     transceiver to be created on the C++ side */
+  void onTransceiverNeeded(DOMString kind, TransceiverImpl transceiverImpl);
 
   /* DTMF callback */
-  void onDTMFToneChange(DOMString trackId, DOMString tone);
+  void onDTMFToneChange(MediaStreamTrack track, DOMString tone);
 
   /* Packet dump callback */
   void onPacket(unsigned long level, mozPacketDumpType type, boolean sending,
                 ArrayBuffer packet);
+
+  /* Transceiver sync */
+  void syncTransceivers();
 };
--- a/dom/webidl/RTCPeerConnection.webidl
+++ b/dom/webidl/RTCPeerConnection.webidl
@@ -123,18 +123,22 @@ interface RTCPeerConnection : EventTarge
   // because a track can be part of multiple streams, stream parameters
   // indicate which particular streams should be referenced in signaling
 
   RTCRtpSender addTrack(MediaStreamTrack track,
                         MediaStream stream,
                         MediaStream... moreStreams);
   void removeTrack(RTCRtpSender sender);
 
+  RTCRtpTransceiver addTransceiver((MediaStreamTrack or DOMString) trackOrKind,
+                                   optional RTCRtpTransceiverInit init);
+
   sequence<RTCRtpSender> getSenders();
   sequence<RTCRtpReceiver> getReceivers();
+  sequence<RTCRtpTransceiver> getTransceivers();
 
   [ChromeOnly]
   void mozAddRIDExtension(RTCRtpReceiver receiver, unsigned short extensionId);
   [ChromeOnly]
   void mozAddRIDFilter(RTCRtpReceiver receiver, DOMString rid);
   [ChromeOnly]
   void mozSetPacketCallback(mozPacketCallback callback);
   [ChromeOnly]
--- a/dom/webidl/RTCRtpSender.webidl
+++ b/dom/webidl/RTCRtpSender.webidl
@@ -64,16 +64,25 @@ dictionary RTCRtpParameters {
   sequence<RTCRtpHeaderExtensionParameters> headerExtensions;
   RTCRtcpParameters                         rtcp;
   sequence<RTCRtpCodecParameters>           codecs;
 };
 
 [Pref="media.peerconnection.enabled",
  JSImplementation="@mozilla.org/dom/rtpsender;1"]
 interface RTCRtpSender {
-  readonly attribute MediaStreamTrack track;
+  readonly attribute MediaStreamTrack? track;
   Promise<void> setParameters (optional RTCRtpParameters parameters);
   RTCRtpParameters getParameters();
-  Promise<void> replaceTrack(MediaStreamTrack track);
+  Promise<void> replaceTrack(MediaStreamTrack? withTrack);
   Promise<RTCStatsReport> getStats();
   [Pref="media.peerconnection.dtmf.enabled"]
   readonly attribute RTCDTMFSender? dtmf;
+  // Ugh, can't use a ChromeOnly attibute sequence<MediaStream>...
+  [ChromeOnly]
+  sequence<MediaStream> getStreams();
+  [ChromeOnly]
+  void setStreams(sequence<MediaStream> streams);
+  [ChromeOnly]
+  void setTrack(MediaStreamTrack? track);
+  [ChromeOnly]
+  void checkWasCreatedByPc(RTCPeerConnection pc);
 };
new file mode 100644
--- /dev/null
+++ b/dom/webidl/RTCRtpTransceiver.webidl
@@ -0,0 +1,78 @@
+/* -*- Mode: IDL; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * The origin of this IDL file is
+ * http://w3c.github.io/webrtc-pc/#rtcrtptransceiver-interface
+ */
+
+enum RTCRtpTransceiverDirection {
+    "sendrecv",
+    "sendonly",
+    "recvonly",
+    "inactive"
+};
+
+dictionary RTCRtpTransceiverInit {
+    RTCRtpTransceiverDirection         direction = "sendrecv";
+    sequence<MediaStream>              streams = [];
+    // TODO: bug 1396918
+    // sequence<RTCRtpEncodingParameters> sendEncodings;
+};
+
+[Pref="media.peerconnection.enabled",
+ JSImplementation="@mozilla.org/dom/rtptransceiver;1"]
+interface RTCRtpTransceiver {
+    readonly attribute DOMString?                  mid;
+    [SameObject]
+    readonly attribute RTCRtpSender                sender;
+    [SameObject]
+    readonly attribute RTCRtpReceiver              receiver;
+    readonly attribute boolean                     stopped;
+             attribute RTCRtpTransceiverDirection  direction;
+    readonly attribute RTCRtpTransceiverDirection? currentDirection;
+
+    void stop();
+    // TODO: bug 1396922
+    // void setCodecPreferences(sequence<RTCRtpCodecCapability> codecs);
+
+    [ChromeOnly]
+    void setRemoteTrackId(DOMString trackId);
+    [ChromeOnly]
+    boolean remoteTrackIdIs(DOMString trackId);
+
+    // Mostly for testing
+    [Pref="media.peerconnection.remoteTrackId.enabled"]
+    DOMString getRemoteTrackId();
+
+    [ChromeOnly]
+    void setAddTrackMagic();
+    [ChromeOnly]
+    readonly attribute boolean addTrackMagic;
+    [ChromeOnly]
+    void setCurrentDirection(RTCRtpTransceiverDirection direction);
+    [ChromeOnly]
+    void setDirectionInternal(RTCRtpTransceiverDirection direction);
+    [ChromeOnly]
+    void setMid(DOMString mid);
+    [ChromeOnly]
+    void unsetMid();
+    [ChromeOnly]
+    void setStopped();
+    [ChromeOnly]
+    void remove();
+
+    [ChromeOnly]
+    DOMString getKind();
+    [ChromeOnly]
+    boolean hasBeenUsedToSend();
+    [ChromeOnly]
+    void sync();
+
+    [ChromeOnly]
+    void insertDTMF(DOMString tones,
+                    optional unsigned long duration = 100,
+                    optional unsigned long interToneGap = 70);
+};
+
--- a/dom/webidl/RTCTrackEvent.webidl
+++ b/dom/webidl/RTCTrackEvent.webidl
@@ -6,22 +6,24 @@
  * The origin of this IDL file is
  * http://w3c.github.io/webrtc-pc/#idl-def-RTCTrackEvent
  */
 
 dictionary RTCTrackEventInit : EventInit {
     required RTCRtpReceiver        receiver;
     required MediaStreamTrack      track;
     sequence<MediaStream> streams = [];
+    required RTCRtpTransceiver     transceiver;
 };
 
 [Pref="media.peerconnection.enabled",
  Constructor(DOMString type, RTCTrackEventInit eventInitDict)]
 interface RTCTrackEvent : Event {
     readonly        attribute RTCRtpReceiver           receiver;
     readonly        attribute MediaStreamTrack         track;
 
 // TODO: Use FrozenArray once available. (Bug 1236777)
 //  readonly        attribute FrozenArray<MediaStream> streams;
 
     [Frozen, Cached, Pure]
     readonly        attribute sequence<MediaStream> streams; // workaround
+    readonly        attribute RTCRtpTransceiver transceiver;
 };
new file mode 100644
--- /dev/null
+++ b/dom/webidl/TransceiverImpl.webidl
@@ -0,0 +1,23 @@
+/* -*- Mode: IDL; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/.
+ *
+ * PeerConnection.js' interface to the C++ TransceiverImpl.
+ *
+ * Do not confuse with RTCRtpTransceiver. This interface is purely for
+ * communication between the PeerConnection JS DOM binding and the C++
+ * implementation.
+ *
+ * See media/webrtc/signaling/src/peerconnection/TransceiverImpl.h
+ *
+ */
+
+// Constructed by PeerConnectionImpl::CreateTransceiverImpl.
+[ChromeOnly]
+interface TransceiverImpl {
+  MediaStreamTrack getReceiveTrack();
+  [Throws]
+  void syncWithJS(RTCRtpTransceiver transceiver);
+};
+
--- a/dom/webidl/moz.build
+++ b/dom/webidl/moz.build
@@ -197,19 +197,16 @@ with Files("MediaEncryptedEvent.webidl")
     BUG_COMPONENT = ("Core", "Audio/Video")
 
 with Files("MediaKey*"):
     BUG_COMPONENT = ("Core", "Audio/Video: Playback")
 
 with Files("Media*List*"):
     BUG_COMPONENT = ("Core", "CSS Parsing and Computation")
 
-with Files("MediaStreamList.webidl"):
-    BUG_COMPONENT = ("Core", "Web Audio")
-
 with Files("*Record*"):
     BUG_COMPONENT = ("Core", "Audio/Video: Recording")
 
 with Files("Media*Track*"):
     BUG_COMPONENT = ("Core", "WebRTC: Audio/Video")
 
 with Files("Mouse*"):
     BUG_COMPONENT = ("Core", "DOM: Events")
@@ -980,32 +977,33 @@ WEBIDL_FILES = [
     'XULDocument.webidl',
     'XULElement.webidl',
     'XULTemplateBuilder.webidl',
 ]
 
 if CONFIG['MOZ_WEBRTC']:
     WEBIDL_FILES += [
         'DataChannel.webidl',
-        'MediaStreamList.webidl',
         'PeerConnectionImpl.webidl',
         'PeerConnectionImplEnums.webidl',
         'PeerConnectionObserver.webidl',
         'PeerConnectionObserverEnums.webidl',
         'RTCCertificate.webidl',
         'RTCConfiguration.webidl',
         'RTCDTMFSender.webidl',
         'RTCIceCandidate.webidl',
         'RTCIdentityAssertion.webidl',
         'RTCIdentityProvider.webidl',
         'RTCPeerConnection.webidl',
         'RTCPeerConnectionStatic.webidl',
         'RTCRtpReceiver.webidl',
         'RTCRtpSender.webidl',
+        'RTCRtpTransceiver.webidl',
         'RTCSessionDescription.webidl',
+        'TransceiverImpl.webidl',
         'WebrtcDeprecated.webidl',
         'WebrtcGlobalInformation.webidl',
     ]
 
 if CONFIG['MOZ_WEBSPEECH']:
     WEBIDL_FILES += [
         'SpeechGrammar.webidl',
         'SpeechGrammarList.webidl',
--- a/gfx/webrender/src/renderer.rs
+++ b/gfx/webrender/src/renderer.rs
@@ -591,16 +591,26 @@ impl SourceTextureResolver {
     fn deinit(self, device: &mut Device) {
         device.delete_texture(self.dummy_cache_texture);
 
         for texture in self.cache_texture_map {
             device.delete_texture(texture);
         }
     }
 
+    fn begin_frame(&self) {
+        assert!(self.cache_rgba8_texture.is_none());
+        assert!(self.cache_a8_texture.is_none());
+    }
+
+    fn end_frame(&mut self, pool: &mut Vec<Texture>) {
+        // return the cached targets to the pool
+        self.end_pass(None, None, pool)
+    }
+
     fn end_pass(
         &mut self,
         a8_texture: Option<Texture>,
         rgba8_texture: Option<Texture>,
         pool: &mut Vec<Texture>,
     ) {
         // If we have cache textures from previous pass, return them to the pool.
         pool.extend(self.cache_rgba8_texture.take());
@@ -3692,16 +3702,17 @@ impl Renderer {
             return;
         }
 
         self.device.disable_depth_write();
         self.device.disable_stencil();
         self.device.set_blend(false);
 
         self.bind_frame_data(frame);
+        self.texture_resolver.begin_frame();
 
         for (pass_index, pass) in frame.passes.iter_mut().enumerate() {
             self.gpu_profile.place_marker(&format!("pass {}", pass_index));
 
             self.texture_resolver.bind(
                 &SourceTexture::CacheA8,
                 TextureSampler::CacheA8,
                 &mut self.device,
@@ -3808,16 +3819,17 @@ impl Renderer {
                     self.texture_resolver.resolve(&SourceTexture::CacheA8)
                 {
                     self.device
                         .bind_texture(TextureSampler::SharedCacheA8, shared_alpha_texture);
                 }
             }
         }
 
+        self.texture_resolver.end_frame(&mut self.render_target_pool);
         self.draw_render_target_debug(framebuffer_size);
         self.draw_texture_cache_debug(framebuffer_size);
 
         // Garbage collect any frame outputs that weren't used this frame.
         let device = &mut self.device;
         self.output_targets
             .retain(|_, target| if target.last_access != frame_id {
                 device.delete_fbo(target.fbo_id);
--- a/js/xpconnect/loader/URLPreloader.cpp
+++ b/js/xpconnect/loader/URLPreloader.cpp
@@ -175,17 +175,17 @@ URLPreloader::GetCacheFile(const nsAStri
     MOZ_TRY(cacheFile->AppendNative(NS_LITERAL_CSTRING("startupCache")));
     Unused << cacheFile->Create(nsIFile::DIRECTORY_TYPE, 0777);
 
     MOZ_TRY(cacheFile->Append(NS_LITERAL_STRING("urlCache") + suffix));
 
     return Move(cacheFile);
 }
 
-static const uint8_t URL_MAGIC[] = "mozURLcachev001";
+static const uint8_t URL_MAGIC[] = "mozURLcachev002";
 
 Result<nsCOMPtr<nsIFile>, nsresult>
 URLPreloader::FindCacheFile()
 {
     nsCOMPtr<nsIFile> cacheFile;
     MOZ_TRY_VAR(cacheFile, GetCacheFile(NS_LITERAL_STRING(".bin")));
 
     bool exists;
@@ -511,23 +511,16 @@ URLPreloader::ReadURI(nsIURI* uri, ReadT
 
 /* static */ Result<const nsCString, nsresult>
 URLPreloader::ReadFile(nsIFile* file, ReadType readType)
 {
     return Read(CacheKey(file), readType);
 }
 
 /* static */ Result<const nsCString, nsresult>
-URLPreloader::ReadFile(const nsACString& path, ReadType readType)
-{
-    CacheKey key(CacheKey::TypeFile, path);
-    return Read(key, readType);
-}
-
-/* static */ Result<const nsCString, nsresult>
 URLPreloader::Read(FileLocation& location, ReadType readType)
 {
     if (location.IsZip()) {
         if (location.GetBaseZip()) {
             nsCString path;
             location.GetPath(path);
             return ReadZip(location.GetBaseZip(), path);
         }
@@ -599,20 +592,20 @@ URLPreloader::ResolveURI(nsIURI* uri)
     // Try for a file URI.
     if (scheme.EqualsLiteral("file")) {
         nsCOMPtr<nsIFileURL> fileURL = do_QueryInterface(resolved);
         MOZ_ASSERT(fileURL);
 
         nsCOMPtr<nsIFile> file;
         MOZ_TRY(fileURL->GetFile(getter_AddRefs(file)));
 
-        nsCString path;
-        MOZ_TRY(file->GetNativePath(path));
+        nsString path;
+        MOZ_TRY(file->GetPath(path));
 
-        return CacheKey(CacheKey::TypeFile, path);
+        return CacheKey(CacheKey::TypeFile, NS_ConvertUTF16toUTF8(path));
     }
 
     // Not a file or Omnijar URI, so currently unsupported.
     return Err(NS_ERROR_INVALID_ARG);
 }
 
 size_t
 URLPreloader::ShallowSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf)
@@ -623,17 +616,18 @@ URLPreloader::ShallowSizeOfIncludingThis
             mCachedURLs.ShallowSizeOfExcludingThis(mallocSizeOf));
 }
 
 Result<FileLocation, nsresult>
 URLPreloader::CacheKey::ToFileLocation()
 {
     if (mType == TypeFile) {
         nsCOMPtr<nsIFile> file;
-        MOZ_TRY(NS_NewNativeLocalFile(mPath, false, getter_AddRefs(file)));
+        MOZ_TRY(NS_NewLocalFile(NS_ConvertUTF8toUTF16(mPath), false,
+                                getter_AddRefs(file)));
         return Move(FileLocation(file));
     }
 
     RefPtr<nsZipArchive> zip = Archive();
     return Move(FileLocation(zip, mPath.get()));
 }
 
 Result<const nsCString, nsresult>
--- a/js/xpconnect/loader/URLPreloader.h
+++ b/js/xpconnect/loader/URLPreloader.h
@@ -72,18 +72,16 @@ public:
     // given location is not supported by the cache, the entries will be read
     // synchronously, and not stored in the cache.
     static Result<const nsCString, nsresult> Read(FileLocation& location, ReadType readType = Forget);
 
     static Result<const nsCString, nsresult> ReadURI(nsIURI* uri, ReadType readType = Forget);
 
     static Result<const nsCString, nsresult> ReadFile(nsIFile* file, ReadType readType = Forget);
 
-    static Result<const nsCString, nsresult> ReadFile(const nsACString& path, ReadType readType = Forget);
-
     static Result<const nsCString, nsresult> ReadZip(nsZipArchive* archive,
                                                      const nsACString& path,
                                                      ReadType readType = Forget);
 
 private:
     struct CacheKey;
 
     Result<const nsCString, nsresult> ReadInternal(const CacheKey& key, ReadType readType);
@@ -154,17 +152,19 @@ private:
 
         CacheKey(EntryType type, const nsACString& path)
             : mType(type), mPath(path)
         {}
 
         explicit CacheKey(nsIFile* file)
           : mType(TypeFile)
         {
-            MOZ_ALWAYS_SUCCEEDS(file->GetNativePath(mPath));
+            nsString path;
+            MOZ_ALWAYS_SUCCEEDS(file->GetPath(path));
+            CopyUTF16toUTF8(path, mPath);
         }
 
         explicit inline CacheKey(InputBuffer& buffer);
 
         // Encodes or decodes the cache key for storage in a session cache file.
         template <typename Buffer>
         void Code(Buffer& buffer)
         {
--- a/layout/base/RestyleManager.h
+++ b/layout/base/RestyleManager.h
@@ -33,22 +33,22 @@ class RestyleManager
 {
 public:
   typedef mozilla::dom::Element Element;
 
   NS_INLINE_DECL_REFCOUNTING(mozilla::RestyleManager)
 
   // Get an integer that increments every time we process pending restyles.
   // The value is never 0.
-  uint32_t GetRestyleGeneration() const { return mRestyleGeneration; }
+  uint64_t GetRestyleGeneration() const { return mRestyleGeneration; }
   // Unlike GetRestyleGeneration, which means the actual restyling count,
   // GetUndisplayedRestyleGeneration represents any possible DOM changes that
   // can cause restyling. This is needed for getComputedStyle to work with
   // non-styled (e.g. display: none) elements.
-  uint32_t GetUndisplayedRestyleGeneration() const {
+  uint64_t GetUndisplayedRestyleGeneration() const {
     return mUndisplayedRestyleGeneration;
   }
 
   // Get an integer that increments every time there is a style change
   // as a result of a change to the :hover content state.
   uint32_t GetHoverGeneration() const { return mHoverGeneration; }
 
   void Disconnect() { mPresContext = nullptr; }
@@ -258,18 +258,18 @@ protected:
   }
 
   nsCSSFrameConstructor* FrameConstructor() const {
     return PresContext()->FrameConstructor();
   }
 
 private:
   nsPresContext* mPresContext; // weak, can be null after Disconnect().
-  uint32_t mRestyleGeneration;
-  uint32_t mUndisplayedRestyleGeneration;
+  uint64_t mRestyleGeneration;
+  uint64_t mUndisplayedRestyleGeneration;
   uint32_t mHoverGeneration;
 
   // Used to keep track of frames that have been destroyed during
   // ProcessRestyledFrames, so we don't try to touch them again even if
   // they're referenced again later in the changelist.
   mozilla::UniquePtr<nsTHashtable<nsPtrHashKey<const nsIFrame>>> mDestroyedFrames;
 
 protected:
--- a/layout/generic/nsGfxScrollFrame.cpp
+++ b/layout/generic/nsGfxScrollFrame.cpp
@@ -2102,16 +2102,19 @@ ScrollFrameHelper::ScrollFrameHelper(nsC
 
 }
 
 ScrollFrameHelper::~ScrollFrameHelper()
 {
   if (mScrollEvent) {
     mScrollEvent->Revoke();
   }
+  if (mScrollEndEvent) {
+    mScrollEndEvent->Revoke();
+  }
 }
 
 /*
  * Callback function from AsyncSmoothMSDScroll, used in ScrollFrameHelper::ScrollTo
  */
 void
 ScrollFrameHelper::AsyncSmoothMSDScrollCallback(ScrollFrameHelper* aInstance,
                                                 mozilla::TimeDuration aDeltaTime)
@@ -2181,17 +2184,17 @@ ScrollFrameHelper::CompleteAsyncScroll(c
   AutoWeakFrame weakFrame(mOuter);
   ScrollToImpl(mDestination, aRange, aOrigin);
   if (!weakFrame.IsAlive()) {
     return;
   }
   // We are done scrolling, set our destination to wherever we actually ended
   // up scrolling to.
   mDestination = GetScrollPosition();
-  FireScrollEndEvent();
+  PostScrollEndEvent();
 }
 
 bool
 ScrollFrameHelper::HasPluginFrames()
 {
 #if defined(XP_WIN) || defined(MOZ_WIDGET_GTK)
   if (XRE_IsContentProcess()) {
     nsPresContext* presContext = mOuter->PresContext();
@@ -4418,19 +4421,34 @@ ScrollFrameHelper::FireScrollPortEvent()
                                                       mVerticalOverflow) ?
     eScrollPortOverflow : eScrollPortUnderflow, nullptr);
   event.mOrient = orient;
   return EventDispatcher::Dispatch(mOuter->GetContent(),
                                    mOuter->PresContext(), &event);
 }
 
 void
+ScrollFrameHelper::PostScrollEndEvent()
+{
+  if (mScrollEndEvent) {
+    return;
+  }
+
+  // The ScrollEndEvent constructor registers itself with the refresh driver.
+  mScrollEndEvent = new ScrollEndEvent(this);
+}
+
+void
 ScrollFrameHelper::FireScrollEndEvent()
 {
   MOZ_ASSERT(mOuter->GetContent());
+  MOZ_ASSERT(mScrollEndEvent);
+  mScrollEndEvent->Revoke();
+  mScrollEndEvent = nullptr;
+
   nsContentUtils::DispatchEventOnlyToChrome(mOuter->GetContent()->OwnerDoc(),
                                             mOuter->GetContent(),
                                             NS_LITERAL_STRING("scrollend"),
                                             true /* aCanBubble */,
                                             false /* aCancelable */);
 }
 
 void
@@ -4816,16 +4834,32 @@ NS_IMETHODIMP
 ScrollFrameHelper::ScrollEvent::Run()
 {
   if (mHelper) {
     mHelper->FireScrollEvent();
   }
   return NS_OK;
 }
 
+ScrollFrameHelper::ScrollEndEvent::ScrollEndEvent(ScrollFrameHelper* aHelper)
+  : Runnable("ScrollFrameHelper::ScrollEndEvent")
+  , mHelper(aHelper)
+{
+  mHelper->mOuter->PresContext()->RefreshDriver()->PostScrollEvent(this);
+}
+
+NS_IMETHODIMP
+ScrollFrameHelper::ScrollEndEvent::Run()
+{
+  if (mHelper) {
+    mHelper->FireScrollEndEvent();
+  }
+  return NS_OK;
+}
+
 void
 ScrollFrameHelper::FireScrollEvent()
 {
   AUTO_PROFILER_TRACING("Paint", "FireScrollEvent");
   MOZ_ASSERT(mScrollEvent);
   mScrollEvent->Revoke();
   mScrollEvent = nullptr;
 
--- a/layout/generic/nsGfxScrollFrame.h
+++ b/layout/generic/nsGfxScrollFrame.h
@@ -64,16 +64,17 @@ public:
   // reload our child frame list.
   // We need this if a scrollbar frame is recreated.
   void ReloadChildFrames();
 
   nsresult CreateAnonymousContent(
     nsTArray<nsIAnonymousContentCreator::ContentInfo>& aElements);
   void AppendAnonymousContentTo(nsTArray<nsIContent*>& aElements, uint32_t aFilter);
   nsresult FireScrollPortEvent();
+  void PostScrollEndEvent();
   void FireScrollEndEvent();
   void PostOverflowEvent();
   using PostDestroyData = nsIFrame::PostDestroyData;
   void Destroy(PostDestroyData& aPostDestroyData);
 
   void BuildDisplayList(nsDisplayListBuilder*   aBuilder,
                         const nsDisplayListSet& aLists);
 
@@ -126,16 +127,25 @@ public:
   public:
     NS_DECL_NSIRUNNABLE
     explicit ScrollEvent(ScrollFrameHelper* aHelper);
     void Revoke() { mHelper = nullptr; }
   private:
     ScrollFrameHelper* mHelper;
   };
 
+  class ScrollEndEvent : public Runnable {
+  public:
+    NS_DECL_NSIRUNNABLE
+    explicit ScrollEndEvent(ScrollFrameHelper* aHelper);
+    void Revoke() { mHelper = nullptr; }
+  private:
+    ScrollFrameHelper* mHelper;
+  };
+
   class AsyncScrollPortEvent : public Runnable {
   public:
     NS_DECL_NSIRUNNABLE
     explicit AsyncScrollPortEvent(ScrollFrameHelper* helper)
       : Runnable("ScrollFrameHelper::AsyncScrollPortEvent")
       , mHelper(helper)
     {
     }
@@ -397,17 +407,17 @@ public:
 
   bool IsAlwaysActive() const;
   void MarkRecentlyScrolled();
   void MarkNotRecentlyScrolled();
   nsExpirationState* GetExpirationState() { return &mActivityExpirationState; }
 
   void SetTransformingByAPZ(bool aTransforming) {
     if (mTransformingByAPZ && !aTransforming) {
-      FireScrollEndEvent();
+      PostScrollEndEvent();
     }
     mTransformingByAPZ = aTransforming;
     if (!mozilla::css::TextOverflow::HasClippedOverflow(mOuter)) {
       // If the block has some text-overflow stuff we should kick off a paint
       // because we have special behaviour for it when APZ scrolling is active.
       mOuter->SchedulePaint();
     }
   }
@@ -490,16 +500,17 @@ public:
 
   // owning references to the nsIAnonymousContentCreator-built content
   nsCOMPtr<nsIContent> mHScrollbarContent;
   nsCOMPtr<nsIContent> mVScrollbarContent;
   nsCOMPtr<nsIContent> mScrollCornerContent;
   nsCOMPtr<nsIContent> mResizerContent;
 
   RefPtr<ScrollEvent> mScrollEvent;
+  RefPtr<ScrollEndEvent> mScrollEndEvent;
   nsRevocableEventPtr<AsyncScrollPortEvent> mAsyncScrollPortEvent;
   nsRevocableEventPtr<ScrolledAreaEvent> mScrolledAreaEvent;
   nsIFrame* mHScrollbarBox;
   nsIFrame* mVScrollbarBox;
   nsIFrame* mScrolledFrame;
   nsIFrame* mScrollCornerBox;
   nsIFrame* mResizerBox;
   nsContainerFrame* mOuter;
--- a/layout/style/GroupRule.cpp
+++ b/layout/style/GroupRule.cpp
@@ -193,16 +193,23 @@ GeckoGroupRuleRules::SizeOfExcludingThis
   // - mRuleCollection
   return n;
 }
 
 // -------------------------------
 // ServoGroupRuleRules
 //
 
+ServoGroupRuleRules::~ServoGroupRuleRules()
+{
+  if (mRuleList) {
+    mRuleList->DropReference();
+  }
+}
+
 #ifdef DEBUG
 void
 ServoGroupRuleRules::List(FILE* out, int32_t aIndent) const
 {
   // TODO list something reasonable?
 }
 #endif
 
--- a/layout/style/GroupRule.h
+++ b/layout/style/GroupRule.h
@@ -82,16 +82,17 @@ struct ServoGroupRuleRules
   explicit ServoGroupRuleRules(already_AddRefed<ServoCssRules> aRawRules)
     : mRuleList(new ServoCSSRuleList(Move(aRawRules), nullptr)) {}
   ServoGroupRuleRules(ServoGroupRuleRules&& aOther)
     : mRuleList(Move(aOther.mRuleList)) {}
   ServoGroupRuleRules(const ServoGroupRuleRules& aCopy) {
     // Do we ever clone Servo rules?
     MOZ_ASSERT_UNREACHABLE("stylo: Cloning GroupRule not implemented");
   }
+  ~ServoGroupRuleRules();
 
   void SetParentRule(GroupRule* aParentRule) {
     if (mRuleList) {
       mRuleList->SetParentRule(aParentRule);
     }
   }
   void SetStyleSheet(StyleSheet* aSheet) {
     if (mRuleList) {
--- a/layout/style/MediaList.h
+++ b/layout/style/MediaList.h
@@ -79,17 +79,19 @@ public:
   {
     aRv = AppendMedium(aMedium);
   }
 
 protected:
   virtual nsresult Delete(const nsAString& aOldMedium) = 0;
   virtual nsresult Append(const nsAString& aNewMedium) = 0;
 
-  virtual ~MediaList() {}
+  virtual ~MediaList() {
+    MOZ_ASSERT(!mStyleSheet, "Backpointer should have been cleared");
+  }
 
   // not refcounted; sheet will let us know when it goes away
   // mStyleSheet is the sheet that needs to be dirtied when this
   // medialist changes
   StyleSheet* mStyleSheet = nullptr;
 
 private:
   template<typename Func>
--- a/layout/style/ServoCSSRuleList.cpp
+++ b/layout/style/ServoCSSRuleList.cpp
@@ -229,12 +229,14 @@ ServoCSSRuleList::GetDOMCSSRuleType(uint
   if (rule <= kMaxRuleType) {
     return rule;
   }
   return CastToPtr(rule)->Type();
 }
 
 ServoCSSRuleList::~ServoCSSRuleList()
 {
+  MOZ_ASSERT(!mStyleSheet, "Backpointer should have been cleared");
+  MOZ_ASSERT(!mParentRule, "Backpointer should have been cleared");
   DropAllRules();
 }
 
 } // namespace mozilla
--- a/layout/style/ServoKeyframeRule.cpp
+++ b/layout/style/ServoKeyframeRule.cpp
@@ -30,17 +30,20 @@ public:
     ServoKeyframeDeclaration, nsICSSDeclaration)
 
   NS_IMETHOD GetParentRule(nsIDOMCSSRule** aParent) final
   {
     NS_IF_ADDREF(*aParent = mRule);
     return NS_OK;
   }
 
-  void DropReference() { mRule = nullptr; }
+  void DropReference() {
+    mRule = nullptr;
+    mDecls->SetOwningRule(nullptr);
+  }
 
   DeclarationBlock* GetCSSDeclaration(Operation aOperation) final
   {
     return mDecls;
   }
   nsresult SetCSSDeclaration(DeclarationBlock* aDecls) final
   {
     if (!mRule) {
@@ -78,17 +81,19 @@ public:
   size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
   {
     size_t n = aMallocSizeOf(this);
     // TODO we may want to add size of mDecls as well
     return n;
   }
 
 private:
-  virtual ~ServoKeyframeDeclaration() {}
+  virtual ~ServoKeyframeDeclaration() {
+    MOZ_ASSERT(!mRule, "Backpointer should have been cleared");
+  }
 
   ServoKeyframeRule* mRule;
   RefPtr<ServoDeclarationBlock> mDecls;
 };
 
 NS_IMPL_CYCLE_COLLECTING_ADDREF(ServoKeyframeDeclaration)
 NS_IMPL_CYCLE_COLLECTING_RELEASE(ServoKeyframeDeclaration)
 
@@ -99,16 +104,19 @@ NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(
 NS_INTERFACE_MAP_END_INHERITING(nsDOMCSSDeclaration)
 
 // -------------------------------------------
 // ServoKeyframeRule
 //
 
 ServoKeyframeRule::~ServoKeyframeRule()
 {
+  if (mDeclaration) {
+    mDeclaration->DropReference();
+  }
 }
 
 NS_IMPL_ADDREF_INHERITED(ServoKeyframeRule, dom::CSSKeyframeRule)
 NS_IMPL_RELEASE_INHERITED(ServoKeyframeRule, dom::CSSKeyframeRule)
 
 // QueryInterface implementation for nsCSSKeyframeRule
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(ServoKeyframeRule)
 NS_INTERFACE_MAP_END_INHERITING(dom::CSSKeyframeRule)
--- a/layout/style/ServoKeyframesRule.cpp
+++ b/layout/style/ServoKeyframesRule.cpp
@@ -85,38 +85,44 @@ public:
   }
 
   uint32_t Length() final { return mRules.Length(); }
 
   void DropReference()
   {
     mStyleSheet = nullptr;
     mParentRule = nullptr;
-    DropAllRules();
+    for (css::Rule* rule : mRules) {
+      if (rule) {
+        rule->SetStyleSheet(nullptr);
+        rule->SetParentRule(nullptr);
+      }
+    }
   }
 
   size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
   {
     size_t n = aMallocSizeOf(this);
     for (const css::Rule* rule : mRules) {
       n += rule ? rule->SizeOfIncludingThis(aMallocSizeOf) : 0;
     }
     return n;
   }
 
 private:
-  virtual ~ServoKeyframeList() {}
+  virtual ~ServoKeyframeList() {
+    MOZ_ASSERT(!mParentRule, "Backpointer should have been cleared");
+    MOZ_ASSERT(!mStyleSheet, "Backpointer should have been cleared");
+    DropAllRules();
+  }
 
   void DropAllRules()
   {
-    for (css::Rule* rule : mRules) {
-      if (rule) {
-        rule->SetStyleSheet(nullptr);
-        rule->SetParentRule(nullptr);
-      }
+    if (mParentRule || mStyleSheet) {
+      DropReference();
     }
     mRules.Clear();
     mRawRule = nullptr;
   }
 
   // may be nullptr when the style sheet drops the reference to us.
   ServoStyleSheet* mStyleSheet = nullptr;
   ServoKeyframesRule* mParentRule = nullptr;
@@ -158,16 +164,19 @@ ServoKeyframesRule::ServoKeyframesRule(R
   // rid of nsCSSKeyframeRule.
   : dom::CSSKeyframesRule(aLine, aColumn)
   , mRawRule(Move(aRawRule))
 {
 }
 
 ServoKeyframesRule::~ServoKeyframesRule()
 {
+  if (mKeyframeList) {
+    mKeyframeList->DropReference();
+  }
 }
 
 NS_IMPL_ADDREF_INHERITED(ServoKeyframesRule, dom::CSSKeyframesRule)
 NS_IMPL_RELEASE_INHERITED(ServoKeyframesRule, dom::CSSKeyframesRule)
 
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(ServoKeyframesRule)
 NS_INTERFACE_MAP_END_INHERITING(dom::CSSKeyframesRule)
 
--- a/layout/style/ServoMediaRule.cpp
+++ b/layout/style/ServoMediaRule.cpp
@@ -19,27 +19,40 @@ ServoMediaRule::ServoMediaRule(RefPtr<Ra
                                uint32_t aLine, uint32_t aColumn)
   : CSSMediaRule(Servo_MediaRule_GetRules(aRawRule).Consume(), aLine, aColumn)
   , mRawRule(Move(aRawRule))
 {
 }
 
 ServoMediaRule::~ServoMediaRule()
 {
+  if (mMediaList) {
+    mMediaList->SetStyleSheet(nullptr);
+  }
 }
 
 NS_IMPL_ADDREF_INHERITED(ServoMediaRule, CSSMediaRule)
 NS_IMPL_RELEASE_INHERITED(ServoMediaRule, CSSMediaRule)
 
 // QueryInterface implementation for MediaRule
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(ServoMediaRule)
 NS_INTERFACE_MAP_END_INHERITING(CSSMediaRule)
 
-NS_IMPL_CYCLE_COLLECTION_INHERITED(ServoMediaRule, CSSMediaRule,
-                                   mMediaList)
+NS_IMPL_CYCLE_COLLECTION_CLASS(ServoMediaRule)
+
+NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(ServoMediaRule, CSSMediaRule)
+  if (tmp->mMediaList) {
+    tmp->mMediaList->SetStyleSheet(nullptr);
+    tmp->mMediaList = nullptr;
+  }
+NS_IMPL_CYCLE_COLLECTION_UNLINK_END
+
+NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(ServoMediaRule, CSSMediaRule)
+  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mMediaList)
+NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
 
 /* virtual */ already_AddRefed<css::Rule>
 ServoMediaRule::Clone() const
 {
   // Rule::Clone is only used when CSSStyleSheetInner is cloned in
   // preparation of being mutated. However, ServoStyleSheet never clones
   // anything, so this method should never be called.
   MOZ_ASSERT_UNREACHABLE("Shouldn't be cloning ServoMediaRule");
--- a/layout/style/ServoPageRule.cpp
+++ b/layout/style/ServoPageRule.cpp
@@ -21,16 +21,17 @@ namespace mozilla {
 ServoPageRuleDeclaration::ServoPageRuleDeclaration(
   already_AddRefed<RawServoDeclarationBlock> aDecls)
   : mDecls(new ServoDeclarationBlock(Move(aDecls)))
 {
 }
 
 ServoPageRuleDeclaration::~ServoPageRuleDeclaration()
 {
+  mDecls->SetOwningRule(nullptr);
 }
 
 // QueryInterface implementation for ServoPageRuleDeclaration
 NS_INTERFACE_MAP_BEGIN(ServoPageRuleDeclaration)
   NS_WRAPPERCACHE_INTERFACE_MAP_ENTRY
   // We forward the cycle collection interfaces to Rule(), which is
   // never null (in fact, we're part of that object!)
   if (aIID.Equals(NS_GET_IID(nsCycleCollectionISupports)) ||
@@ -138,16 +139,17 @@ NS_IMPL_CYCLE_COLLECTION_TRACE_END
 
 NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(ServoPageRule, CSSPageRule)
   // Keep this in sync with IsCCLeaf.
 
   // Unlink the wrapper for our declaraton.  This just expands out
   // NS_IMPL_CYCLE_COLLECTION_UNLINK_PRESERVED_WRAPPER which we can't use
   // directly because the wrapper is on the declaration, not on us.
   tmp->mDecls.ReleaseWrapper(static_cast<nsISupports*>(p));
+  tmp->mDecls.mDecls->SetOwningRule(nullptr);
 NS_IMPL_CYCLE_COLLECTION_UNLINK_END
 
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(ServoPageRule, CSSPageRule)
   // Keep this in sync with IsCCLeaf.
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
 
 bool
 ServoPageRule::IsCCLeaf() const
--- a/layout/style/ServoStyleRule.cpp
+++ b/layout/style/ServoStyleRule.cpp
@@ -23,16 +23,17 @@ namespace mozilla {
 ServoStyleRuleDeclaration::ServoStyleRuleDeclaration(
   already_AddRefed<RawServoDeclarationBlock> aDecls)
   : mDecls(new ServoDeclarationBlock(Move(aDecls)))
 {
 }
 
 ServoStyleRuleDeclaration::~ServoStyleRuleDeclaration()
 {
+  mDecls->SetOwningRule(nullptr);
 }
 
 // QueryInterface implementation for ServoStyleRuleDeclaration
 NS_INTERFACE_MAP_BEGIN(ServoStyleRuleDeclaration)
   NS_WRAPPERCACHE_INTERFACE_MAP_ENTRY
   // We forward the cycle collection interfaces to Rule(), which is
   // never null (in fact, we're part of that object!)
   if (aIID.Equals(NS_GET_IID(nsCycleCollectionISupports)) ||
--- a/layout/style/nsCSSRules.cpp
+++ b/layout/style/nsCSSRules.cpp
@@ -203,22 +203,35 @@ ImportRule::~ImportRule()
   if (mChildSheet) {
     mChildSheet->SetOwnerRule(nullptr);
   }
 }
 
 NS_IMPL_ADDREF_INHERITED(ImportRule, CSSImportRule)
 NS_IMPL_RELEASE_INHERITED(ImportRule, CSSImportRule)
 
-NS_IMPL_CYCLE_COLLECTION_INHERITED(ImportRule, CSSImportRule, mMedia, mChildSheet)
-
 // QueryInterface implementation for ImportRule
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(ImportRule)
 NS_INTERFACE_MAP_END_INHERITING(CSSImportRule)
 
+NS_IMPL_CYCLE_COLLECTION_CLASS(ImportRule)
+
+NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(ImportRule, CSSImportRule)
+  if (tmp->mChildSheet) {
+    tmp->mChildSheet->SetOwnerRule(nullptr);
+    tmp->mChildSheet = nullptr;
+  }
+  tmp->mMedia = nullptr;
+NS_IMPL_CYCLE_COLLECTION_UNLINK_END
+
+NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(ImportRule, CSSImportRule)
+  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mMedia)
+  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mChildSheet)
+NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
+
 #ifdef DEBUG
 /* virtual */ void
 ImportRule::List(FILE* out, int32_t aIndent) const
 {
   nsAutoCString str;
   // Indent
   for (int32_t indent = aIndent; --indent >= 0; ) {
     str.AppendLiteral("  ");
@@ -333,18 +346,28 @@ MediaRule::~MediaRule()
 
 NS_IMPL_ADDREF_INHERITED(MediaRule, CSSMediaRule)
 NS_IMPL_RELEASE_INHERITED(MediaRule, CSSMediaRule)
 
 // QueryInterface implementation for MediaRule
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(MediaRule)
 NS_INTERFACE_MAP_END_INHERITING(CSSMediaRule)
 
-NS_IMPL_CYCLE_COLLECTION_INHERITED(MediaRule, CSSMediaRule,
-                                   mMedia)
+NS_IMPL_CYCLE_COLLECTION_CLASS(MediaRule)
+
+NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(MediaRule, CSSMediaRule)
+  if (tmp->mMedia) {
+    tmp->mMedia->SetStyleSheet(nullptr);
+    tmp->mMedia = nullptr;
+  }
+NS_IMPL_CYCLE_COLLECTION_UNLINK_END
+
+NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(MediaRule, CSSMediaRule)
+  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mMedia)
+NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
 
 /* virtual */ void
 MediaRule::SetStyleSheet(StyleSheet* aSheet)
 {
   if (mMedia) {
     // Set to null so it knows it's leaving one sheet and joining another.
     mMedia->SetStyleSheet(nullptr);
     if (aSheet) {
--- a/media/mtransport/nricectx.cpp
+++ b/media/mtransport/nricectx.cpp
@@ -1038,18 +1038,31 @@ nsresult NrIceCtx::ParseGlobalAttributes
     MOZ_MTLOG(ML_ERROR, "Couldn't parse global attributes for "
               << name_ << "'");
     return NS_ERROR_FAILURE;
   }
 
   return NS_OK;
 }
 
+bool NrIceCtx::HasStreamsToConnect() const {
+  for (auto& stream : streams_) {
+    if (stream && stream->state() != NrIceMediaStream::ICE_CLOSED) {
+      return true;
+    }
+  }
+  return false;
+}
+
 nsresult NrIceCtx::StartChecks(bool offerer) {
   int r;
+  if (!HasStreamsToConnect()) {
+    // Nothing to do
+    return NS_OK;
+  }
 
   offerer_ = offerer;
   ice_start_time_ = TimeStamp::Now();
 
   r=nr_ice_peer_ctx_pair_candidates(peer_);
   if (r) {
     MOZ_MTLOG(ML_ERROR, "Couldn't pair candidates on "
               << name_ << "'");
--- a/media/mtransport/nricectx.h
+++ b/media/mtransport/nricectx.h
@@ -267,16 +267,18 @@ class NrIceCtx {
   }
 
   // Some might be null
   size_t GetStreamCount() const
   {
     return streams_.size();
   }
 
+  bool HasStreamsToConnect() const;
+
   // The name of the ctx
   const std::string& name() const { return name_; }
 
   // Get ufrag and password.
   std::string ufrag() const;
   std::string pwd() const;
 
   // Current state
--- a/media/mtransport/test/transport_unittests.cpp
+++ b/media/mtransport/test/transport_unittests.cpp
@@ -609,18 +609,18 @@ class TransportTestPeer : public sigslot
     ice_ctx_->ctx()->SetStream(streams_.size(), stream);
     streams_.push_back(stream);
 
     // Listen for candidates
     stream->SignalCandidate.
         connect(this, &TransportTestPeer::GotCandidate);
 
     // Create the transport layer
-    ice_ = new TransportLayerIce(name);
-    ice_->SetParameters(ice_ctx_->ctx(), stream, 1);
+    ice_ = new TransportLayerIce();
+    ice_->SetParameters(stream, 1);
 
     // Assemble the stack
     nsAutoPtr<std::queue<mozilla::TransportLayer *> > layers(
       new std::queue<mozilla::TransportLayer *>);
     layers->push(ice_);
     layers->push(dtls_);
 
     test_utils_->sts_target()->Dispatch(
--- a/media/mtransport/transportlayerice.cpp
+++ b/media/mtransport/transportlayerice.cpp
@@ -79,30 +79,28 @@ extern "C" {
 namespace mozilla {
 
 #ifdef ERROR
 #undef ERROR
 #endif
 
 MOZ_MTLOG_MODULE("mtransport")
 
-TransportLayerIce::TransportLayerIce(const std::string& name)
-    : name_(name),
-      ctx_(nullptr), stream_(nullptr), component_(0),
+TransportLayerIce::TransportLayerIce()
+    : stream_(nullptr), component_(0),
       old_stream_(nullptr)
 {
   // setup happens later
 }
 
 TransportLayerIce::~TransportLayerIce() {
   // No need to do anything here, since we use smart pointers
 }
 
-void TransportLayerIce::SetParameters(RefPtr<NrIceCtx> ctx,
-                                      RefPtr<NrIceMediaStream> stream,
+void TransportLayerIce::SetParameters(RefPtr<NrIceMediaStream> stream,
                                       int component) {
   // Stream could be null in the case of some badly written js that causes
   // us to be in an ICE restart case, but not have valid streams due to
   // not calling PeerConnectionMedia::EnsureTransports if
   // PeerConnectionImpl::SetSignalingState_m thinks the conditions were
   // not correct.  We also solved a case where an incoming answer was
   // incorrectly beginning an ICE restart when the offer did not indicate one.
   if (!stream) {
@@ -116,26 +114,23 @@ void TransportLayerIce::SetParameters(Re
   if (stream_ && !old_stream_ && (stream_ != stream)) {
     // Here we leave the old stream's signals connected until we don't need
     // it anymore.  They will be disconnected if ice restart is successful.
     old_stream_ = stream_;
     MOZ_MTLOG(ML_INFO, LAYER_INFO << "SetParameters save old stream("
                                   << old_stream_->name() << ")");
   }
 
-  ctx_ = ctx;
   stream_ = stream;
   component_ = component;
 
   PostSetup();
 }
 
 void TransportLayerIce::PostSetup() {
-  target_ = ctx_->thread();
-
   stream_->SignalReady.connect(this, &TransportLayerIce::IceReady);
   stream_->SignalFailed.connect(this, &TransportLayerIce::IceFailed);
   stream_->SignalPacketReceived.connect(this,
                                         &TransportLayerIce::IcePacketReceived);
   if (stream_->state() == NrIceMediaStream::ICE_OPEN) {
     TL_SET_STATE(TS_OPEN);
   }
 }
--- a/media/mtransport/transportlayerice.h
+++ b/media/mtransport/transportlayerice.h
@@ -25,22 +25,21 @@
 #include "transportflow.h"
 #include "transportlayer.h"
 
 // An ICE transport layer -- corresponds to a single ICE
 namespace mozilla {
 
 class TransportLayerIce : public TransportLayer {
  public:
-  explicit TransportLayerIce(const std::string& name);
+  TransportLayerIce();
 
   virtual ~TransportLayerIce();
 
-  void SetParameters(RefPtr<NrIceCtx> ctx,
-                     RefPtr<NrIceMediaStream> stream,
+  void SetParameters(RefPtr<NrIceMediaStream> stream,
                      int component);
 
   void ResetOldStream(); // called after successful ice restart
   void RestoreOldStream(); // called after unsuccessful ice restart
 
   // Transport layer overrides.
   TransportResult SendPacket(const unsigned char *data, size_t len) override;
 
@@ -52,18 +51,16 @@ class TransportLayerIce : public Transpo
                          const unsigned char *data, int len);
 
   TRANSPORT_LAYER_ID("ice")
 
  private:
   DISALLOW_COPY_ASSIGN(TransportLayerIce);
   void PostSetup();
 
-  const std::string name_;
-  RefPtr<NrIceCtx> ctx_;
   RefPtr<NrIceMediaStream> stream_;
   int component_;
 
   // used to hold the old stream
   RefPtr<NrIceMediaStream> old_stream_;
 };
 
 }  // close namespace
--- a/media/webrtc/signaling/gtest/jsep_session_unittest.cpp
+++ b/media/webrtc/signaling/gtest/jsep_session_unittest.cpp
@@ -123,52 +123,158 @@ protected:
   {
     tdata.iceCredentialSerial = 0;
     GenerateNewIceCredentials(session, tdata);
     session.SetIceCredentials(tdata.mIceUfrag, tdata.mIcePwd);
     AddDtlsFingerprint("sha-1", session, tdata);
     AddDtlsFingerprint("sha-256", session, tdata);
   }
 
+  void
+  CheckTransceiverInvariants(
+      const std::vector<RefPtr<JsepTransceiver>>& oldTransceivers,
+      const std::vector<RefPtr<JsepTransceiver>>& newTransceivers)
+  {
+    ASSERT_LE(oldTransceivers.size(), newTransceivers.size());
+    std::set<size_t> levels;
+
+    for (const RefPtr<JsepTransceiver>& newTransceiver : newTransceivers) {
+      if (newTransceiver->HasLevel()) {
+        ASSERT_FALSE(levels.count(newTransceiver->GetLevel()))
+                     << "Two new transceivers are mapped to level "
+                     << newTransceiver->GetLevel();
+        levels.insert(newTransceiver->GetLevel());
+      }
+    }
+
+    auto last = levels.rbegin();
+    if (last != levels.rend()) {
+      ASSERT_LE(*last, levels.size())
+          << "Max level observed in transceivers was " << *last
+          << ", but there are only " << levels.size() << " levels in the "
+          "transceivers.";
+    }
+
+    for (const RefPtr<JsepTransceiver>& oldTransceiver : oldTransceivers) {
+      if (oldTransceiver->HasLevel()) {
+        ASSERT_TRUE(levels.count(oldTransceiver->GetLevel()))
+                    << "Level " << oldTransceiver->GetLevel()
+                    << " had a transceiver in the old, but not the new (or, "
+                    "perhaps this level had more than one transceiver in the "
+                    "old)";
+        levels.erase(oldTransceiver->GetLevel());
+      }
+    }
+  }
+
+  std::vector<RefPtr<JsepTransceiver>>
+  DeepCopy(const std::vector<RefPtr<JsepTransceiver>>& transceivers)
+  {
+    std::vector<RefPtr<JsepTransceiver>> copy;
+    for (const RefPtr<JsepTransceiver>& transceiver : transceivers) {
+      copy.push_back(new JsepTransceiver(*transceiver));
+    }
+    return copy;
+  }
+
   std::string
   CreateOffer(const Maybe<JsepOfferOptions>& options = Nothing())
   {
+    std::vector<RefPtr<JsepTransceiver>> transceiversBefore =
+      DeepCopy(mSessionOff->GetTransceivers());
     JsepOfferOptions defaultOptions;
     const JsepOfferOptions& optionsRef = options ? *options : defaultOptions;
     std::string offer;
     nsresult rv;
     rv = mSessionOff->CreateOffer(optionsRef, &offer);
     EXPECT_EQ(NS_OK, rv) << mSessionOff->GetLastError();
 
     std::cerr << "OFFER: " << offer << std::endl;
 
     ValidateTransport(*mOffererTransport, offer);
 
+    if (transceiversBefore.size() != mSessionOff->GetTransceivers().size()) {
+      EXPECT_TRUE(false) << "CreateOffer changed number of transceivers!";
+      return offer;
+    }
+
+    CheckTransceiverInvariants(transceiversBefore,
+                               mSessionOff->GetTransceivers());
+
+    for (size_t i = 0; i < transceiversBefore.size(); ++i) {
+      RefPtr<JsepTransceiver>& oldTransceiver = transceiversBefore[i];
+      RefPtr<JsepTransceiver>& newTransceiver = mSessionOff->GetTransceivers()[i];
+      EXPECT_EQ(oldTransceiver->IsStopped(), newTransceiver->IsStopped());
+
+      if (oldTransceiver->IsStopped()) {
+        if (!newTransceiver->HasLevel()) {
+          // Tolerate unmapping of stopped transceivers by removing this
+          // difference.
+          oldTransceiver->ClearLevel();
+        }
+      } else if (!oldTransceiver->HasLevel()) {
+        EXPECT_TRUE(newTransceiver->HasLevel());
+        // Tolerate new mappings.
+        oldTransceiver->SetLevel(newTransceiver->GetLevel());
+      }
+
+      EXPECT_TRUE(Equals(*oldTransceiver, *newTransceiver));
+    }
+
     return offer;
   }
 
+  typedef enum {
+    NO_ADDTRACK_MAGIC,
+    ADDTRACK_MAGIC
+  } AddTrackMagic;
+
   void
-  AddTracks(JsepSessionImpl& side)
+  AddTracks(JsepSessionImpl& side, AddTrackMagic magic = ADDTRACK_MAGIC)
   {
     // Add tracks.
     if (types.empty()) {
       types = BuildTypes(GetParam());
     }
-    AddTracks(side, types);
-
-    // Now that we have added streams, we expect audio, then video, then
-    // application in the SDP, regardless of the order in which the streams were
-    // added.
-    std::sort(types.begin(), types.end());
+    AddTracks(side, types, magic);
   }
 
   void
-  AddTracks(JsepSessionImpl& side, const std::string& mediatypes)
+  AddTracks(JsepSessionImpl& side,
+            const std::string& mediatypes,
+            AddTrackMagic magic = ADDTRACK_MAGIC)
   {
-    AddTracks(side, BuildTypes(mediatypes));
+    AddTracks(side, BuildTypes(mediatypes), magic);
+  }
+
+  JsepTrack
+  RemoveTrack(JsepSession& side, size_t index) {
+    if (side.GetTransceivers().size() <= index) {
+      EXPECT_TRUE(false) << "Index " << index << " out of bounds!";
+      return JsepTrack(SdpMediaSection::kAudio, sdp::kSend);
+    }
+
+    RefPtr<JsepTransceiver>& transceiver(side.GetTransceivers()[index]);
+    JsepTrack& track = transceiver->mSendTrack;
+    EXPECT_FALSE(track.GetTrackId().empty()) << "No track at index " << index;
+
+    JsepTrack original(track);
+    track.ClearTrackIds();
+    transceiver->mJsDirection &= SdpDirectionAttribute::Direction::kRecvonly;
+    return original;
+  }
+
+  void
+  SetDirection(JsepSession& side,
+               size_t index,
+               SdpDirectionAttribute::Direction direction) {
+    ASSERT_LT(index, side.GetTransceivers().size())
+      << "Index " << index << " out of bounds!";
+
+    side.GetTransceivers()[index]->mJsDirection = direction;
   }
 
   std::vector<SdpMediaSection::MediaType>
   BuildTypes(const std::string& mediatypes)
   {
     std::vector<SdpMediaSection::MediaType> result;
     size_t ptr = 0;
 
@@ -193,87 +299,177 @@ protected:
       ptr = comma + 1;
     }
 
     return result;
   }
 
   void
   AddTracks(JsepSessionImpl& side,
-            const std::vector<SdpMediaSection::MediaType>& mediatypes)
+            const std::vector<SdpMediaSection::MediaType>& mediatypes,
+            AddTrackMagic magic = ADDTRACK_MAGIC)
   {
     FakeUuidGenerator uuid_gen;
     std::string stream_id;
     std::string track_id;
 
     ASSERT_TRUE(uuid_gen.Generate(&stream_id));
 
-    AddTracksToStream(side, stream_id, mediatypes);
+    AddTracksToStream(side, stream_id, mediatypes, magic);
   }
 
   void
   AddTracksToStream(JsepSessionImpl& side,
                     const std::string stream_id,
-                    const std::string& mediatypes)
+                    const std::string& mediatypes,
+                    AddTrackMagic magic = ADDTRACK_MAGIC)
   {
-    AddTracksToStream(side, stream_id, BuildTypes(mediatypes));
+    AddTracksToStream(side, stream_id, BuildTypes(mediatypes), magic);
+  }
+
+  // A bit of a hack. JsepSessionImpl populates the track-id automatically, just
+  // in case, because the w3c spec requires msid to be set even when there's no
+  // send track.
+  bool IsNull(const JsepTrack& track) const {
+    return track.GetStreamIds().empty() &&
+           (track.GetMediaType() != SdpMediaSection::MediaType::kApplication);
   }
 
   void
   AddTracksToStream(JsepSessionImpl& side,
                     const std::string stream_id,
-                    const std::vector<SdpMediaSection::MediaType>& mediatypes)
+                    const std::vector<SdpMediaSection::MediaType>& mediatypes,
+                    AddTrackMagic magic = ADDTRACK_MAGIC)
 
   {
     FakeUuidGenerator uuid_gen;
     std::string track_id;
 
-    for (auto track = mediatypes.begin(); track != mediatypes.end(); ++track) {
+    for (auto type : mediatypes) {
       ASSERT_TRUE(uuid_gen.Generate(&track_id));
 
-      RefPtr<JsepTrack> mst(new JsepTrack(*track, stream_id, track_id));
-      side.AddTrack(mst);
+      std::vector<RefPtr<JsepTransceiver>>& transceivers(side.GetTransceivers());
+      size_t i = transceivers.size();
+      if (magic == ADDTRACK_MAGIC) {
+        for (i = 0; i < transceivers.size(); ++i) {
+          if (transceivers[i]->mSendTrack.GetMediaType() != type) {
+            continue;
+          }
+
+          if (IsNull(transceivers[i]->mSendTrack) ||
+              type == SdpMediaSection::MediaType::kApplication) {
+            break;
+          }
+        }
+      }
+
+      if (i == transceivers.size()) {
+        side.AddTransceiver(new JsepTransceiver(type));
+        MOZ_ASSERT(i < transceivers.size());
+      }
+
+      std::cerr << "Updating send track for transceiver " << i << std::endl;
+      if (magic == ADDTRACK_MAGIC) {
+        transceivers[i]->SetAddTrackMagic();
+      }
+      transceivers[i]->mJsDirection |=
+        SdpDirectionAttribute::Direction::kSendonly;
+      transceivers[i]->mSendTrack.UpdateTrackIds(
+          std::vector<std::string>(1, stream_id), track_id);
     }
   }
 
-  bool HasMediaStream(std::vector<RefPtr<JsepTrack>> tracks) const {
-    for (auto i = tracks.begin(); i != tracks.end(); ++i) {
-      if ((*i)->GetMediaType() != SdpMediaSection::kApplication) {
-        return 1;
+  bool HasMediaStream(const std::vector<JsepTrack>& tracks) const {
+    for (const auto& track : tracks) {
+      if (track.GetMediaType() != SdpMediaSection::kApplication) {
+        return true;
       }
     }
-    return 0;
+    return false;
   }
 
   const std::string GetFirstLocalStreamId(JsepSessionImpl& side) const {
-    auto tracks = side.GetLocalTracks();
-    return (*tracks.begin())->GetStreamId();
+    auto tracks = GetLocalTracks(side);
+    return tracks.begin()->GetStreamIds()[0];
+  }
+
+  std::vector<JsepTrack>
+  GetLocalTracks(const JsepSession& session) const {
+    std::vector<JsepTrack> result;
+    for (const auto& transceiver : session.GetTransceivers()) {
+      if (!IsNull(transceiver->mSendTrack)) {
+        result.push_back(transceiver->mSendTrack);
+      }
+    }
+    return result;
+  }
+
+  std::vector<JsepTrack>
+  GetRemoteTracks(const JsepSession& session) const {
+    std::vector<JsepTrack> result;
+    for (const auto& transceiver : session.GetTransceivers()) {
+      if (!IsNull(transceiver->mRecvTrack)) {
+        result.push_back(transceiver->mRecvTrack);
+      }
+    }
+    return result;
+  }
+
+  JsepTransceiver*
+  GetDatachannelTransceiver(JsepSession& side) {
+    for (const auto& transceiver : side.GetTransceivers()) {
+      if (transceiver->mSendTrack.GetMediaType() ==
+            SdpMediaSection::MediaType::kApplication) {
+        return transceiver.get();
+      }
+    }
+
+    return nullptr;
+  }
+
+  JsepTransceiver*
+  GetNegotiatedTransceiver(JsepSession& side, size_t index) {
+    for (RefPtr<JsepTransceiver>& transceiver : side.GetTransceivers()) {
+      if (transceiver->mSendTrack.GetNegotiatedDetails() ||
+          transceiver->mRecvTrack.GetNegotiatedDetails()) {
+        if (index) {
+          --index;
+          continue;
+        }
+
+        return transceiver.get();
+      }
+    }
+
+    return nullptr;
   }
 
   std::vector<std::string>
-  GetMediaStreamIds(std::vector<RefPtr<JsepTrack>> tracks) const {
+  GetMediaStreamIds(const std::vector<JsepTrack>& tracks) const {
     std::vector<std::string> ids;
-    for (auto i = tracks.begin(); i != tracks.end(); ++i) {
+    for (const auto& track : tracks) {
       // data channels don't have msid's
-      if ((*i)->GetMediaType() == SdpMediaSection::kApplication) {
+      if (track.GetMediaType() == SdpMediaSection::kApplication) {
         continue;
       }
-      ids.push_back((*i)->GetStreamId());
+      ids.insert(ids.end(),
+                 track.GetStreamIds().begin(),
+                 track.GetStreamIds().end());
     }
     return ids;
   }
 
   std::vector<std::string>
   GetLocalMediaStreamIds(JsepSessionImpl& side) const {
-    return GetMediaStreamIds(side.GetLocalTracks());
+    return GetMediaStreamIds(GetLocalTracks(side));
   }
 
   std::vector<std::string>
   GetRemoteMediaStreamIds(JsepSessionImpl& side) const {
-    return GetMediaStreamIds(side.GetRemoteTracks());
+    return GetMediaStreamIds(GetRemoteTracks(side));
   }
 
   std::vector<std::string>
   sortUniqueStrVector(std::vector<std::string> in) const {
     std::sort(in.begin(), in.end());
     auto it = std::unique(in.begin(), in.end());
     in.resize( std::distance(in.begin(), it));
     return in;
@@ -284,59 +480,49 @@ protected:
     return sortUniqueStrVector(GetLocalMediaStreamIds(side));
   }
 
   std::vector<std::string>
   GetRemoteUniqueStreamIds(JsepSessionImpl& side) const {
     return sortUniqueStrVector(GetRemoteMediaStreamIds(side));
   }
 
-  RefPtr<JsepTrack> GetTrack(JsepSessionImpl& side,
-                             SdpMediaSection::MediaType type,
-                             size_t index) const {
-    auto tracks = side.GetLocalTracks();
-
-    for (auto i = tracks.begin(); i != tracks.end(); ++i) {
-      if ((*i)->GetMediaType() != type) {
+  JsepTrack GetTrack(JsepSessionImpl& side,
+                     SdpMediaSection::MediaType type,
+                     size_t index) const {
+    for (const auto& transceiver : side.GetTransceivers()) {
+      if (IsNull(transceiver->mSendTrack) ||
+          transceiver->mSendTrack.GetMediaType() != type) {
         continue;
       }
 
       if (index != 0) {
         --index;
         continue;
       }
 
-      return *i;
+      return transceiver->mSendTrack;
     }
 
-    return RefPtr<JsepTrack>(nullptr);
+    return JsepTrack(type, sdp::kSend);
   }
 
-  RefPtr<JsepTrack> GetTrackOff(size_t index,
-                                SdpMediaSection::MediaType type) {
+  JsepTrack GetTrackOff(size_t index, SdpMediaSection::MediaType type) {
     return GetTrack(*mSessionOff, type, index);
   }
 
-  RefPtr<JsepTrack> GetTrackAns(size_t index,
-                                SdpMediaSection::MediaType type) {
+  JsepTrack GetTrackAns(size_t index, SdpMediaSection::MediaType type) {
     return GetTrack(*mSessionAns, type, index);
   }
 
-  class ComparePairsByLevel {
-    public:
-      bool operator()(const JsepTrackPair& lhs,
-                      const JsepTrackPair& rhs) const {
-        return lhs.mLevel < rhs.mLevel;
-      }
-  };
-
-  std::vector<JsepTrackPair> GetTrackPairsByLevel(JsepSessionImpl& side) const {
-    auto pairs = side.GetNegotiatedTrackPairs();
-    std::sort(pairs.begin(), pairs.end(), ComparePairsByLevel());
-    return pairs;
+  size_t CountRtpTypes() const {
+    return std::count_if(
+        types.begin(), types.end(),
+        [](SdpMediaSection::MediaType type)
+          {return type != SdpMediaSection::MediaType::kApplication;});
   }
 
   bool Equals(const SdpFingerprintAttributeList::Fingerprint& f1,
               const SdpFingerprintAttributeList::Fingerprint& f2) const {
     if (f1.hashFunc != f2.hashFunc) {
       return false;
     }
 
@@ -400,75 +586,143 @@ protected:
 
     if (t1->GetPassword() != t2->GetPassword()) {
       return false;
     }
 
     return true;
   }
 
-  bool Equals(const RefPtr<JsepTransport>& t1,
-              const RefPtr<JsepTransport>& t2) const {
-    if (!t1 && !t2) {
-      return true;
+  bool Equals(const JsepTransport& t1,
+              const JsepTransport& t2) const {
+    if (t1.mTransportId != t2.mTransportId) {
+      std::cerr << "Transport id differs: " << t1.mTransportId << " vs "
+                << t2.mTransportId << std::endl;
+      return false;
     }
 
-    if (!t1 || !t2) {
+    if (t1.mComponents != t2.mComponents) {
+      std::cerr << "Component count differs" << std::endl;
+      return false;
+    }
+
+    if (!Equals(t1.mIce, t2.mIce)) {
+      std::cerr << "ICE differs" << std::endl;
       return false;
     }
 
-    if (t1->mTransportId != t2->mTransportId) {
+    return true;
+  }
+
+  bool Equals(const JsepTrack& t1, const JsepTrack& t2) const {
+    if (t1.GetMediaType() != t2.GetMediaType()) {
+      return false;
+    }
+
+    if (t1.GetDirection() != t2.GetDirection()) {
+      return false;
+    }
+
+    if (t1.GetStreamIds() != t2.GetStreamIds()) {
       return false;
     }
 
-    if (t1->mComponents != t2->mComponents) {
+    if (t1.GetTrackId() != t2.GetTrackId()) {
+      return false;
+    }
+
+    if (t1.GetActive() != t2.GetActive()) {
       return false;
     }
 
-    if (!Equals(t1->mIce, t2->mIce)) {
+    if (t1.GetCNAME() != t2.GetCNAME()) {
+      return false;
+    }
+
+    if (t1.GetSsrcs() != t2.GetSsrcs()) {
       return false;
     }
 
     return true;
   }
 
-  bool Equals(const JsepTrackPair& p1,
-              const JsepTrackPair& p2) const {
-    if (p1.mLevel != p2.mLevel) {
+  bool Equals(const JsepTransceiver& p1,
+              const JsepTransceiver& p2) const {
+    if (p1.HasLevel() != p2.HasLevel()) {
+      std::cerr << "One transceiver has a level, the other doesn't"
+                << std::endl;
+      return false;
+    }
+
+    if (p1.HasLevel() && (p1.GetLevel() != p2.GetLevel())) {
+      std::cerr << "Level differs: " << p1.GetLevel() << " vs " << p2.GetLevel()
+                << std::endl;
       return false;
     }
 
     // We don't check things like BundleLevel(), since that can change without
     // any changes to the transport, which is what we're really interested in.
 
-    if (p1.mSending.get() != p2.mSending.get()) {
+    if (p1.IsStopped() != p2.IsStopped()) {
+      std::cerr << "One transceiver is stopped, the other is not" << std::endl;
+      return false;
+    }
+
+    if (p1.IsAssociated() != p2.IsAssociated()) {
+      std::cerr << "One transceiver has a mid, the other doesn't"
+                << std::endl;
       return false;
     }
 
-    if (p1.mReceiving.get() != p2.mReceiving.get()) {
+    if (p1.IsAssociated() && (p1.GetMid() != p2.GetMid())) {
+      std::cerr << "mid differs: " << p1.GetMid() << " vs " << p2.GetMid()
+                << std::endl;
+      return false;
+    }
+
+    if (!Equals(p1.mSendTrack, p2.mSendTrack)) {
+      std::cerr << "Send track differs" << std::endl;
       return false;
     }
 
-    if (!Equals(p1.mRtpTransport, p2.mRtpTransport)) {
+    if (!Equals(p1.mRecvTrack, p2.mRecvTrack)) {
+      std::cerr << "Receive track differs" << std::endl;
+      return false;
+    }
+
+    if (!Equals(p1.mTransport, p2.mTransport)) {
+      std::cerr << "Transport differs" << std::endl;
       return false;
     }
 
-    if (!Equals(p1.mRtcpTransport, p2.mRtcpTransport)) {
+    return true;
+  }
+
+  bool Equals(const std::vector<RefPtr<JsepTransceiver>>& t1,
+              const std::vector<RefPtr<JsepTransceiver>>& t2) const {
+    if (t1.size() != t2.size()) {
+      std::cerr << "Size differs: t1.size = " << t1.size() << ", t2.size = "
+                << t2.size() << std::endl;
       return false;
     }
 
+    for (size_t i = 0; i < t1.size(); ++i) {
+      if (!Equals(*t1[i], *t2[i])) {
+        return false;
+      }
+    }
+
     return true;
   }
 
   size_t GetTrackCount(JsepSessionImpl& side,
                        SdpMediaSection::MediaType type) const {
-    auto tracks = side.GetLocalTracks();
     size_t result = 0;
-    for (auto i = tracks.begin(); i != tracks.end(); ++i) {
-      if ((*i)->GetMediaType() == type) {
+    for (const auto& track : GetLocalTracks(side)) {
+      if (track.GetMediaType() == type) {
         ++result;
       }
     }
     return result;
   }
 
   UniquePtr<Sdp> GetParsedLocalDescription(const JsepSessionImpl& side) const {
     return Parse(side.GetLocalDescription(kJsepDescriptionCurrent));
@@ -517,52 +771,53 @@ protected:
       }
     }
   }
 
   void
   EnsureNegotiationFailure(SdpMediaSection::MediaType type,
                            const std::string& codecName)
   {
-    for (auto i = mSessionOff->Codecs().begin(); i != mSessionOff->Codecs().end();
-         ++i) {
-      auto* codec = *i;
+    for (auto* codec : mSessionOff->Codecs()) {
       if (codec->mType == type && codec->mName != codecName) {
         codec->mEnabled = false;
       }
     }
 
-    for (auto i = mSessionAns->Codecs().begin(); i != mSessionAns->Codecs().end();
-         ++i) {
-      auto* codec = *i;
+    for (auto* codec : mSessionAns->Codecs()) {
       if (codec->mType == type && codec->mName == codecName) {
         codec->mEnabled = false;
       }
     }
   }
 
   std::string
   CreateAnswer()
   {
+    std::vector<RefPtr<JsepTransceiver>> transceiversBefore =
+      DeepCopy(mSessionAns->GetTransceivers());
+
     JsepAnswerOptions options;
     std::string answer;
 
     // detect ice restart and generate new ice credentials (like
     // PeerConnectionImpl does).
     if (mSessionAns->RemoteIceIsRestarting()) {
       GenerateNewIceCredentials(*mSessionAns, *mAnswererTransport);
       mSessionAns->SetIceCredentials(mAnswererTransport->mIceUfrag,
                                      mAnswererTransport->mIcePwd);
     }
     nsresult rv = mSessionAns->CreateAnswer(options, &answer);
     EXPECT_EQ(NS_OK, rv);
 
     std::cerr << "ANSWER: " << answer << std::endl;
 
     ValidateTransport(*mAnswererTransport, answer);
+    CheckTransceiverInvariants(transceiversBefore,
+                               mSessionAns->GetTransceivers());
 
     return answer;
   }
 
   static const uint32_t NO_CHECKS = 0;
   static const uint32_t CHECK_SUCCESS = 1;
   static const uint32_t CHECK_TRACKS = 1 << 2;
   static const uint32_t ALL_CHECKS = CHECK_SUCCESS | CHECK_TRACKS;
@@ -576,154 +831,187 @@ protected:
     std::string answer = CreateAnswer();
     SetLocalAnswer(answer, checkFlags);
     SetRemoteAnswer(answer, checkFlags);
   }
 
   void
   SetLocalOffer(const std::string& offer, uint32_t checkFlags = ALL_CHECKS)
   {
+    std::vector<RefPtr<JsepTransceiver>> transceiversBefore =
+      DeepCopy(mSessionOff->GetTransceivers());
+
     nsresult rv = mSessionOff->SetLocalDescription(kJsepSdpOffer, offer);
 
+    CheckTransceiverInvariants(transceiversBefore,
+                               mSessionOff->GetTransceivers());
+
     if (checkFlags & CHECK_SUCCESS) {
       ASSERT_EQ(NS_OK, rv);
     }
 
     if (checkFlags & CHECK_TRACKS) {
-      // Check that the transports exist.
-      ASSERT_EQ(types.size(), mSessionOff->GetTransports().size());
-      auto tracks = mSessionOff->GetLocalTracks();
-      for (size_t i = 0; i < types.size(); ++i) {
-        ASSERT_NE("", tracks[i]->GetStreamId());
-        ASSERT_NE("", tracks[i]->GetTrackId());
-        if (tracks[i]->GetMediaType() != SdpMediaSection::kApplication) {
+      // This assumes no recvonly or inactive transceivers.
+      ASSERT_EQ(types.size(), mSessionOff->GetTransceivers().size());
+      for (const auto& transceiver : mSessionOff->GetTransceivers()) {
+        if (!transceiver->HasLevel()) {
+          continue;
+        }
+        const auto& track(transceiver->mSendTrack);
+        size_t level = transceiver->GetLevel();
+        ASSERT_FALSE(IsNull(track));
+        ASSERT_EQ(types[level], track.GetMediaType());
+        if (track.GetMediaType() != SdpMediaSection::kApplication) {
           std::string msidAttr("a=msid:");
-          msidAttr += tracks[i]->GetStreamId();
+          msidAttr += track.GetStreamIds()[0];
           msidAttr += " ";
-          msidAttr += tracks[i]->GetTrackId();
+          msidAttr += track.GetTrackId();
           ASSERT_NE(std::string::npos, offer.find(msidAttr))
             << "Did not find " << msidAttr << " in offer";
         }
       }
       if (types.size() == 1 &&
-          tracks[0]->GetMediaType() == SdpMediaSection::kApplication) {
+          types[0] == SdpMediaSection::kApplication) {
         ASSERT_EQ(std::string::npos, offer.find("a=ssrc"))
           << "Data channel should not contain SSRC";
       }
     }
   }
 
   void
   SetRemoteOffer(const std::string& offer, uint32_t checkFlags = ALL_CHECKS)
   {
+    std::vector<RefPtr<JsepTransceiver>> transceiversBefore =
+      DeepCopy(mSessionAns->GetTransceivers());
+
     nsresult rv = mSessionAns->SetRemoteDescription(kJsepSdpOffer, offer);
 
+    CheckTransceiverInvariants(transceiversBefore,
+                               mSessionAns->GetTransceivers());
+
     if (checkFlags & CHECK_SUCCESS) {
       ASSERT_EQ(NS_OK, rv);
     }
 
     if (checkFlags & CHECK_TRACKS) {
-      auto tracks = mSessionAns->GetRemoteTracks();
-      // Now verify that the right stuff is in the tracks.
-      ASSERT_EQ(types.size(), tracks.size());
-      for (size_t i = 0; i < tracks.size(); ++i) {
-        ASSERT_EQ(types[i], tracks[i]->GetMediaType());
-        ASSERT_NE("", tracks[i]->GetStreamId());
-        ASSERT_NE("", tracks[i]->GetTrackId());
-        if (tracks[i]->GetMediaType() != SdpMediaSection::kApplication) {
+      // This assumes no recvonly or inactive transceivers.
+      ASSERT_EQ(types.size(), mSessionAns->GetTransceivers().size());
+      for (const auto& transceiver : mSessionAns->GetTransceivers()) {
+        if (!transceiver->HasLevel()) {
+          continue;
+        }
+        const auto& track(transceiver->mRecvTrack);
+        size_t level = transceiver->GetLevel();
+        ASSERT_FALSE(IsNull(track));
+        ASSERT_EQ(types[level], track.GetMediaType());
+        if (track.GetMediaType() != SdpMediaSection::kApplication) {
           std::string msidAttr("a=msid:");
-          msidAttr += tracks[i]->GetStreamId();
+          msidAttr += track.GetStreamIds()[0];
           msidAttr += " ";
-          msidAttr += tracks[i]->GetTrackId();
+          msidAttr += track.GetTrackId();
           ASSERT_NE(std::string::npos, offer.find(msidAttr))
             << "Did not find " << msidAttr << " in offer";
         }
       }
     }
   }
 
   void
   SetLocalAnswer(const std::string& answer, uint32_t checkFlags = ALL_CHECKS)
   {
+    std::vector<RefPtr<JsepTransceiver>> transceiversBefore =
+      DeepCopy(mSessionAns->GetTransceivers());
+
     nsresult rv = mSessionAns->SetLocalDescription(kJsepSdpAnswer, answer);
     if (checkFlags & CHECK_SUCCESS) {
       ASSERT_EQ(NS_OK, rv);
     }
 
+    CheckTransceiverInvariants(transceiversBefore,
+                               mSessionAns->GetTransceivers());
+
     if (checkFlags & CHECK_TRACKS) {
       // Verify that the right stuff is in the tracks.
-      auto pairs = mSessionAns->GetNegotiatedTrackPairs();
-      ASSERT_EQ(types.size(), pairs.size());
-      for (size_t i = 0; i < types.size(); ++i) {
-        ASSERT_TRUE(pairs[i].mSending);
-        ASSERT_EQ(types[i], pairs[i].mSending->GetMediaType());
-        ASSERT_TRUE(pairs[i].mReceiving);
-        ASSERT_EQ(types[i], pairs[i].mReceiving->GetMediaType());
-        ASSERT_NE("", pairs[i].mSending->GetStreamId());
-        ASSERT_NE("", pairs[i].mSending->GetTrackId());
+      ASSERT_EQ(types.size(), mSessionAns->GetTransceivers().size());
+      for (const auto& transceiver : mSessionAns->GetTransceivers()) {
+        if (!transceiver->HasLevel()) {
+          continue;
+        }
+        const auto& sendTrack(transceiver->mSendTrack);
+        const auto& recvTrack(transceiver->mRecvTrack);
+        size_t level = transceiver->GetLevel();
+        ASSERT_FALSE(IsNull(sendTrack));
+        ASSERT_EQ(types[level], sendTrack.GetMediaType());
         // These might have been in the SDP, or might have been randomly
         // chosen by JsepSessionImpl
-        ASSERT_NE("", pairs[i].mReceiving->GetStreamId());
-        ASSERT_NE("", pairs[i].mReceiving->GetTrackId());
-
-        if (pairs[i].mReceiving->GetMediaType() != SdpMediaSection::kApplication) {
+        ASSERT_FALSE(IsNull(recvTrack));
+        ASSERT_EQ(types[level], recvTrack.GetMediaType());
+
+        if (recvTrack.GetMediaType() != SdpMediaSection::kApplication) {
           std::string msidAttr("a=msid:");
-          msidAttr += pairs[i].mSending->GetStreamId();
+          msidAttr += sendTrack.GetStreamIds()[0];
           msidAttr += " ";
-          msidAttr += pairs[i].mSending->GetTrackId();
+          msidAttr += sendTrack.GetTrackId();
           ASSERT_NE(std::string::npos, answer.find(msidAttr))
-            << "Did not find " << msidAttr << " in offer";
+            << "Did not find " << msidAttr << " in answer";
         }
       }
       if (types.size() == 1 &&
-          pairs[0].mReceiving->GetMediaType() == SdpMediaSection::kApplication) {
+          types[0] == SdpMediaSection::kApplication) {
         ASSERT_EQ(std::string::npos, answer.find("a=ssrc"))
           << "Data channel should not contain SSRC";
       }
     }
-    std::cerr << "OFFER pairs:" << std::endl;
-    DumpTrackPairs(*mSessionOff);
+    std::cerr << "Answerer transceivers:" << std::endl;
+    DumpTransceivers(*mSessionAns);
   }
 
   void
   SetRemoteAnswer(const std::string& answer, uint32_t checkFlags = ALL_CHECKS)
   {
+    std::vector<RefPtr<JsepTransceiver>> transceiversBefore =
+      DeepCopy(mSessionOff->GetTransceivers());
+
     nsresult rv = mSessionOff->SetRemoteDescription(kJsepSdpAnswer, answer);
     if (checkFlags & CHECK_SUCCESS) {
       ASSERT_EQ(NS_OK, rv);
     }
 
+    CheckTransceiverInvariants(transceiversBefore,
+                               mSessionOff->GetTransceivers());
+
     if (checkFlags & CHECK_TRACKS) {
       // Verify that the right stuff is in the tracks.
-      auto pairs = mSessionOff->GetNegotiatedTrackPairs();
-      ASSERT_EQ(types.size(), pairs.size());
-      for (size_t i = 0; i < types.size(); ++i) {
-        ASSERT_TRUE(pairs[i].mSending);
-        ASSERT_EQ(types[i], pairs[i].mSending->GetMediaType());
-        ASSERT_TRUE(pairs[i].mReceiving);
-        ASSERT_EQ(types[i], pairs[i].mReceiving->GetMediaType());
-        ASSERT_NE("", pairs[i].mSending->GetStreamId());
-        ASSERT_NE("", pairs[i].mSending->GetTrackId());
+      ASSERT_EQ(types.size(), mSessionOff->GetTransceivers().size());
+      for (const auto& transceiver : mSessionOff->GetTransceivers()) {
+        if (!transceiver->HasLevel()) {
+          continue;
+        }
+        const auto& sendTrack(transceiver->mSendTrack);
+        const auto& recvTrack(transceiver->mRecvTrack);
+        size_t level = transceiver->GetLevel();
+        ASSERT_FALSE(IsNull(sendTrack));
+        ASSERT_EQ(types[level], sendTrack.GetMediaType());
         // These might have been in the SDP, or might have been randomly
         // chosen by JsepSessionImpl
-        ASSERT_NE("", pairs[i].mReceiving->GetStreamId());
-        ASSERT_NE("", pairs[i].mReceiving->GetTrackId());
-
-        if (pairs[i].mReceiving->GetMediaType() != SdpMediaSection::kApplication) {
+        ASSERT_FALSE(IsNull(recvTrack));
+        ASSERT_EQ(types[level], recvTrack.GetMediaType());
+
+        if (recvTrack.GetMediaType() != SdpMediaSection::kApplication) {
           std::string msidAttr("a=msid:");
-          msidAttr += pairs[i].mReceiving->GetStreamId();
+          msidAttr += recvTrack.GetStreamIds()[0];
           msidAttr += " ";
-          msidAttr += pairs[i].mReceiving->GetTrackId();
+          msidAttr += recvTrack.GetTrackId();
           ASSERT_NE(std::string::npos, answer.find(msidAttr))
             << "Did not find " << msidAttr << " in answer";
         }
       }
     }
-    std::cerr << "ANSWER pairs:" << std::endl;
-    DumpTrackPairs(*mSessionAns);
+    std::cerr << "Offerer transceivers:" << std::endl;
+    DumpTransceivers(*mSessionOff);
   }
 
   typedef enum {
     RTP = 1,
     RTCP = 2
   } ComponentType;
 
   class CandidateSet {
@@ -963,23 +1251,22 @@ protected:
         << context << " (level " << msection.GetLevel() << ")";
     } else {
       ASSERT_FALSE(msection.GetAttributeList().HasAttribute(
             SdpAttribute::kEndOfCandidatesAttribute))
         << context << " (level " << msection.GetLevel() << ")";
     }
   }
 
-  void CheckPairs(const JsepSession& session, const std::string& context)
+  void CheckTransceiversAreBundled(const JsepSession& session,
+                                   const std::string& context)
   {
-    auto pairs = session.GetNegotiatedTrackPairs();
-
-    for (JsepTrackPair& pair : pairs) {
-      ASSERT_TRUE(pair.HasBundleLevel()) << context;
-      ASSERT_EQ(0U, pair.BundleLevel()) << context;
+    for (const auto& transceiver : session.GetTransceivers()) {
+      ASSERT_TRUE(transceiver->HasBundleLevel()) << context;
+      ASSERT_EQ(0U, transceiver->BundleLevel()) << context;
     }
   }
 
   void
   DisableMsid(std::string* sdp) const {
     size_t pos = sdp->find("a=msid-semantic");
     ASSERT_NE(std::string::npos, pos);
     (*sdp)[pos + 2] = 'X'; // garble, a=Xsid-semantic
@@ -1061,16 +1348,19 @@ protected:
     } else {
       // Not that we would have any test which tests this...
       ASSERT_EQ("19", msection->GetFormats()[0]);
       const SdpRtpmapAttributeList::Rtpmap* rtpmap(msection->FindRtpmap("19"));
       ASSERT_TRUE(rtpmap);
       ASSERT_EQ("19", rtpmap->pt);
       ASSERT_EQ("reserved", rtpmap->name);
     }
+
+    ASSERT_FALSE(msection->GetAttributeList().HasAttribute(
+          SdpAttribute::kMsidAttribute));
   }
 
   void
   ValidateSetupAttribute(const JsepSessionImpl& side,
                          const SdpSetupAttribute::Role expectedRole)
   {
     auto sdp = GetParsedLocalDescription(side);
     for (size_t i = 0; sdp && i < sdp->GetMediaSectionCount(); ++i) {
@@ -1081,17 +1371,22 @@ protected:
       }
     }
   }
 
   void
   DumpTrack(const JsepTrack& track)
   {
     const JsepTrackNegotiatedDetails* details = track.GetNegotiatedDetails();
-    std::cerr << "  type=" << track.GetMediaType() << std::endl;
+    std::cerr << "  type=" << track.GetMediaType() << " track-id="
+              << track.GetTrackId() << std::endl;
+    if (!details) {
+      std::cerr << "  not negotiated" << std::endl;
+      return;
+    }
     std::cerr << "  encodings=" << std::endl;
     for (size_t i = 0; i < details->GetEncodingCount(); ++i) {
       const JsepTrackEncoding& encoding = details->GetEncoding(i);
       std::cerr << "    id=" << encoding.mRid << std::endl;
       for (const JsepCodecDescription* codec : encoding.GetCodecs()) {
         std::cerr << "      " << codec->mName
                   << " enabled(" << (codec->mEnabled?"yes":"no") << ")";
         if (track.GetMediaType() == SdpMediaSection::kAudio) {
@@ -1100,28 +1395,36 @@ protected:
           std::cerr << " dtmf(" << (audioCodec->mDtmfEnabled?"yes":"no") << ")";
         }
         std::cerr << std::endl;
       }
     }
   }
 
   void
-  DumpTrackPairs(const JsepSessionImpl& session)
+  DumpTransceivers(const JsepSessionImpl& session)
   {
-    auto pairs = mSessionAns->GetNegotiatedTrackPairs();
-    for (auto i = pairs.begin(); i != pairs.end(); ++i) {
-      std::cerr << "Track pair " << i->mLevel << std::endl;
-      if (i->mSending) {
+    for (const auto& transceiver : mSessionAns->GetTransceivers()) {
+      std::cerr << "Transceiver ";
+      if (transceiver->HasLevel()) {
+        std::cerr << transceiver->GetLevel() << std::endl;
+      } else {
+        std::cerr << "<NO LEVEL>" << std::endl;
+      }
+      if (transceiver->HasBundleLevel()) {
+        std::cerr << "(bundle level is " << transceiver->BundleLevel() << ")"
+                  << std::endl;
+      }
+      if (!IsNull(transceiver->mSendTrack)) {
         std::cerr << "Sending-->" << std::endl;
-        DumpTrack(*i->mSending);
+        DumpTrack(transceiver->mSendTrack);
       }
-      if (i->mReceiving) {
+      if (!IsNull(transceiver->mRecvTrack)) {
         std::cerr << "Receiving-->" << std::endl;
-        DumpTrack(*i->mReceiving);
+        DumpTrack(transceiver->mRecvTrack);
       }
     }
   }
 
   UniquePtr<Sdp>
   Parse(const std::string& sdp) const
   {
     SipccSdpParser parser;
@@ -1333,34 +1636,36 @@ TEST_P(JsepSessionTest, RenegotiationNoC
 {
   AddTracks(*mSessionOff);
   std::string offer = CreateOffer();
   SetLocalOffer(offer);
   SetRemoteOffer(offer);
 
   auto added = mSessionAns->GetRemoteTracksAdded();
   auto removed = mSessionAns->GetRemoteTracksRemoved();
-  ASSERT_EQ(types.size(), added.size());
+  ASSERT_EQ(CountRtpTypes(), added.size());
   ASSERT_EQ(0U, removed.size());