Backed out 9 changesets (bug 1253740) for multiple xpcshell bustages
author Phil Ringnalda <philringnalda@gmail.com>
Sat, 29 Oct 2016 20:58:58 -0700
changeset 320177 796ad37656451b7be62498a75723947223b5c38b
parent 320176 e8475fb54d7d81733d82738b541407fbb6dfaf3a
child 320178 6b80fdaeadfc9414049f96e949b31b5262bc8d67
push id 20751
push user philringnalda@gmail.com
push date Sun, 30 Oct 2016 18:06:35 +0000
treeherder fx-team@e3279760cd97
bugs 1253740
milestone 52.0a1
backs out e8475fb54d7d81733d82738b541407fbb6dfaf3a
5e234fe1099c7fa65103b25eb0ed4c710f872f09
486a200fd6f086bdb2f32fa044f87a09144d0be2
97a6ee1fddfced804aa7e516d3fc5ff4feb30a00
5cf17eb2fefecb0df2e275f1d1757b0c0b73354a
f890f42b44c4e9278778b5df9aab6ab286d3c5c2
28cc5db83380266fcb941fa2681fd076e5ee7a56
eb537ef54d55ce066a59696ceca7da2f6871acb7
8f100caf82bf582efcebfd7df8d53e07ce90672f
Backed out 9 changesets (bug 1253740) for multiple xpcshell bustages

Backed out changeset e8475fb54d7d (bug 1253740)
Backed out changeset 5e234fe1099c (bug 1253740)
Backed out changeset 486a200fd6f0 (bug 1253740)
Backed out changeset 97a6ee1fddfc (bug 1253740)
Backed out changeset 5cf17eb2fefe (bug 1253740)
Backed out changeset f890f42b44c4 (bug 1253740)
Backed out changeset 28cc5db83380 (bug 1253740)
Backed out changeset eb537ef54d55 (bug 1253740)
Backed out changeset 8f100caf82bf (bug 1253740)
browser/app/profile/firefox.js
modules/libpref/init/all.js
services/sync/modules/engines/extension-storage.js
services/sync/modules/record.js
services/sync/modules/service.js
services/sync/modules/telemetry.js
services/sync/moz.build
services/sync/services-sync.js
services/sync/tests/unit/head_helpers.js
services/sync/tests/unit/test_extension_storage_crypto.js
services/sync/tests/unit/test_extension_storage_engine.js
services/sync/tests/unit/test_extension_storage_tracker.js
services/sync/tests/unit/test_load_modules.js
services/sync/tests/unit/test_records_crypto.js
services/sync/tests/unit/xpcshell.ini
toolkit/components/extensions/ExtensionStorageSync.jsm
toolkit/components/extensions/ext-c-storage.js
toolkit/components/extensions/ext-storage.js
toolkit/components/extensions/moz.build
toolkit/components/extensions/schemas/storage.json
toolkit/components/extensions/test/xpcshell/head_sync.js
toolkit/components/extensions/test/xpcshell/test_ext_storage.js
toolkit/components/extensions/test/xpcshell/test_ext_storage_sync.js
toolkit/components/extensions/test/xpcshell/xpcshell.ini
--- a/browser/app/profile/firefox.js
+++ b/browser/app/profile/firefox.js
@@ -1038,17 +1038,17 @@ pref("browser.taskbar.lists.enabled", tr
 pref("browser.taskbar.lists.frequent.enabled", true);
 pref("browser.taskbar.lists.recent.enabled", false);
 pref("browser.taskbar.lists.maxListItemCount", 7);
 pref("browser.taskbar.lists.tasks.enabled", true);
 pref("browser.taskbar.lists.refreshInSeconds", 120);
 #endif
 
 // The sync engines to use.
-pref("services.sync.registerEngines", "Bookmarks,Form,History,Password,Prefs,Tab,Addons,ExtensionStorage");
+pref("services.sync.registerEngines", "Bookmarks,Form,History,Password,Prefs,Tab,Addons");
 // Preferences to be synced by default
 pref("services.sync.prefs.sync.accessibility.blockautorefresh", true);
 pref("services.sync.prefs.sync.accessibility.browsewithcaret", true);
 pref("services.sync.prefs.sync.accessibility.typeaheadfind", true);
 pref("services.sync.prefs.sync.accessibility.typeaheadfind.linksonly", true);
 pref("services.sync.prefs.sync.addons.ignoreUserEnabledChanges", true);
 // The addons prefs related to repository verification are intentionally
 // not synced for security reasons. If a system is compromised, a user
--- a/modules/libpref/init/all.js
+++ b/modules/libpref/init/all.js
@@ -5455,24 +5455,16 @@ pref("toolkit.pageThumbs.screenSizeDivis
 pref("toolkit.pageThumbs.minWidth", 0);
 pref("toolkit.pageThumbs.minHeight", 0);
 
 pref("webextensions.tests", false);
 
 // 16MB default non-parseable upload limit for requestBody.raw.bytes
 pref("webextensions.webRequest.requestBodyMaxRawBytes", 16777216);
 
-// This functionality is still experimental
-pref("webextensions.storage.sync.enabled", false);
-#ifdef RELEASE_OR_BETA
-pref("webextensions.storage.sync.serverURL", "https://webextensions.settings.services.mozilla.com/v1");
-#else
-pref("webextensions.storage.sync.serverURL", "https://webextensions.dev.mozaws.net/v1");
-#endif
-
 // Allow customization of the fallback directory for file uploads
 pref("dom.input.fallbackUploadDir", "");
 
 // Turn rewriting of youtube embeds on/off
 pref("plugins.rewrite_youtube_embeds", true);
 
 // Don't hide Flash from navigator.plugins when it is click-to-activate
 pref("plugins.navigator_hide_disabled_flash", false);
deleted file mode 100644
--- a/services/sync/modules/engines/extension-storage.js
+++ /dev/null
@@ -1,272 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-"use strict";
-
-this.EXPORTED_SYMBOLS = ['ExtensionStorageEngine', 'EncryptionRemoteTransformer',
-                         'KeyRingEncryptionRemoteTransformer'];
-
-const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
-
-Cu.import("resource://services-crypto/utils.js");
-Cu.import("resource://services-sync/constants.js");
-Cu.import("resource://services-sync/engines.js");
-Cu.import("resource://services-sync/keys.js");
-Cu.import("resource://services-sync/util.js");
-Cu.import("resource://services-common/async.js");
-XPCOMUtils.defineLazyModuleGetter(this, "ExtensionStorageSync",
-                                  "resource://gre/modules/ExtensionStorageSync.jsm");
-XPCOMUtils.defineLazyModuleGetter(this, "fxAccounts",
-                                  "resource://gre/modules/FxAccounts.jsm");
-XPCOMUtils.defineLazyModuleGetter(this, "Task",
-                                  "resource://gre/modules/Task.jsm");
-
-/**
- * The Engine that manages syncing for the web extension "storage"
- * API, and in particular ext.storage.sync.
- *
- * ext.storage.sync is implemented using Kinto, so it has mechanisms
- * for syncing that we do not need to integrate in the Firefox Sync
- * framework, so this is something of a stub.
- */
-this.ExtensionStorageEngine = function ExtensionStorageEngine(service) {
-  SyncEngine.call(this, "Extension-Storage", service);
-};
-ExtensionStorageEngine.prototype = {
-  __proto__: SyncEngine.prototype,
-  _trackerObj: ExtensionStorageTracker,
-  // we don't need these since we implement our own sync logic
-  _storeObj: undefined,
-  _recordObj: undefined,
-
-  syncPriority: 10,
-
-  _sync: function () {
-    return Async.promiseSpinningly(ExtensionStorageSync.syncAll());
-  },
-
-  get enabled() {
-    // By default, we sync extension storage if we sync addons. This
-    // lets us simplify the UX since users probably don't consider
-    // "extension preferences" a separate category of syncing.
-    // However, we also respect engine.extension-storage.force, which
-    // can be set to true or false, if a power user wants to customize
-    // the behavior despite the lack of UI.
-    const forced = Svc.Prefs.get("engine." + this.prefName + ".force", undefined);
-    if (forced !== undefined) {
-      return forced;
-    }
-    return Svc.Prefs.get("engine.addons", false);
-  },
-};
-
-function ExtensionStorageTracker(name, engine) {
-  Tracker.call(this, name, engine);
-}
-ExtensionStorageTracker.prototype = {
-  __proto__: Tracker.prototype,
-
-  startTracking: function () {
-    Svc.Obs.add("ext.storage.sync-changed", this);
-  },
-
-  stopTracking: function () {
-    Svc.Obs.remove("ext.storage.sync-changed", this);
-  },
-
-  observe: function (subject, topic, data) {
-    Tracker.prototype.observe.call(this, subject, topic, data);
-
-    if (this.ignoreAll) {
-      return;
-    }
-
-    if (topic !== "ext.storage.sync-changed") {
-      return;
-    }
-
-    // Single adds, removes and changes are not so important on their
-    // own, so let's just increment score a bit.
-    this.score += SCORE_INCREMENT_MEDIUM;
-  },
-
-  // Override a bunch of methods which don't do anything for us.
-  // This is a performance hack.
-  saveChangedIDs: function() {
-  },
-  loadChangedIDs: function() {
-  },
-  ignoreID: function() {
-  },
-  unignoreID: function() {
-  },
-  addChangedID: function() {
-  },
-  removeChangedID: function() {
-  },
-  clearChangedIDs: function() {
-  },
-};
-
-/**
- * Utility function to enforce an order of fields when computing an HMAC.
- */
-function ciphertextHMAC(keyBundle, id, IV, ciphertext) {
-  const hasher = keyBundle.sha256HMACHasher;
-  return Utils.bytesAsHex(Utils.digestUTF8(id + IV + ciphertext, hasher));
-}
-
-/**
- * A "remote transformer" that the Kinto library will use to
- * encrypt/decrypt records when syncing.
- *
- * This is an "abstract base class". Subclass this and override
- * getKeys() to use it.
- */
-class EncryptionRemoteTransformer {
-  encode(record) {
-    const self = this;
-    return Task.spawn(function* () {
-      const keyBundle = yield self.getKeys();
-      if (record.ciphertext) {
-        throw new Error("Attempt to reencrypt??");
-      }
-      let id = record.id;
-      if (!record.id) {
-        throw new Error("Record ID is missing or invalid");
-      }
-
-      let IV = Svc.Crypto.generateRandomIV();
-      let ciphertext = Svc.Crypto.encrypt(JSON.stringify(record),
-                                          keyBundle.encryptionKeyB64, IV);
-      let hmac = ciphertextHMAC(keyBundle, id, IV, ciphertext);
-      const encryptedResult = {ciphertext, IV, hmac, id};
-      if (record.hasOwnProperty("last_modified")) {
-        encryptedResult.last_modified = record.last_modified;
-      }
-      return encryptedResult;
-    });
-  }
-
-  decode(record) {
-    const self = this;
-    return Task.spawn(function* () {
-      const keyBundle = yield self.getKeys();
-      if (!record.ciphertext) {
-        throw new Error("No ciphertext: nothing to decrypt?");
-      }
-      // Authenticate the encrypted blob with the expected HMAC
-      let computedHMAC = ciphertextHMAC(keyBundle, record.id, record.IV, record.ciphertext);
-
-      if (computedHMAC != record.hmac) {
-        Utils.throwHMACMismatch(record.hmac, computedHMAC);
-      }
-
-      // Handle invalid data here. Elsewhere we assume that cleartext is an object.
-      let cleartext = Svc.Crypto.decrypt(record.ciphertext,
-                                         keyBundle.encryptionKeyB64, record.IV);
-      let jsonResult = JSON.parse(cleartext);
-      if (!jsonResult || typeof jsonResult !== "object") {
-        throw new Error("Decryption failed: result is <" + jsonResult + ">, not an object.");
-      }
-
-      // Verify that the encrypted id matches the requested record's id.
-      // This should always be true, because we compute the HMAC over
-      // the original record's ID, and that was verified already (above).
-      if (jsonResult.id != record.id) {
-        throw new Error("Record id mismatch: " + jsonResult.id + " != " + record.id);
-      }
-
-      if (record.hasOwnProperty("last_modified")) {
-        jsonResult.last_modified = record.last_modified;
-      }
-
-      return jsonResult;
-    });
-  }
-
-  /**
-   * Retrieve keys to use during encryption.
-   *
-   * Returns a Promise<KeyBundle>.
-   */
-  getKeys() {
-    throw new Error("override getKeys in a subclass");
-  }
-}
-// You can inject this
-EncryptionRemoteTransformer.prototype._fxaService = fxAccounts;
-
-/**
- * An EncryptionRemoteTransformer that provides a keybundle derived
- * from the user's kB, suitable for encrypting a keyring.
- */
-class KeyRingEncryptionRemoteTransformer extends EncryptionRemoteTransformer {
-  getKeys() {
-    const self = this;
-    return Task.spawn(function* () {
-      const user = yield self._fxaService.getSignedInUser();
-      // FIXME: we should permit this if the user is self-hosting
-      // their storage
-      if (!user) {
-        throw new Error("user isn't signed in to FxA; can't sync");
-      }
-
-      if (!user.kB) {
-        throw new Error("user doesn't have kB");
-      }
-
-      let kB = Utils.hexToBytes(user.kB);
-
-      let keyMaterial = CryptoUtils.hkdf(kB, undefined,
-                                       "identity.mozilla.com/picl/v1/chrome.storage.sync", 2*32);
-      let bundle = new BulkKeyBundle();
-      // [encryptionKey, hmacKey]
-      bundle.keyPair = [keyMaterial.slice(0, 32), keyMaterial.slice(32, 64)];
-      return bundle;
-    });
-  }
-  // Pass through the kbHash field from the unencrypted record. If
-  // encryption fails, we can use this to try to detect whether we are
-  // being compromised or if the record here was encoded with a
-  // different kB.
-  encode(record) {
-    const encodePromise = super.encode(record);
-    return Task.spawn(function* () {
-      const encoded = yield encodePromise;
-      encoded.kbHash = record.kbHash;
-      return encoded;
-    });
-  }
-
-  decode(record) {
-    const decodePromise = super.decode(record);
-    return Task.spawn(function* () {
-      try {
-        return yield decodePromise;
-      } catch (e) {
-        if (Utils.isHMACMismatch(e)) {
-          const currentKBHash = yield ExtensionStorageSync.getKBHash();
-          if (record.kbHash != currentKBHash) {
-            // Some other client encoded this with a kB that we don't
-            // have access to.
-            KeyRingEncryptionRemoteTransformer.throwOutdatedKB(currentKBHash, record.kbHash);
-          }
-        }
-        throw e;
-      }
-    });
-  }
-
-  // Generator and discriminator for KB-is-outdated exceptions.
-  static throwOutdatedKB(shouldBe, is) {
-    throw new Error(`kB hash on record is outdated: should be ${shouldBe}, is ${is}`);
-  }
-
-  static isOutdatedKB(exc) {
-    const kbMessage = "kB hash on record is outdated: ";
-    return exc && exc.message && exc.message.indexOf &&
-      (exc.message.indexOf(kbMessage) == 0);
-  }
-}
--- a/services/sync/modules/record.js
+++ b/services/sync/modules/record.js
@@ -276,41 +276,28 @@ RecordManager.prototype = {
 };
 
 /**
  * Keeps track of mappings between collection names ('tabs') and KeyBundles.
  *
  * You can update this thing simply by giving it /info/collections. It'll
  * use the last modified time to bring itself up to date.
  */
-this.CollectionKeyManager = function CollectionKeyManager(lastModified, default_, collections) {
-  this.lastModified = lastModified || 0;
-  this._default = default_ || null;
-  this._collections = collections || {};
+this.CollectionKeyManager = function CollectionKeyManager() {
+  this.lastModified = 0;
+  this._collections = {};
+  this._default = null;
 
   this._log = Log.repository.getLogger("Sync.CollectionKeyManager");
 }
 
 // TODO: persist this locally as an Identity. Bug 610913.
 // Note that the last modified time needs to be preserved.
 CollectionKeyManager.prototype = {
 
-  /**
-   * Generate a new CollectionKeyManager that has the same attributes
-   * as this one.
-   */
-  clone() {
-    const newCollections = {};
-    for (let c in this._collections) {
-      newCollections[c] = this._collections[c];
-    }
-
-    return new CollectionKeyManager(this.lastModified, this._default, newCollections);
-  },
-
   // Return information about old vs new keys:
   // * same: true if two collections are equal
   // * changed: an array of collection names that changed.
   _compareKeyBundleCollections: function _compareKeyBundleCollections(m1, m2) {
     let changed = [];
 
     function process(m1, m2) {
       for (let k1 in m1) {
@@ -377,91 +364,41 @@ CollectionKeyManager.prototype = {
   asWBO: function(collection, id) {
     return this._makeWBO(this._collections, this._default);
   },
 
   /**
    * Compute a new default key, and new keys for any specified collections.
    */
   newKeys: function(collections) {
-    let newDefaultKeyBundle = this.newDefaultKeyBundle();
+    let newDefaultKey = new BulkKeyBundle(DEFAULT_KEYBUNDLE_NAME);
+    newDefaultKey.generateRandom();
 
     let newColls = {};
     if (collections) {
       collections.forEach(function (c) {
         let b = new BulkKeyBundle(c);
         b.generateRandom();
         newColls[c] = b;
       });
     }
-    return [newDefaultKeyBundle, newColls];
+    return [newDefaultKey, newColls];
   },
 
   /**
    * Generates new keys, but does not replace our local copy. Use this to
    * verify an upload before storing.
    */
   generateNewKeysWBO: function(collections) {
     let newDefaultKey, newColls;
     [newDefaultKey, newColls] = this.newKeys(collections);
 
     return this._makeWBO(newColls, newDefaultKey);
   },
 
-  /**
-   * Create a new default key.
-   *
-   * @returns {BulkKeyBundle}
-   */
-  newDefaultKeyBundle() {
-    const key = new BulkKeyBundle(DEFAULT_KEYBUNDLE_NAME);
-    key.generateRandom();
-    return key;
-  },
-
-  /**
-   * Create a new default key and store it as this._default, since without one you cannot use setContents.
-   */
-  generateDefaultKey() {
-    this._default = this.newDefaultKeyBundle();
-  },
-
-  /**
-   * Return true if keys are already present for each of the given
-   * collections.
-   */
-  hasKeysFor(collections) {
-    // We can't use filter() here because sometimes collections is an iterator.
-    for (let collection of collections) {
-      if (!this._collections[collection]) {
-        return false;
-      }
-    }
-    return true;
-  },
-
-  /**
-   * Return a new CollectionKeyManager that has keys for each of the
-   * given collections (creating new ones for collections where we
-   * don't already have keys).
-   */
-  ensureKeysFor(collections) {
-    const newKeys = Object.assign({}, this._collections);
-    for (let c of collections) {
-      if (newKeys[c]) {
-        continue;  // don't replace existing keys
-      }
-
-      const b = new BulkKeyBundle(c);
-      b.generateRandom();
-      newKeys[c] = b;
-    }
-    return new CollectionKeyManager(this.lastModified, this._default, newKeys);
-  },
-
   // Take the fetched info/collections WBO, checking the change
   // time of the crypto collection.
   updateNeeded: function(info_collections) {
 
     this._log.info("Testing for updateNeeded. Last modified: " + this.lastModified);
 
     // No local record of modification time? Need an update.
     if (!this.lastModified)
@@ -482,16 +419,19 @@ CollectionKeyManager.prototype = {
   //
   // * If the default key was modified, return true.
   // * If the default key was not modified, but per-collection keys were,
   //   return an array of such.
   // * Otherwise, return false -- we were up-to-date.
   //
   setContents: function setContents(payload, modified) {
 
+    if (!modified)
+      throw "No modified time provided to setContents.";
+
     let self = this;
 
     this._log.info("Setting collection keys contents. Our last modified: " +
                    this.lastModified + ", input modified: " + modified + ".");
 
     if (!payload)
       throw "No payload in CollectionKeyManager.setContents().";
 
@@ -511,47 +451,44 @@ CollectionKeyManager.prototype = {
     if ("collections" in payload) {
       this._log.info("Processing downloaded per-collection keys.");
       let colls = payload.collections;
       for (let k in colls) {
         let v = colls[k];
         if (v) {
           let keyObj = new BulkKeyBundle(k);
           keyObj.keyPairB64 = v;
-          newCollections[k] = keyObj;
+          if (keyObj) {
+            newCollections[k] = keyObj;
+          }
         }
       }
     }
 
     // Check to see if these are already our keys.
     let sameDefault = (this._default && this._default.equals(newDefault));
     let collComparison = this._compareKeyBundleCollections(newCollections, this._collections);
     let sameColls = collComparison.same;
 
     if (sameDefault && sameColls) {
-      self._log.info("New keys are the same as our old keys!");
-      if (modified) {
-        self._log.info("Bumped local modified time.");
-        self.lastModified = modified;
-      }
+      self._log.info("New keys are the same as our old keys! Bumped local modified time.");
+      self.lastModified = modified;
       return false;
     }
 
     // Make sure things are nice and tidy before we set.
     this.clear();
 
     this._log.info("Saving downloaded keys.");
     this._default     = newDefault;
     this._collections = newCollections;
 
     // Always trust the server.
-    if (modified) {
-      self._log.info("Bumping last modified to " + modified);
-      self.lastModified = modified;
-    }
+    self._log.info("Bumping last modified to " + modified);
+    self.lastModified = modified;
 
     return sameDefault ? collComparison.changed : true;
   },
 
   updateContents: function updateContents(syncKeyBundle, storage_keys) {
     let log = this._log;
     log.info("Updating collection keys...");
 
--- a/services/sync/modules/service.js
+++ b/services/sync/modules/service.js
@@ -39,17 +39,16 @@ Cu.import("resource://services-sync/util
 const ENGINE_MODULES = {
   Addons: "addons.js",
   Bookmarks: "bookmarks.js",
   Form: "forms.js",
   History: "history.js",
   Password: "passwords.js",
   Prefs: "prefs.js",
   Tab: "tabs.js",
-  ExtensionStorage: "extension-storage.js",
 };
 
 const STORAGE_INFO_TYPES = [INFO_COLLECTIONS,
                             INFO_COLLECTION_USAGE,
                             INFO_COLLECTION_COUNTS,
                             INFO_QUOTA];
 
 function Sync11Service() {
--- a/services/sync/modules/telemetry.js
+++ b/services/sync/modules/telemetry.js
@@ -46,17 +46,17 @@ const TOPICS = [
   "weave:engine:validate:finish",
   "weave:engine:validate:error",
 ];
 
 const PING_FORMAT_VERSION = 1;
 
 // The set of engines we record telemetry for - any other engines are ignored.
 const ENGINES = new Set(["addons", "bookmarks", "clients", "forms", "history",
-                         "passwords", "prefs", "tabs", "extension-storage"]);
+                         "passwords", "prefs", "tabs"]);
 
 // A regex we can use to replace the profile dir in error messages. We use a
 // regexp so we can simply replace all case-insensitive occurrences.
 // This escaping function is from:
 // https://developer.mozilla.org/en/docs/Web/JavaScript/Guide/Regular_Expressions
 const reProfileDir = new RegExp(
         OS.Constants.Path.profileDir.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"),
         "gi");
--- a/services/sync/moz.build
+++ b/services/sync/moz.build
@@ -47,17 +47,16 @@ EXTRA_PP_JS_MODULES['services-sync'] += 
 # Definitions used by constants.js
 DEFINES['weave_version'] = '1.54.0'
 DEFINES['weave_id'] = '{340c2bbc-ce74-4362-90b5-7c26312808ef}'
 
 EXTRA_JS_MODULES['services-sync'].engines += [
     'modules/engines/addons.js',
     'modules/engines/bookmarks.js',
     'modules/engines/clients.js',
-    'modules/engines/extension-storage.js',
     'modules/engines/forms.js',
     'modules/engines/history.js',
     'modules/engines/passwords.js',
     'modules/engines/prefs.js',
     'modules/engines/tabs.js',
 ]
 
 EXTRA_JS_MODULES['services-sync'].stages += [
--- a/services/sync/services-sync.js
+++ b/services/sync/services-sync.js
@@ -63,17 +63,16 @@ pref("services.sync.log.logger.service.j
 pref("services.sync.log.logger.engine.bookmarks", "Debug");
 pref("services.sync.log.logger.engine.clients", "Debug");
 pref("services.sync.log.logger.engine.forms", "Debug");
 pref("services.sync.log.logger.engine.history", "Debug");
 pref("services.sync.log.logger.engine.passwords", "Debug");
 pref("services.sync.log.logger.engine.prefs", "Debug");
 pref("services.sync.log.logger.engine.tabs", "Debug");
 pref("services.sync.log.logger.engine.addons", "Debug");
-pref("services.sync.log.logger.engine.extension-storage", "Debug");
 pref("services.sync.log.logger.engine.apps", "Debug");
 pref("services.sync.log.logger.identity", "Debug");
 pref("services.sync.log.logger.userapi", "Debug");
 pref("services.sync.log.cryptoDebug", false);
 
 pref("services.sync.fxa.termsURL", "https://accounts.firefox.com/legal/terms");
 pref("services.sync.fxa.privacyURL", "https://accounts.firefox.com/legal/privacy");
 
--- a/services/sync/tests/unit/head_helpers.js
+++ b/services/sync/tests/unit/head_helpers.js
@@ -71,34 +71,16 @@ function ExtensionsTestPath(path) {
 function loadAddonTestFunctions() {
   const path = ExtensionsTestPath("/head_addons.js");
   let file = do_get_file(path);
   let uri = Services.io.newFileURI(file);
   Services.scriptloader.loadSubScript(uri.spec, gGlobalScope);
   createAppInfo("xpcshell@tests.mozilla.org", "XPCShell", "1", "1.9.2");
 }
 
-function webExtensionsTestPath(path) {
-  if (path[0] != "/") {
-    throw Error("Path must begin with '/': " + path);
-  }
-
-  return "../../../../toolkit/components/extensions/test/xpcshell" + path;
-}
-
-/**
- * Loads the WebExtension test functions by importing its test file.
- */
-function loadWebExtensionTestFunctions() {
-  const path = webExtensionsTestPath("/head_sync.js");
-  let file = do_get_file(path);
-  let uri = Services.io.newFileURI(file);
-  Services.scriptloader.loadSubScript(uri.spec, gGlobalScope);
-}
-
 function getAddonInstall(name) {
   let f = do_get_file(ExtensionsTestPath("/addons/" + name + ".xpi"));
   let cb = Async.makeSyncCallback();
   AddonManager.getInstallForFile(f, cb);
 
   return Async.waitForSyncCallback(cb);
 }
 
deleted file mode 100644
--- a/services/sync/tests/unit/test_extension_storage_crypto.js
+++ /dev/null
@@ -1,93 +0,0 @@
-/* Any copyright is dedicated to the Public Domain.
- * http://creativecommons.org/publicdomain/zero/1.0/ */
-
-"use strict";
-
-Cu.import("resource://services-crypto/utils.js");
-Cu.import("resource://services-sync/engines/extension-storage.js");
-Cu.import("resource://services-sync/util.js");
-
-/**
- * Like Assert.throws, but for generators.
- *
- * @param {string | Object | function} constraint
- *        What to use to check the exception.
- * @param {function} f
- *        The function to call.
- */
-function* throwsGen(constraint, f) {
-  let threw = false;
-  let exception;
-  try {
-    yield* f();
-  }
-  catch (e) {
-    threw = true;
-    exception = e;
-  }
-
-  ok(threw, "did not throw an exception");
-
-  const debuggingMessage = `got ${exception}, expected ${constraint}`;
-  let message = exception;
-  if (typeof exception === "object") {
-    message = exception.message;
-  }
-
-  if (typeof constraint === "function") {
-    ok(constraint(message), debuggingMessage);
-  } else {
-    ok(constraint === message, debuggingMessage);
-  }
-
-}
-
-/**
- * An EncryptionRemoteTransformer that uses a fixed key bundle,
- * suitable for testing.
- */
-class StaticKeyEncryptionRemoteTransformer extends EncryptionRemoteTransformer {
-  constructor(keyBundle) {
-    super();
-    this.keyBundle = keyBundle;
-  }
-
-  getKeys() {
-    return Promise.resolve(this.keyBundle);
-  }
-}
-const BORING_KB = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef";
-const STRETCHED_KEY = CryptoUtils.hkdf(BORING_KB, undefined, `testing storage.sync encryption`, 2*32);
-const KEY_BUNDLE = {
-  sha256HMACHasher: Utils.makeHMACHasher(Ci.nsICryptoHMAC.SHA256, Utils.makeHMACKey(STRETCHED_KEY.slice(0, 32))),
-  encryptionKeyB64: btoa(STRETCHED_KEY.slice(32, 64)),
-};
-const transformer = new StaticKeyEncryptionRemoteTransformer(KEY_BUNDLE);
-
-add_task(function* test_encryption_transformer_roundtrip() {
-  const POSSIBLE_DATAS = [
-    "string",
-    2,          // number
-    [1, 2, 3],  // array
-    {key: "value"}, // object
-  ];
-
-  for (let data of POSSIBLE_DATAS) {
-    const record = {data: data, id: "key-some_2D_key", key: "some-key"};
-
-    deepEqual(record, yield transformer.decode(yield transformer.encode(record)));
-  }
-});
-
-add_task(function* test_refuses_to_decrypt_tampered() {
-  const encryptedRecord = yield transformer.encode({data: [1, 2, 3], id: "key-some_2D_key", key: "some-key"});
-  const tamperedHMAC = Object.assign({}, encryptedRecord, {hmac: "0000000000000000000000000000000000000000000000000000000000000001"});
-  yield* throwsGen(Utils.isHMACMismatch, function*() {
-    yield transformer.decode(tamperedHMAC);
-  });
-
-  const tamperedIV = Object.assign({}, encryptedRecord, {IV: "aaaaaaaaaaaaaaaaaaaaaa=="});
-  yield* throwsGen(Utils.isHMACMismatch, function*() {
-    yield transformer.decode(tamperedIV);
-  });
-});
deleted file mode 100644
--- a/services/sync/tests/unit/test_extension_storage_engine.js
+++ /dev/null
@@ -1,62 +0,0 @@
-/* Any copyright is dedicated to the Public Domain.
- * http://creativecommons.org/publicdomain/zero/1.0/ */
-
-"use strict";
-
-Cu.import("resource://services-sync/engines.js");
-Cu.import("resource://services-sync/engines/extension-storage.js");
-Cu.import("resource://services-sync/service.js");
-Cu.import("resource://services-sync/util.js");
-Cu.import("resource://testing-common/services/sync/utils.js");
-Cu.import("resource://gre/modules/ExtensionStorageSync.jsm");
-
-Service.engineManager.register(ExtensionStorageEngine);
-const engine = Service.engineManager.get("extension-storage");
-do_get_profile();   // so we can use FxAccounts
-loadWebExtensionTestFunctions();
-
-function mock(options) {
-  let calls = [];
-  let ret = function() {
-    calls.push(arguments);
-    return options.returns;
-  }
-  Object.setPrototypeOf(ret, {
-    __proto__: Function.prototype,
-    get calls() {
-      return calls;
-    }
-  });
-  return ret;
-}
-
-add_task(function* test_calling_sync_calls__sync() {
-  let oldSync = ExtensionStorageEngine.prototype._sync;
-  let syncMock = ExtensionStorageEngine.prototype._sync = mock({returns: true});
-  try {
-    // I wanted to call the main sync entry point for the entire
-    // package, but that fails because it tries to sync ClientEngine
-    // first, which fails.
-    yield engine.sync();
-  } finally {
-    ExtensionStorageEngine.prototype._sync = oldSync;
-  }
-  equal(syncMock.calls.length, 1);
-});
-
-add_task(function* test_calling_sync_calls_ext_storage_sync() {
-  const extension = {id: "my-extension"};
-  let oldSync = ExtensionStorageSync.syncAll;
-  let syncMock = ExtensionStorageSync.syncAll = mock({returns: Promise.resolve()});
-  try {
-    yield* withSyncContext(function* (context) {
-      // Set something so that everyone knows that we're using storage.sync
-      yield ExtensionStorageSync.set(extension, {"a": "b"}, context);
-
-      yield engine._sync();
-    });
-  } finally {
-    ExtensionStorageSync.syncAll = oldSync;
-  }
-  do_check_true(syncMock.calls.length >= 1);
-});
deleted file mode 100644
--- a/services/sync/tests/unit/test_extension_storage_tracker.js
+++ /dev/null
@@ -1,38 +0,0 @@
-/* Any copyright is dedicated to the Public Domain.
- * http://creativecommons.org/publicdomain/zero/1.0/ */
-
-"use strict";
-
-Cu.import("resource://services-sync/constants.js");
-Cu.import("resource://services-sync/engines.js");
-Cu.import("resource://services-sync/engines/extension-storage.js");
-Cu.import("resource://services-sync/service.js");
-Cu.import("resource://services-sync/util.js");
-Cu.import("resource://gre/modules/ExtensionStorageSync.jsm");
-
-Service.engineManager.register(ExtensionStorageEngine);
-const engine = Service.engineManager.get("extension-storage");
-do_get_profile();   // so we can use FxAccounts
-loadWebExtensionTestFunctions();
-
-add_task(function* test_changing_extension_storage_changes_score() {
-  const tracker = engine._tracker;
-  const extension = {id: "my-extension-id"};
-  Svc.Obs.notify("weave:engine:start-tracking");
-  yield* withSyncContext(function*(context) {
-    yield ExtensionStorageSync.set(extension, {"a": "b"}, context);
-  });
-  do_check_eq(tracker.score, SCORE_INCREMENT_MEDIUM);
-
-  tracker.resetScore();
-  yield* withSyncContext(function*(context) {
-    yield ExtensionStorageSync.remove(extension, "a", context);
-  });
-  do_check_eq(tracker.score, SCORE_INCREMENT_MEDIUM);
-
-  Svc.Obs.notify("weave:engine:stop-tracking");
-});
-
-function run_test() {
-  run_next_test();
-}
--- a/services/sync/tests/unit/test_load_modules.js
+++ b/services/sync/tests/unit/test_load_modules.js
@@ -4,17 +4,16 @@
 const modules = [
   "addonutils.js",
   "addonsreconciler.js",
   "browserid_identity.js",
   "constants.js",
   "engines/addons.js",
   "engines/bookmarks.js",
   "engines/clients.js",
-  "engines/extension-storage.js",
   "engines/forms.js",
   "engines/history.js",
   "engines/passwords.js",
   "engines/prefs.js",
   "engines/tabs.js",
   "engines.js",
   "identity.js",
   "jpakeclient.js",
--- a/services/sync/tests/unit/test_records_crypto.js
+++ b/services/sync/tests/unit/test_records_crypto.js
@@ -143,40 +143,14 @@ function run_test() {
     }
     do_check_eq("Record SHA256 HMAC mismatch", err.substr(0, 27));
 
     // Explicitly check that it's using the bookmarks key.
     // This should succeed.
     do_check_eq(bookmarkItem.decrypt(Service.collectionKeys.keyForCollection("bookmarks")).stuff,
         "my payload here");
 
-    do_check_true(Service.collectionKeys.hasKeysFor(["bookmarks"]));
-
-    // Add a key for some new collection and verify that it isn't the
-    // default key.
-    do_check_false(Service.collectionKeys.hasKeysFor(["forms"]));
-    do_check_false(Service.collectionKeys.hasKeysFor(["bookmarks", "forms"]));
-    let oldFormsKey = Service.collectionKeys.keyForCollection("forms");
-    do_check_eq(oldFormsKey, Service.collectionKeys._default);
-    let newKeys = Service.collectionKeys.ensureKeysFor(["forms"]);
-    do_check_true(newKeys.hasKeysFor(["forms"]));
-    do_check_true(newKeys.hasKeysFor(["bookmarks", "forms"]));
-    let newFormsKey = newKeys.keyForCollection("forms");
-    do_check_neq(newFormsKey, oldFormsKey);
-
-    // Verify that this doesn't overwrite keys
-    let regetKeys = newKeys.ensureKeysFor(["forms"]);
-    do_check_eq(regetKeys.keyForCollection("forms"), newFormsKey);
-
-    const emptyKeys = new CollectionKeyManager();
-    payload = {
-      default: Service.collectionKeys._default.keyPairB64,
-      collections: {}
-    };
-    // Verify that not passing `modified` doesn't throw
-    emptyKeys.setContents(payload, null);
-
     log.info("Done!");
   }
   finally {
     server.stop(do_test_finished);
   }
 }
--- a/services/sync/tests/unit/xpcshell.ini
+++ b/services/sync/tests/unit/xpcshell.ini
@@ -10,17 +10,16 @@ support-files =
   missing-sourceuri.xml
   missing-xpi-search.xml
   places_v10_from_v11.sqlite
   rewrite-search.xml
   sync_ping_schema.json
   systemaddon-search.xml
   !/services/common/tests/unit/head_helpers.js
   !/toolkit/mozapps/extensions/test/xpcshell/head_addons.js
-  !/toolkit/components/extensions/test/xpcshell/head_sync.js
 
 # The manifest is roughly ordered from low-level to high-level. When making
 # systemic sweeping changes, this makes it easier to identify errors closer to
 # the source.
 
 # Ensure we can import everything.
 [test_load_modules.js]
 
@@ -158,19 +157,16 @@ tags = addons
 [test_bookmark_smart_bookmarks.js]
 [test_bookmark_store.js]
 # Too many intermittent "ASSERTION: thread pool wasn't shutdown: '!mPool'" (bug 804479)
 skip-if = debug
 [test_bookmark_tracker.js]
 [test_bookmark_validator.js]
 [test_clients_engine.js]
 [test_clients_escape.js]
-[test_extension_storage_crypto.js]
-[test_extension_storage_engine.js]
-[test_extension_storage_tracker.js]
 [test_forms_store.js]
 [test_forms_tracker.js]
 # Too many intermittent "ASSERTION: thread pool wasn't shutdown: '!mPool'" (bug 804479)
 skip-if = debug
 [test_history_engine.js]
 [test_history_store.js]
 [test_history_tracker.js]
 # Too many intermittent "ASSERTION: thread pool wasn't shutdown: '!mPool'" (bug 804479)
deleted file mode 100644
--- a/toolkit/components/extensions/ExtensionStorageSync.jsm
+++ /dev/null
@@ -1,909 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-// TODO:
-// * find out how the Chrome implementation deals with conflicts
-
-"use strict";
-
-/* exported extensionIdToCollectionId */
-
-this.EXPORTED_SYMBOLS = ["ExtensionStorageSync"];
-
-const Ci = Components.interfaces;
-const Cc = Components.classes;
-const Cu = Components.utils;
-const Cr = Components.results;
-const global = this;
-
-Cu.import("resource://gre/modules/AppConstants.jsm");
-const KINTO_PROD_SERVER_URL = "https://webextensions.settings.services.mozilla.com/v1";
-const KINTO_DEV_SERVER_URL = "https://webextensions.dev.mozaws.net/v1";
-const KINTO_DEFAULT_SERVER_URL = AppConstants.RELEASE_OR_BETA ? KINTO_PROD_SERVER_URL : KINTO_DEV_SERVER_URL;
-
-const STORAGE_SYNC_ENABLED_PREF = "webextensions.storage.sync.enabled";
-const STORAGE_SYNC_SERVER_URL_PREF = "webextensions.storage.sync.serverURL";
-const STORAGE_SYNC_SCOPE = "sync:addon_storage";
-const STORAGE_SYNC_CRYPTO_COLLECTION_NAME = "storage-sync-crypto";
-const STORAGE_SYNC_CRYPTO_KEYRING_RECORD_ID = "keys";
-const FXA_OAUTH_OPTIONS = {
-  scope: STORAGE_SYNC_SCOPE,
-};
-// Default is 5sec, which seems a bit aggressive on the open internet
-const KINTO_REQUEST_TIMEOUT = 30000;
-
-Cu.import("resource://gre/modules/XPCOMUtils.jsm");
-const {
-  runSafeSyncWithoutClone,
-} = Cu.import("resource://gre/modules/ExtensionUtils.jsm");
-
-XPCOMUtils.defineLazyModuleGetter(this, "AppsUtils",
-                                  "resource://gre/modules/AppsUtils.jsm");
-XPCOMUtils.defineLazyModuleGetter(this, "CollectionKeyManager",
-                                  "resource://services-sync/record.js");
-XPCOMUtils.defineLazyModuleGetter(this, "CommonUtils",
-                                  "resource://services-common/utils.js");
-XPCOMUtils.defineLazyModuleGetter(this, "CryptoUtils",
-                                  "resource://services-crypto/utils.js");
-XPCOMUtils.defineLazyModuleGetter(this, "EncryptionRemoteTransformer",
-                                  "resource://services-sync/engines/extension-storage.js");
-XPCOMUtils.defineLazyModuleGetter(this, "ExtensionStorage",
-                                  "resource://gre/modules/ExtensionStorage.jsm");
-XPCOMUtils.defineLazyModuleGetter(this, "fxAccounts",
-                                  "resource://gre/modules/FxAccounts.jsm");
-XPCOMUtils.defineLazyModuleGetter(this, "KintoHttpClient",
-                                  "resource://services-common/kinto-http-client.js");
-XPCOMUtils.defineLazyModuleGetter(this, "loadKinto",
-                                  "resource://services-common/kinto-offline-client.js");
-XPCOMUtils.defineLazyModuleGetter(this, "Log",
-                                  "resource://gre/modules/Log.jsm");
-XPCOMUtils.defineLazyModuleGetter(this, "Observers",
-                                  "resource://services-common/observers.js");
-XPCOMUtils.defineLazyModuleGetter(this, "Task",
-                                  "resource://gre/modules/Task.jsm");
-XPCOMUtils.defineLazyModuleGetter(this, "KeyRingEncryptionRemoteTransformer",
-                                  "resource://services-sync/engines/extension-storage.js");
-XPCOMUtils.defineLazyPreferenceGetter(this, "prefPermitsStorageSync",
-                                      STORAGE_SYNC_ENABLED_PREF, false);
-XPCOMUtils.defineLazyPreferenceGetter(this, "prefStorageSyncServerURL",
-                                      STORAGE_SYNC_SERVER_URL_PREF,
-                                      KINTO_DEFAULT_SERVER_URL);
-
-/* globals prefPermitsStorageSync, prefStorageSyncServerURL */
-
-// Map of Extensions to Promise<Collections>.
-const collectionPromises = new Map();
-// Map of Extensions to Set<Contexts> to track contexts that are still
-// "live" and could still use this collection.
-const extensionContexts = new WeakMap();
-// Borrow logger from Sync.
-const log = Log.repository.getLogger("Sync.Engine.Extension-Storage");
-
-// Kinto record IDs have two conditions:
-//
-// - They must contain only ASCII alphanumerics plus - and _. To fix
-// this, we encode all non-letters using _C_, where C is the
-// percent-encoded character, so space becomes _20_
-// and underscore becomes _5F_.
-//
-// - They must start with an ASCII letter. To ensure this, we prefix
-// all keys with "key-".
-function keyToId(key) {
-  function escapeChar(match) {
-    return "_" + match.codePointAt(0).toString(16).toUpperCase() + "_";
-  }
-  return "key-" + key.replace(/[^a-zA-Z0-9]/g, escapeChar);
-}
-
-// Convert a Kinto ID back into a chrome.storage key.
-// Returns null if a key couldn't be parsed.
-function idToKey(id) {
-  function unescapeNumber(match, group1) {
-    return String.fromCodePoint(parseInt(group1, 16));
-  }
-  // An escaped ID should match this regex.
-  // An escaped ID should consist of only letters and numbers, plus
-  // code points escaped as _[0-9a-f]+_.
-  const ESCAPED_ID_FORMAT = /^(?:[a-zA-Z0-9]|_[0-9A-F]+_)*$/;
-
-  if (!id.startsWith("key-")) {
-    return null;
-  }
-  const unprefixed = id.slice(4);
-  // Verify that the ID is the correct format.
-  if (!ESCAPED_ID_FORMAT.test(unprefixed)) {
-    return null;
-  }
-  return unprefixed.replace(/_([0-9A-F]+)_/g, unescapeNumber);
-}
-
-// An "id schema" used to validate Kinto IDs and generate new ones.
-const storageSyncIdSchema = {
-  // We should never generate IDs; chrome.storage only acts as a
-  // key-value store, so we should always have a key.
-  generate() {
-    throw new Error("cannot generate IDs");
-  },
-
-  // See keyToId and idToKey for more details.
-  validate(id) {
-    return idToKey(id) !== null;
-  },
-};
-
-/**
- * Return a KintoBase object, suitable for using in Firefox.
- *
- * This centralizes the logic used to create Kinto instances, which
- * we will need to do in several places.
- *
- * @returns {Kinto}
- */
-function makeKinto() {
-  const Kinto = loadKinto();
-  return new Kinto({
-    adapter: Kinto.adapters.FirefoxAdapter,
-    adapterOptions: {path: "storage-sync.sqlite"},
-    timeout: KINTO_REQUEST_TIMEOUT,
-  });
-}
-
-// An "id schema" used for the system collection, which doesn't
-// require validation or generation of IDs.
-const cryptoCollectionIdSchema = {
-  generate() {
-    throw new Error("cannot generate IDs for system collection");
-  },
-
-  validate(id) {
-    return true;
-  },
-};
-
-/**
- * Wrapper around the global handle on the crypto collection.
- *
- * Responsible for making sure that the handle is cleaned up when not
- * in use, and opened when it might be in use.
- *
- * We need a global state here because the transformers need access to
- * the state, even though the transformers are created outside the
- * lifetime of a single sync.
- */
-const cryptoCollection = this.cryptoCollection = {
-  /**
-   * The current outstanding number of handles.
-   */
-  refCount: 0,
-
-  /**
-   * A promise for the real underlying Kinto Collection object.
-   *
-   * This will be set and unset as a function of the
-   * incrementUses/decrementUses calls.
-   */
-  _kintoCollectionPromise: null,
-
-  /**
-   * Call this to register your use of the cryptoCollection.
-   *
-   * Be sure to call `decrementUses()` when you're not going to use
-   * this any more.
-   */
-  incrementUses: Task.async(function* () {
-    const oldRefCount = this.refCount;
-    this.refCount += 1;
-    if (oldRefCount == 0) {
-      const db = makeKinto();
-      const kintoCollection = db.collection(STORAGE_SYNC_CRYPTO_COLLECTION_NAME, {
-        idSchema: cryptoCollectionIdSchema,
-        remoteTransformers: [new KeyRingEncryptionRemoteTransformer()],
-      });
-      this._kintoCollectionPromise = kintoCollection.db.open().then(() => kintoCollection);
-    }
-  }),
-
-  /**
-   * Call this to signal release of the cryptoCollection.
-   */
-  decrementUses: Task.async(function* () {
-    if (this.refCount == 0) {
-      Cu.reportError(new Error("too many decrementUses() of cryptoCollection!"));
-      return;
-    }
-    this.refCount -= 1;
-    if (this.refCount == 0) {
-      const oldPromise = this._kintoCollectionPromise;
-      this._kintoCollectionPromise = null;
-      const collection = yield oldPromise;
-      yield collection.db.close();
-    }
-  }),
-
-  isActive() {
-    return this.refCount != 0;
-  },
-
-  /**
-   * Retrieve the keyring record from the crypto collection.
-   *
-   * You can use this if you want to check metadata on the keyring
-   * record rather than use the keyring itself.
-   *
-   * @returns {Promise<Object>}
-   */
-  getKeyRingRecord: Task.async(function* () {
-    const collection = yield this._kintoCollectionPromise;
-    const cryptoKeyRecord = yield collection.getAny(STORAGE_SYNC_CRYPTO_KEYRING_RECORD_ID);
-
-    let data = cryptoKeyRecord.data;
-    if (!data) {
-      // This is a new keyring. Invent an ID for this record. If this
-      // changes, it means a client replaced the keyring, so we need to
-      // reupload everything.
-      const uuidgen = Cc["@mozilla.org/uuid-generator;1"].getService(Ci.nsIUUIDGenerator);
-      const uuid = uuidgen.generateUUID();
-      data = {uuid};
-    }
-    return data;
-  }),
-
-  /**
-   * Retrieve the actual keyring from the crypto collection.
-   *
-   * @returns {Promise<CollectionKeyManager>}
-   */
-  getKeyRing: Task.async(function* () {
-    const cryptoKeyRecord = yield this.getKeyRingRecord();
-    const collectionKeys = new CollectionKeyManager();
-    if (cryptoKeyRecord.keys) {
-      collectionKeys.setContents(cryptoKeyRecord.keys, cryptoKeyRecord.last_modified);
-    } else {
-      // We never actually use the default key, so it's OK if we
-      // generate one multiple times.
-      collectionKeys.generateDefaultKey();
-    }
-    // Pass through uuid field so that we can save it if we need to.
-    collectionKeys.uuid = cryptoKeyRecord.uuid;
-    return collectionKeys;
-  }),
-
-  updateKBHash: Task.async(function* (kbHash) {
-    const coll = yield this._kintoCollectionPromise;
-    yield coll.update({id: STORAGE_SYNC_CRYPTO_KEYRING_RECORD_ID,
-                       kbHash: kbHash},
-                      {patch: true});
-  }),
-
-  upsert: Task.async(function* (record) {
-    const collection = yield this._kintoCollectionPromise;
-    yield collection.upsert(record);
-  }),
-
-  sync: Task.async(function* () {
-    if (!this._kintoCollectionPromise) {
-      throw new Error("tried to sync without any live uses of the Kinto collection!");
-    }
-
-    const collection = yield this._kintoCollectionPromise;
-    return yield ExtensionStorageSync._syncCollection(collection, {
-      strategy: "server_wins",
-    });
-  }),
-
-  /**
-   * Reset sync status for ALL collections by directly
-   * accessing the FirefoxAdapter.
-   */
-  resetSyncStatus: Task.async(function* () {
-    const coll = yield this._kintoCollectionPromise;
-    yield coll.db.resetSyncStatus();
-  }),
-
-  // Used only for testing.
-  _clear: Task.async(function* () {
-    const collection = yield this._kintoCollectionPromise;
-    yield collection.clear();
-  }),
-};
-
-/**
- * An EncryptionRemoteTransformer that uses the special "keys" record
- * to find a key for a given extension.
- *
- * @param {string} extensionId The extension ID for which to find a key.
- */
-class CollectionKeyEncryptionRemoteTransformer extends EncryptionRemoteTransformer {
-  constructor(extensionId) {
-    super();
-    this.extensionId = extensionId;
-  }
-
-  getKeys() {
-    const self = this;
-    return Task.spawn(function* () {
-      // FIXME: cache the crypto record for the duration of a sync cycle?
-      const collectionKeys = yield cryptoCollection.getKeyRing();
-      if (!collectionKeys.hasKeysFor([self.extensionId])) {
-        // This should never happen. Keys should be created (and
-        // synced) at the beginning of the sync cycle.
-        throw new Error(`tried to encrypt records for ${this.extensionId}, but key is not present`);
-      }
-      return collectionKeys.keyForCollection(self.extensionId);
-    });
-  }
-}
-global.CollectionKeyEncryptionRemoteTransformer = CollectionKeyEncryptionRemoteTransformer;
-
-/**
- * Actually for-real close the collection associated with a
- * collection.
- *
- * @param {Extension} extension
- *                    The extension whose uses are all over.
- * @returns {Promise<()>} Promise that resolves when everything is clean.
- */
-const closeExtensionCollection = Task.async(function* (extension) {
-  const collectionPromise = collectionPromises.get(extension);
-  if (!collectionPromise) {
-    Cu.reportError(new Error(`Internal error: trying to close extension ${extension.id}` +
-                             "that doesn't have a collection"));
-    return;
-  }
-  collectionPromises.delete(extension);
-  const coll = yield collectionPromise;
-  yield coll.db.close();
-  yield cryptoCollection.decrementUses();
-});
-
-/**
- * Clean up now that one context is no longer using this extension's collection.
- *
- * @param {Extension} extension
- *                    The extension whose context just ended.
- * @param {Context} context
- *                  The context that just ended.
- * @returns {Promise<()>} Promise that resolves when everything is clean.
- */
-function cleanUpForContext(extension, context) {
-  const contexts = extensionContexts.get(extension);
-  if (!contexts) {
-    Cu.reportError(new Error(`Internal error: cannot find any contexts for extension ${extension.id}`));
-    // Try to shut down cleanly anyhow?
-    return closeExtensionCollection(extension);
-  }
-  contexts.delete(context);
-  if (contexts.size === 0) {
-    // Nobody else is using this collection. Clean up.
-    extensionContexts.delete(extension);
-    return closeExtensionCollection(extension);
-  }
-}
-
-/**
- * Generate a promise that produces the Collection for an extension.
- *
- * @param {Extension} extension
- *                    The extension whose collection needs to
- *                    be opened.
- * @param {Context} context
- *                  The context for this extension. The Collection
- *                  will shut down automatically when all contexts
- *                  close.
- * @returns {Promise<Collection>}
- */
-const openCollection = Task.async(function* (extension, context) {
-  let collectionId = extension.id;
-  const db = makeKinto();
-  const coll = db.collection(collectionId, {
-    idSchema: storageSyncIdSchema,
-    remoteTransformers: [new CollectionKeyEncryptionRemoteTransformer(extension.id)],
-  });
-  yield coll.db.open();
-  yield cryptoCollection.incrementUses();
-  return coll;
-});
-
-/**
- * Hash an extension ID for a given user so that an attacker can't
- * identify the extensions a user has installed.
- *
- * @param {User} user
- *               The user for whom to choose a collection to sync
- *               an extension to.
- * @param {string} extensionId The extension ID to obfuscate.
- * @returns {string} A collection ID suitable for use to sync to.
- */
-function extensionIdToCollectionId(user, extensionId) {
-  const userFingerprint = CryptoUtils.hkdf(user.uid, undefined,
-                                           "identity.mozilla.com/picl/v1/chrome.storage.sync.collectionIds", 2 * 32);
-  let data = new TextEncoder().encode(userFingerprint + extensionId);
-  let hasher = Cc["@mozilla.org/security/hash;1"]
-                 .createInstance(Ci.nsICryptoHash);
-  hasher.init(hasher.SHA256);
-  hasher.update(data, data.length);
-
-  return CommonUtils.bytesAsHex(hasher.finish(false));
-}
-
-this.ExtensionStorageSync = {
-  _fxaService: fxAccounts,
-  listeners: new WeakMap(),
-
-  syncAll: Task.async(function* () {
-    // Add a use for the syncing process itself, so that we don't
-    // break if someone uninstalls their last extension during a sync
-    yield cryptoCollection.incrementUses();
-    try {
-      const extensions = collectionPromises.keys();
-      const extIds = Array.from(extensions, extension => extension.id);
-      log.debug(`Syncing extension settings for ${JSON.stringify(extIds)}\n`);
-      if (extIds.length == 0) {
-        // No extensions to sync. Crypto probably isn't even
-        // initialized. Get out.
-        return;
-      }
-      yield this.ensureKeysFor(extIds);
-      yield this.checkSyncKeyRing();
-      const promises = Array.from(collectionPromises.entries(), ([extension, collPromise]) => {
-        return collPromise.then(coll => {
-          return this.sync(extension, coll);
-        });
-      });
-      yield Promise.all(promises);
-    } finally {
-      yield cryptoCollection.decrementUses();
-    }
-  }),
-
-  sync: Task.async(function* (extension, collection) {
-    const signedInUser = yield this._fxaService.getSignedInUser();
-    if (!signedInUser) {
-      // FIXME: this should support syncing to self-hosted
-      log.info("User was not signed into FxA; cannot sync");
-      throw new Error("Not signed in to FxA");
-    }
-    const collectionId = extensionIdToCollectionId(signedInUser, extension.id);
-    let syncResults;
-    try {
-      syncResults = yield this._syncCollection(collection, {
-        strategy: "client_wins",
-        collection: collectionId,
-      });
-    } catch (err) {
-      log.warn("Syncing failed", err);
-      throw err;
-    }
-
-    let changes = {};
-    for (const record of syncResults.created) {
-      changes[record.key] = {
-        newValue: record.data,
-      };
-    }
-    for (const record of syncResults.updated) {
-      // N.B. It's safe to just pick old.key because it's not
-      // possible to "rename" a record in the storage.sync API.
-      const key = record.old.key;
-      changes[key] = {
-        oldValue: record.old.data,
-        newValue: record.new.data,
-      };
-    }
-    for (const record of syncResults.deleted) {
-      changes[record.key] = {
-        oldValue: record.data,
-      };
-    }
-    for (const conflict of syncResults.resolved) {
-      // FIXME: Should we even send a notification? If so, what
-      // best values for "old" and "new"? This might violate
-      // client code's assumptions, since from their perspective,
-      // we were in state L, but this diff is from R -> L.
-      changes[conflict.remote.key] = {
-        oldValue: conflict.local.data,
-        newValue: conflict.remote.data,
-      };
-    }
-    if (Object.keys(changes).length > 0) {
-      this.notifyListeners(extension, changes);
-    }
-  }),
-
-  /**
-   * Utility function that handles the common stuff about syncing all
-   * Kinto collections (including "meta" collections like the crypto
-   * one).
-   *
-   * @param {Collection} collection
-   * @param {Object} options
-   *                 Additional options to be passed to sync().
-   * @returns {Promise<SyncResultObject>}
-   */
-  _syncCollection: Task.async(function* (collection, options) {
-    // FIXME: this should support syncing to self-hosted
-    return yield this._requestWithToken(`Syncing ${collection.name}`, function* (token) {
-      const allOptions = Object.assign({}, {
-        remote: prefStorageSyncServerURL,
-        headers: {
-          Authorization: "Bearer " + token,
-        },
-      }, options);
-
-      return yield collection.sync(allOptions);
-    });
-  }),
-
-  // Make a Kinto request with a current FxA token.
-  // If the response indicates that the token might have expired,
-  // retry the request.
-  _requestWithToken: Task.async(function* (description, f) {
-    const fxaToken = yield this._fxaService.getOAuthToken(FXA_OAUTH_OPTIONS);
-    try {
-      return yield f(fxaToken);
-    } catch (e) {
-      log.error(`${description}: request failed`, e);
-      if (e && e.data && e.data.code == 401) {
-        // Our token might have expired. Refresh and retry.
-        log.info("Token might have expired");
-        yield this._fxaService.removeCachedOAuthToken({token: fxaToken});
-        const newToken = yield this._fxaService.getOAuthToken(FXA_OAUTH_OPTIONS);
-
-        // If this fails too, let it go.
-        return yield f(newToken);
-      }
-      // Otherwise, we don't know how to handle this error, so just reraise.
-      throw e;
-    }
-  }),
-
-  /**
-   * Helper similar to _syncCollection, but for deleting the user's bucket.
-   */
-  _deleteBucket: Task.async(function* () {
-    return yield this._requestWithToken("Clearing server", function* (token) {
-      const headers = {Authorization: "Bearer " + token};
-      const kintoHttp = new KintoHttpClient(prefStorageSyncServerURL, {
-        headers: headers,
-        timeout: KINTO_REQUEST_TIMEOUT,
-      });
-      return yield kintoHttp.deleteBucket("default");
-    });
-  }),
-
-  /**
-   * Recursively ensure that both our local collectionKeys and the
-   * keyring on the server have keys for all the extensions in
-   * extIds.
-   *
-   * @param {Array<string>} extIds
-   *                        The IDs of the extensions which need keys.
-   * @returns {Promise<CollectionKeyManager>}
-   */
-  ensureKeysFor: Task.async(function* (extIds) {
-    const collectionKeys = yield cryptoCollection.getKeyRing();
-    if (collectionKeys.hasKeysFor(extIds)) {
-      return collectionKeys;
-    }
-
-    const kbHash = yield this.getKBHash();
-    const newKeys = yield collectionKeys.ensureKeysFor(extIds);
-    const newRecord = {
-      id: STORAGE_SYNC_CRYPTO_KEYRING_RECORD_ID,
-      keys: newKeys.asWBO().cleartext,
-      uuid: collectionKeys.uuid,
-      // Add a field for the current kB hash.
-      kbHash: kbHash,
-    };
-    yield cryptoCollection.upsert(newRecord);
-    const result = yield this._syncKeyRing(newRecord);
-    if (result.resolved.length != 0) {
-      // We had a conflict which was automatically resolved. We now
-      // have a new keyring which might have keys for the
-      // collections. Recurse.
-      return yield this.ensureKeysFor(extIds);
-    }
-
-    // No conflicts. We're good.
-    return newKeys;
-  }),
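
As a usage sketch only (the caller and syncExtension below are hypothetical names, not part of this module), ensureKeysFor would run before any extension data is pushed:

    // Illustrative only: make sure keys exist for every extension we are
    // about to sync, then sync each one.
    async function syncChangedExtensions(ids) {
      await ExtensionStorageSync.ensureKeysFor(ids);
      for (const id of ids) {
        await syncExtension(id);  // hypothetical per-extension sync entry point
      }
    }
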
-
-  /**
-   * Get the current user's hashed kB.
-   *
-   * @returns sha256 of the user's kB as a hex string
-   */
-  getKBHash: Task.async(function* () {
-    const signedInUser = yield this._fxaService.getSignedInUser();
-    if (!signedInUser) {
-      throw new Error("User isn't signed in!");
-    }
-
-    if (!signedInUser.kB) {
-      throw new Error("User doesn't have kB??");
-    }
-
-    let kBbytes = CommonUtils.hexToBytes(signedInUser.kB);
-    let hasher = Cc["@mozilla.org/security/hash;1"]
-                    .createInstance(Ci.nsICryptoHash);
-    hasher.init(hasher.SHA256);
-    return CommonUtils.bytesAsHex(CryptoUtils.digestBytes(signedInUser.uid + kBbytes, hasher));
-  }),
-
-  /**
-   * Update the kB in the crypto record.
-   */
-  updateKeyRingKB: Task.async(function* () {
-    const signedInUser = yield this._fxaService.getSignedInUser();
-    if (!signedInUser) {
-      // Although this function is meant to be called on login,
-      // it's not unreasonable to check any time, even if we aren't
-      // logged in.
-      //
-      // If we aren't logged in, we have no information about the
-      // user's kB and therefore can't tell whether it changed, so
-      // just return.
-      return;
-    }
-
-    const thisKBHash = yield this.getKBHash();
-    yield cryptoCollection.updateKBHash(thisKBHash);
-  }),
-
-  /**
-   * Make sure the keyring is up to date and synced.
-   *
-   * This is called on log-in events to maintain the keyring in the
-   * correct state on the server. It's also called on syncs to make
-   * sure that we don't sync anything to any collection unless the key
-   * for that collection is on the server.
-   */
-  checkSyncKeyRing: Task.async(function* () {
-    if (!cryptoCollection.isActive()) {
-      // We got called while no extensions use storage.sync. We don't
-      // have any access to the crypto record, so just let this
-      // notification slip through our fingers. If we do get
-      // extensions later, we'll pick this up on a subsequent sync.
-      log.info("Tried to check keyring, but no extensions are loaded. Ignoring.");
-      return;
-    }
-
-    yield this.updateKeyRingKB();
-
-    const cryptoKeyRecord = yield cryptoCollection.getKeyRingRecord();
-    if (cryptoKeyRecord && cryptoKeyRecord._status !== "synced") {
-      // We haven't successfully synced the keyring since the last
-      // change. This could be because kB changed and we touched the
-      // keyring, or it could be because we failed to sync after
-      // adding a key. Either way, take this opportunity to sync the
-      // keyring.
-      yield this._syncKeyRing(cryptoKeyRecord);
-    }
-  }),
-
-  _syncKeyRing: Task.async(function* (cryptoKeyRecord) {
-    try {
-      // Try to sync using server_wins.
-      //
-      // We use server_wins here because whatever is on the server is
-      // at least consistent with itself -- the crypto in the keyring
-      // matches the crypto on the collection records. This is because
-      // we generate and upload keys just before syncing data.
-      //
-      // It's possible that we can't decode the version on the server.
-      // This can happen if a user is locked out of their account, and
-      // does a "reset password" to get in on a new device. In this
-      // case, we are in a bind -- we can't decrypt the record on the
-      // server, so we can't merge keys. If this happens, we try to
-      // figure out if we're the one with the correct (new) kB or if
-      // we just got locked out because we have the old kB. If we're
-      // the one with the correct kB, we wipe the server and reupload
-      // everything, including a new keyring.
-      //
-      // If another device has wiped the server, we need to reupload
-      // everything we have on our end too, so we detect this by
-      // adding a UUID to the keyring. UUIDs are preserved throughout
-      // the lifetime of a keyring, so the only time a keyring UUID
-      // changes is when a new keyring is uploaded, which only happens
-      // after a server wipe. So when we get a "conflict" (resolved by
-      // server_wins), we check whether the server version has a new
-      // UUID. If so, reset our sync status, so that we'll reupload
-      // everything.
-      const result = yield cryptoCollection.sync();
-      if (result.resolved.length > 0) {
-        if (result.resolved[0].uuid != cryptoKeyRecord.uuid) {
-          log.info("Detected a new UUID. Reseting sync status for everything.");
-          yield cryptoCollection.resetSyncStatus();
-          // Any open collections might have a lastModified; we need
-          // to wipe that too.
-          for (let [, cPromise] of collectionPromises) {
-            const coll = yield cPromise;
-            // FIXME: should there be a method here?
-            coll._lastModified = null;
-          }
-
-          // Server version is now correct. Return that result.
-          return result;
-        }
-      }
-      // No conflicts, or conflict was just someone else adding keys.
-      return result;
-    } catch (e) {
-      if (KeyRingEncryptionRemoteTransformer.isOutdatedKB(e)) {
-        // Check if our token is still valid, or if we got locked out
-        // between starting the sync and talking to Kinto.
-        const isSessionValid = yield this._fxaService.sessionStatus();
-        if (isSessionValid) {
-          yield this._deleteBucket();
-          yield cryptoCollection.resetSyncStatus();
-
-          // Reupload our keyring, which is the only new keyring.
-          // We don't want client_wins here because another device
-          // could have uploaded another keyring in the meantime.
-          return yield cryptoCollection.sync();
-        }
-      }
-      throw e;
-    }
-  }),
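
The UUID check described in the long comment above reduces to a small decision; a sketch in isolation, assuming both arguments are keyring records as stored in the crypto collection:

    // Sketch: a resolved conflict whose server-side keyring carries a
    // different UUID means another device wiped the server and uploaded a
    // brand-new keyring, so all local sync state must be reset.
    function serverWasWiped(localKeyRing, resolvedConflicts) {
      return resolvedConflicts.length > 0 &&
             resolvedConflicts[0].uuid != localKeyRing.uuid;
    }
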
-
-  /**
-   * Get the collection for an extension, consulting a cache to
-   * save time.
-   *
-   * @param {Extension} extension
-   *                    The extension for which we are seeking
-   *                    a collection.
-   * @param {Context} context
-   *                  The context of the extension, so that we can
-   *                  clean up the collection when the extension ends.
-   * @returns {Promise<Collection>}
-   */
-  getCollection(extension, context) {
-    if (prefPermitsStorageSync !== true) {
-      return Promise.reject({message: `Please set ${STORAGE_SYNC_ENABLED_PREF} to true in about:config`});
-    }
-    if (!collectionPromises.has(extension)) {
-      const collectionPromise = openCollection(extension, context);
-      collectionPromises.set(extension, collectionPromise);
-      collectionPromise.catch(Cu.reportError);
-    }
-
-    // Register that the extension and context are in use.
-    if (!extensionContexts.has(extension)) {
-      extensionContexts.set(extension, new Set());
-    }
-    const contexts = extensionContexts.get(extension);
-    if (!contexts.has(context)) {
-      // New context. Register it and make sure it cleans itself up
-      // when it closes.
-      contexts.add(context);
-      context.callOnClose({
-        close: () => cleanUpForContext(extension, context),
-      });
-    }
-
-    return collectionPromises.get(extension);
-  },
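
The caching here is the usual memoized-promise pattern; a minimal generic sketch assuming nothing beyond a Map:

    // Sketch: open each resource at most once and share the pending
    // promise between concurrent callers. Note that a rejected promise
    // stays cached, matching the behaviour of getCollection above.
    const cache = new Map();
    function getCached(key, open) {
      if (!cache.has(key)) {
        cache.set(key, open(key));
      }
      return cache.get(key);
    }
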
-
-  set: Task.async(function* (extension, items, context) {
-    const coll = yield this.getCollection(extension, context);
-    const keys = Object.keys(items);
-    const ids = keys.map(keyToId);
-    const changes = yield coll.execute(txn => {
-      let changes = {};
-      for (let [i, key] of keys.entries()) {
-        const id = ids[i];
-        let item = items[key];
-        let {oldRecord} = txn.upsert({
-          id,
-          key,
-          data: item,
-        });
-        changes[key] = {
-          newValue: item,
-        };
-        if (oldRecord && oldRecord.data) {
-          // Extract the "data" field from the old record, which
-          // represents the value part of the key-value store
-          changes[key].oldValue = oldRecord.data;
-        }
-      }
-      return changes;
-    }, {preloadIds: ids});
-    this.notifyListeners(extension, changes);
-  }),
-
-  remove: Task.async(function* (extension, keys, context) {
-    const coll = yield this.getCollection(extension, context);
-    keys = [].concat(keys);
-    const ids = keys.map(keyToId);
-    let changes = {};
-    yield coll.execute(txn => {
-      for (let [i, key] of keys.entries()) {
-        const id = ids[i];
-        const res = txn.deleteAny(id);
-        if (res.deleted) {
-          changes[key] = {
-            oldValue: res.data.data,
-          };
-        }
-      }
-      return changes;
-    }, {preloadIds: ids});
-    if (Object.keys(changes).length > 0) {
-      this.notifyListeners(extension, changes);
-    }
-  }),
-
-  clear: Task.async(function* (extension, context) {
-    // We can't call Collection#clear here, because that just clears
-    // the local database. We have to explicitly delete everything so
-    // that the deletions can be synced as well.
-    const coll = yield this.getCollection(extension, context);
-    const res = yield coll.list();
-    const records = res.data;
-    const keys = records.map(record => record.key);
-    yield this.remove(extension, keys, context);
-  }),
-
-  get: Task.async(function* (extension, spec, context) {
-    const coll = yield this.getCollection(extension, context);
-    let keys, records;
-    if (spec === null) {
-      records = {};
-      const res = yield coll.list();
-      for (let record of res.data) {
-        records[record.key] = record.data;
-      }
-      return records;
-    }
-    if (typeof spec === "string") {
-      keys = [spec];
-      records = {};
-    } else if (Array.isArray(spec)) {
-      keys = spec;
-      records = {};
-    } else {
-      keys = Object.keys(spec);
-      records = Cu.cloneInto(spec, global);
-    }
-
-    for (let key of keys) {
-      const res = yield coll.getAny(keyToId(key));
-      if (res.data && res.data._status != "deleted") {
-        records[res.data.key] = res.data.data;
-      }
-    }
-
-    return records;
-  }),
-
-  addOnChangedListener(extension, listener, context) {
-    let listeners = this.listeners.get(extension) || new Set();
-    listeners.add(listener);
-    this.listeners.set(extension, listeners);
-
-    // Force opening the collection so that we will sync for this extension.
-    return this.getCollection(extension, context);
-  },
-
-  removeOnChangedListener(extension, listener) {
-    let listeners = this.listeners.get(extension);
-    listeners.delete(listener);
-    if (listeners.size == 0) {
-      this.listeners.delete(extension);
-    }
-  },
-
-  notifyListeners(extension, changes) {
-    Observers.notify("ext.storage.sync-changed");
-    let listeners = this.listeners.get(extension) || new Set();
-    if (listeners) {
-      for (let listener of listeners) {
-        runSafeSyncWithoutClone(listener, changes);
-      }
-    }
-  },
-};
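
For context, the methods above back the standard WebExtension storage.sync surface; a minimal extension-side usage sketch (requires the "storage" permission in manifest.json):

    // Minimal WebExtension usage of storage.sync plus the change listener.
    browser.storage.sync.set({theme: "dark"}).then(() => {
      return browser.storage.sync.get("theme");
    }).then(result => {
      console.log(result.theme);  // "dark"
    });

    browser.storage.onChanged.addListener((changes, areaName) => {
      if (areaName == "sync" && changes.theme) {
        console.log("theme changed to", changes.theme.newValue);
      }
    });
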
--- a/toolkit/components/extensions/ext-c-storage.js
+++ b/toolkit/components/extensions/ext-c-storage.js
@@ -36,28 +36,13 @@ function storageApiFactory(context) {
         },
         set: function(items) {
           items = sanitize(items);
           return context.childManager.callParentAsyncFunction("storage.local.set", [
             items,
           ]);
         },
       },
-
-      sync: {
-        get: function(keys) {
-          keys = sanitize(keys);
-          return context.childManager.callParentAsyncFunction("storage.sync.get", [
-            keys,
-          ]);
-        },
-        set: function(items) {
-          items = sanitize(items);
-          return context.childManager.callParentAsyncFunction("storage.sync.set", [
-            items,
-          ]);
-        },
-      },
     },
   };
 }
 extensions.registerSchemaAPI("storage", "addon_child", storageApiFactory);
 extensions.registerSchemaAPI("storage", "content_child", storageApiFactory);
--- a/toolkit/components/extensions/ext-storage.js
+++ b/toolkit/components/extensions/ext-storage.js
@@ -1,68 +1,46 @@
 "use strict";
 
 var {classes: Cc, interfaces: Ci, utils: Cu} = Components;
 
 XPCOMUtils.defineLazyModuleGetter(this, "ExtensionStorage",
                                   "resource://gre/modules/ExtensionStorage.jsm");
-XPCOMUtils.defineLazyModuleGetter(this, "ExtensionStorageSync",
-                                  "resource://gre/modules/ExtensionStorageSync.jsm");
 
 Cu.import("resource://gre/modules/ExtensionUtils.jsm");
 var {
   EventManager,
 } = ExtensionUtils;
 
 function storageApiFactory(context) {
   let {extension} = context;
   return {
     storage: {
       local: {
-        get: function(spec) {
-          return ExtensionStorage.get(extension.id, spec);
+        get: function(keys) {
+          return ExtensionStorage.get(extension.id, keys);
         },
         set: function(items) {
           return ExtensionStorage.set(extension.id, items, context);
         },
-        remove: function(keys) {
-          return ExtensionStorage.remove(extension.id, keys);
+        remove: function(items) {
+          return ExtensionStorage.remove(extension.id, items);
         },
         clear: function() {
           return ExtensionStorage.clear(extension.id);
         },
       },
 
-      sync: {
-        get: function(spec) {
-          return ExtensionStorageSync.get(extension, spec, context);
-        },
-        set: function(items) {
-          return ExtensionStorageSync.set(extension, items, context);
-        },
-        remove: function(keys) {
-          return ExtensionStorageSync.remove(extension, keys, context);
-        },
-        clear: function() {
-          return ExtensionStorageSync.clear(extension, context);
-        },
-      },
-
-      onChanged: new EventManager(context, "storage.onChanged", fire => {
-        let listenerLocal = changes => {
+      onChanged: new EventManager(context, "storage.local.onChanged", fire => {
+        let listener = changes => {
           fire(changes, "local");
         };
-        let listenerSync = changes => {
-          fire(changes, "sync");
-        };
 
-        ExtensionStorage.addOnChangedListener(extension.id, listenerLocal);
-        ExtensionStorageSync.addOnChangedListener(extension, listenerSync, context);
+        ExtensionStorage.addOnChangedListener(extension.id, listener);
         return () => {
-          ExtensionStorage.removeOnChangedListener(extension.id, listenerLocal);
-          ExtensionStorageSync.removeOnChangedListener(extension, listenerSync);
+          ExtensionStorage.removeOnChangedListener(extension.id, listener);
         };
       }).api(),
     },
   };
 }
 extensions.registerSchemaAPI("storage", "addon_parent", storageApiFactory);
 extensions.registerSchemaAPI("storage", "content_parent", storageApiFactory);
--- a/toolkit/components/extensions/moz.build
+++ b/toolkit/components/extensions/moz.build
@@ -6,17 +6,16 @@
 
 EXTRA_JS_MODULES += [
     'Extension.jsm',
     'ExtensionAPI.jsm',
     'ExtensionChild.jsm',
     'ExtensionContent.jsm',
     'ExtensionManagement.jsm',
     'ExtensionStorage.jsm',
-    'ExtensionStorageSync.jsm',
     'ExtensionUtils.jsm',
     'LegacyExtensionsUtils.jsm',
     'MessageChannel.jsm',
     'NativeMessaging.jsm',
     'Schemas.jsm',
 ]
 
 EXTRA_COMPONENTS += [
--- a/toolkit/components/extensions/schemas/storage.json
+++ b/toolkit/components/extensions/schemas/storage.json
@@ -174,16 +174,17 @@
             "type": "string",
             "description": "The name of the storage area (<code>\"sync\"</code>, <code>\"local\"</code> or <code>\"managed\"</code>) the changes are for."
           }
         ]
       }
     ],
     "properties": {
       "sync": {
+        "unsupported": true,
         "$ref": "StorageArea",
         "description": "Items in the <code>sync</code> storage area are synced by the browser.",
         "properties": {
           "QUOTA_BYTES": {
             "value": 102400,
             "description": "The maximum total amount (in bytes) of data that can be stored in sync storage, as measured by the JSON stringification of every value plus every key's length. Updates that would cause this limit to be exceeded fail immediately and set $(ref:runtime.lastError)."
           },
           "QUOTA_BYTES_PER_ITEM": {
deleted file mode 100644
--- a/toolkit/components/extensions/test/xpcshell/head_sync.js
+++ /dev/null
@@ -1,67 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-"use strict";
-
-/* exported withSyncContext */
-
-Components.utils.import("resource://gre/modules/Services.jsm", this);
-Components.utils.import("resource://gre/modules/ExtensionUtils.jsm", this);
-
-var {
-  BaseContext,
-} = ExtensionUtils;
-
-class Context extends BaseContext {
-  constructor(principal) {
-    super();
-    Object.defineProperty(this, "principal", {
-      value: principal,
-      configurable: true,
-    });
-    this.sandbox = Components.utils.Sandbox(principal, {wantXrays: false});
-    this.extension = {id: "test@web.extension"};
-  }
-
-  get cloneScope() {
-    return this.sandbox;
-  }
-}
-
-/**
- * Call the given function with a newly-constructed context.
- * Unload the context on the way out.
- *
- * @param {function} f    the function to call
- */
-function* withContext(f) {
-  const ssm = Services.scriptSecurityManager;
-  const PRINCIPAL1 = ssm.createCodebasePrincipalFromOrigin("http://www.example.org");
-  const context = new Context(PRINCIPAL1);
-  try {
-    yield* f(context);
-  } finally {
-    yield context.unload();
-  }
-}
-
-/**
- * Like withContext(), but also turn on the "storage.sync" pref for
- * the duration of the function.
- * Calls to this function can be replaced with calls to withContext
- * once the pref becomes on by default.
- *
- * @param {function} f    the function to call
- */
-function* withSyncContext(f) {
-  const STORAGE_SYNC_PREF = "webextensions.storage.sync.enabled";
-  let prefs = Services.prefs;
-
-  try {
-    prefs.setBoolPref(STORAGE_SYNC_PREF, true);
-    yield* withContext(f);
-  } finally {
-    prefs.clearUserPref(STORAGE_SYNC_PREF);
-  }
-}
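
A typical xpcshell usage of the helper above, as in the sync tests further down, would look roughly like this (a sketch; it assumes ExtensionStorageSync.jsm has been imported as in test_ext_storage_sync.js):

    // Sketch: run a test body with a throwaway context and the
    // storage.sync pref flipped on for its duration.
    add_task(function* test_with_sync_context() {
      yield* withSyncContext(function* (context) {
        // `context` is a BaseContext backed by a test principal; pass it
        // to the ExtensionStorageSync methods under test.
        yield ExtensionStorageSync.get({id: "test@web.extension"}, "some-key", context);
      });
    });
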
--- a/toolkit/components/extensions/test/xpcshell/test_ext_storage.js
+++ b/toolkit/components/extensions/test/xpcshell/test_ext_storage.js
@@ -1,357 +1,188 @@
 /* -*- Mode: indent-tabs-mode: nil; js-indent-level: 2 -*- */
 /* vim: set sts=2 sw=2 et tw=80: */
 "use strict";
 
-const STORAGE_SYNC_PREF = "webextensions.storage.sync.enabled";
-Cu.import("resource://gre/modules/Preferences.jsm");
+function backgroundScript() {
+  let storage = browser.storage.local;
+  function check(prop, value) {
+    return storage.get(null).then(data => {
+      browser.test.assertEq(value, data[prop], "null getter worked for " + prop);
+      return storage.get(prop);
+    }).then(data => {
+      browser.test.assertEq(value, data[prop], "string getter worked for " + prop);
+      return storage.get([prop]);
+    }).then(data => {
+      browser.test.assertEq(value, data[prop], "array getter worked for " + prop);
+      return storage.get({[prop]: undefined});
+    }).then(data => {
+      browser.test.assertEq(value, data[prop], "object getter worked for " + prop);
+    });
+  }
+
+  let globalChanges = {};
+
+  browser.storage.onChanged.addListener((changes, storage) => {
+    browser.test.assertEq("local", storage, "storage is local");
+    Object.assign(globalChanges, changes);
+  });
+
+  function checkChanges(changes) {
+    function checkSub(obj1, obj2) {
+      for (let prop in obj1) {
+        browser.test.assertEq(obj1[prop].oldValue, obj2[prop].oldValue);
+        browser.test.assertEq(obj1[prop].newValue, obj2[prop].newValue);
+      }
+    }
+
+    checkSub(changes, globalChanges);
+    checkSub(globalChanges, changes);
+    globalChanges = {};
+  }
+
+  /* eslint-disable dot-notation */
+
+  // Set some data and then test getters.
+  storage.set({"test-prop1": "value1", "test-prop2": "value2"}).then(() => {
+    checkChanges({"test-prop1": {newValue: "value1"}, "test-prop2": {newValue: "value2"}});
+    return check("test-prop1", "value1");
+  }).then(() => {
+    return check("test-prop2", "value2");
+  }).then(() => {
+    return storage.get({"test-prop1": undefined, "test-prop2": undefined, "other": "default"});
+  }).then(data => {
+    browser.test.assertEq("value1", data["test-prop1"], "prop1 correct");
+    browser.test.assertEq("value2", data["test-prop2"], "prop2 correct");
+    browser.test.assertEq("default", data["other"], "other correct");
+    return storage.get(["test-prop1", "test-prop2", "other"]);
+  }).then(data => {
+    browser.test.assertEq("value1", data["test-prop1"], "prop1 correct");
+    browser.test.assertEq("value2", data["test-prop2"], "prop2 correct");
+    browser.test.assertFalse("other" in data, "other correct");
+
+  // Remove data in various ways.
+  }).then(() => {
+    return storage.remove("test-prop1");
+  }).then(() => {
+    checkChanges({"test-prop1": {oldValue: "value1"}});
+    return storage.get(["test-prop1", "test-prop2"]);
+  }).then(data => {
+    browser.test.assertFalse("test-prop1" in data, "prop1 absent");
+    browser.test.assertTrue("test-prop2" in data, "prop2 present");
 
-/**
- * Utility function to ensure that all supported APIs for getting are
- * tested.
- *
- * @param {string} areaName
- *        either "local" or "sync" according to what we want to test
- * @param {string} prop
- *        "key" to look up using the storage API
- * @param {Object} value
- *        "value" to compare against
- * @returns {Promise}
- */
-function checkGet(areaName, prop, value) {
-  let storage = browser.storage[areaName];
-  return storage.get(null).then(data => {
-    browser.test.assertEq(value, data[prop], `null getter worked for ${prop} in ${areaName}`);
-    return storage.get(prop);
+    return storage.set({"test-prop1": "value1"});
+  }).then(() => {
+    checkChanges({"test-prop1": {newValue: "value1"}});
+    return storage.get(["test-prop1", "test-prop2"]);
+  }).then(data => {
+    browser.test.assertEq("value1", data["test-prop1"], "prop1 correct");
+    browser.test.assertEq("value2", data["test-prop2"], "prop2 correct");
+  }).then(() => {
+    return storage.remove(["test-prop1", "test-prop2"]);
+  }).then(() => {
+    checkChanges({"test-prop1": {oldValue: "value1"}, "test-prop2": {oldValue: "value2"}});
+    return storage.get(["test-prop1", "test-prop2"]);
+  }).then(data => {
+    browser.test.assertFalse("test-prop1" in data, "prop1 absent");
+    browser.test.assertFalse("test-prop2" in data, "prop2 absent");
+
+  // test storage.clear
+  }).then(() => {
+    return storage.set({"test-prop1": "value1", "test-prop2": "value2"});
+  }).then(() => {
+    return storage.clear();
+  }).then(() => {
+    checkChanges({"test-prop1": {oldValue: "value1"}, "test-prop2": {oldValue: "value2"}});
+    return storage.get(["test-prop1", "test-prop2"]);
   }).then(data => {
-    browser.test.assertEq(value, data[prop], `string getter worked for ${prop} in ${areaName}`);
-    return storage.get([prop]);
+    browser.test.assertFalse("test-prop1" in data, "prop1 absent");
+    browser.test.assertFalse("test-prop2" in data, "prop2 absent");
+
+  // Test cache invalidation.
+  }).then(() => {
+    return storage.set({"test-prop1": "value1", "test-prop2": "value2"});
+  }).then(() => {
+    globalChanges = {};
+    // Schedule sendMessage after onMessage because the other end immediately
+    // sends a message.
+    Promise.resolve().then(() => {
+      browser.test.sendMessage("invalidate");
+    });
+    return new Promise(resolve => browser.test.onMessage.addListener(resolve));
+  }).then(() => {
+    return check("test-prop1", "value1");
+  }).then(() => {
+    return check("test-prop2", "value2");
+
+  // Make sure we can store complex JSON data.
+  }).then(() => {
+    return storage.set({
+      "test-prop1": {
+        str: "hello",
+        bool: true,
+        null: null,
+        undef: undefined,
+        obj: {},
+        arr: [1, 2],
+        date: new Date(0),
+        regexp: /regexp/,
+        func: function func() {},
+        window,
+      },
+    });
+  }).then(() => {
+    return storage.set({"test-prop2": function func() {}});
+  }).then(() => {
+    browser.test.assertEq("value1", globalChanges["test-prop1"].oldValue, "oldValue correct");
+    browser.test.assertEq("object", typeof(globalChanges["test-prop1"].newValue), "newValue is obj");
+    globalChanges = {};
+    return storage.get({"test-prop1": undefined, "test-prop2": undefined});
   }).then(data => {
-    browser.test.assertEq(value, data[prop], `array getter worked for ${prop} in ${areaName}`);
-    return storage.get({[prop]: undefined});
-  }).then(data => {
-    browser.test.assertEq(value, data[prop], `object getter worked for ${prop} in ${areaName}`);
+    let obj = data["test-prop1"];
+
+    browser.test.assertEq("hello", obj.str, "string part correct");
+    browser.test.assertEq(true, obj.bool, "bool part correct");
+    browser.test.assertEq(null, obj.null, "null part correct");
+    browser.test.assertEq(undefined, obj.undef, "undefined part correct");
+    browser.test.assertEq(undefined, obj.func, "function part correct");
+    browser.test.assertEq(undefined, obj.window, "window part correct");
+    browser.test.assertEq("1970-01-01T00:00:00.000Z", obj.date, "date part correct");
+    browser.test.assertEq("/regexp/", obj.regexp, "date part correct");
+    browser.test.assertEq("object", typeof(obj.obj), "object part correct");
+    browser.test.assertTrue(Array.isArray(obj.arr), "array part present");
+    browser.test.assertEq(1, obj.arr[0], "arr[0] part correct");
+    browser.test.assertEq(2, obj.arr[1], "arr[1] part correct");
+    browser.test.assertEq(2, obj.arr.length, "arr.length part correct");
+
+    obj = data["test-prop2"];
+
+    browser.test.assertEq("[object Object]", {}.toString.call(obj), "function serialized as a plain object");
+    browser.test.assertEq(0, Object.keys(obj).length, "function serialized as an empty object");
+  }).then(() => {
+    browser.test.notifyPass("storage");
+  }).catch(e => {
+    browser.test.fail(`Error: ${e} :: ${e.stack}`);
+    browser.test.notifyFail("storage");
   });
 }
 
-add_task(function* test_local_cache_invalidation() {
-  function background(checkGet) {
-    browser.test.onMessage.addListener(msg => {
-      if (msg === "set-initial") {
-        browser.storage.local.set({"test-prop1": "value1", "test-prop2": "value2"}).then(() => {
-          browser.test.sendMessage("set-initial-done");
-        });
-      } else if (msg === "check") {
-        checkGet("local", "test-prop1", "value1").then(() => {
-          return checkGet("local", "test-prop2", "value2");
-        }).then(() => {
-          browser.test.sendMessage("check-done");
-        });
-      }
-    });
+let extensionData = {
+  background: backgroundScript,
+  manifest: {
+    permissions: ["storage"],
+  },
+};
 
-    browser.test.sendMessage("ready");
-  }
-
-  let extension = ExtensionTestUtils.loadExtension({
-    manifest: {
-      permissions: ["storage"],
-    },
-    background: `(${background})(${checkGet})`,
-  });
+add_task(function* test_backgroundScript() {
+  let extension = ExtensionTestUtils.loadExtension(extensionData);
 
   yield extension.startup();
-  yield extension.awaitMessage("ready");
 
-  extension.sendMessage("set-initial");
-  yield extension.awaitMessage("set-initial-done");
+  yield extension.awaitMessage("invalidate");
 
   Services.obs.notifyObservers(null, "extension-invalidate-storage-cache", "");
 
-  extension.sendMessage("check");
-  yield extension.awaitMessage("check-done");
-
-  yield extension.unload();
-});
+  extension.sendMessage("invalidated");
 
-add_task(function* test_config_flag_needed() {
-  function background() {
-    let promises = [];
-    let apiTests = [
-      {method: "get", args: ["foo"]},
-      {method: "set", args: [{foo: "bar"}]},
-      {method: "remove", args: ["foo"]},
-      {method: "clear", args: []},
-    ];
-    apiTests.forEach(testDef => {
-      const test = browser.storage.sync[testDef.method](...testDef.args).then(() => {
-        browser.test.fail("didn't fail with extension.storage.sync.enabled = false");
-        return Promise.reject();
-      }).catch(error => {
-        browser.test.assertEq("Please set webextensions.storage.sync.enabled to " +
-                              "true in about:config", error.message,
-                              `storage.sync.${testDef.method} is behind a flag`);
-        return Promise.resolve();
-      });
-      promises.push(test);
-    });
-
-    Promise.all(promises).then(() => browser.test.notifyPass("flag needed"));
-  }
-
-  ok(!Preferences.get(STORAGE_SYNC_PREF));
-  let extension = ExtensionTestUtils.loadExtension({
-    manifest: {
-      permissions: ["storage"],
-    },
-    background: `(${background})(${checkGet})`,
-  });
-
-  yield extension.startup();
-  yield extension.awaitFinish("flag needed");
+  yield extension.awaitFinish("storage");
   yield extension.unload();
 });
-
-add_task(function* test_reloading_extensions_works() {
-  // Just some random extension ID that we can re-use
-  const extensionId = "my-extension-id@1";
-
-  function loadExtension() {
-    function background() {
-      browser.storage.sync.set({"a": "b"}).then(() => {
-        browser.test.notifyPass("set-works");
-      });
-    }
-
-    return ExtensionTestUtils.loadExtension({
-      manifest: {
-        permissions: ["storage"],
-      },
-      background: `(${background})()`,
-    }, extensionId);
-  }
-
-  Preferences.set(STORAGE_SYNC_PREF, true);
-
-  let extension1 = loadExtension();
-
-  yield extension1.startup();
-  yield extension1.awaitFinish("set-works");
-  yield extension1.unload();
-
-  let extension2 = loadExtension();
-
-  yield extension2.startup();
-  yield extension2.awaitFinish("set-works");
-  yield extension2.unload();
-
-  Preferences.reset(STORAGE_SYNC_PREF);
-});
-
-do_register_cleanup(() => {
-  Preferences.reset(STORAGE_SYNC_PREF);
-});
-
-add_task(function* test_backgroundScript() {
-  function backgroundScript(checkGet) {
-    let globalChanges, gResolve;
-    function clearGlobalChanges() {
-      globalChanges = new Promise(resolve => { gResolve = resolve; });
-    }
-    clearGlobalChanges();
-    let expectedAreaName;
-
-    browser.storage.onChanged.addListener((changes, areaName) => {
-      browser.test.assertEq(expectedAreaName, areaName,
-        "Expected area name received by listener");
-      gResolve(changes);
-    });
-
-    function checkChanges(areaName, changes, message) {
-      function checkSub(obj1, obj2) {
-        for (let prop in obj1) {
-          browser.test.assertTrue(obj1[prop] !== undefined,
-                                  `checkChanges ${areaName} ${prop} is missing (${message})`);
-          browser.test.assertTrue(obj2[prop] !== undefined,
-                                  `checkChanges ${areaName} ${prop} is missing (${message})`);
-          browser.test.assertEq(obj1[prop].oldValue, obj2[prop].oldValue,
-                                `checkChanges ${areaName} ${prop} old (${message})`);
-          browser.test.assertEq(obj1[prop].newValue, obj2[prop].newValue,
-                                `checkChanges ${areaName} ${prop} new (${message})`);
-        }
-      }
-
-      return globalChanges.then(recentChanges => {
-        checkSub(changes, recentChanges);
-        checkSub(recentChanges, changes);
-        clearGlobalChanges();
-      });
-    }
-
-    /* eslint-disable dot-notation */
-    function runTests(areaName) {
-      expectedAreaName = areaName;
-      let storage = browser.storage[areaName];
-      // Set some data and then test getters.
-      return storage.set({"test-prop1": "value1", "test-prop2": "value2"}).then(() => {
-        return checkChanges(areaName,
-          {"test-prop1": {newValue: "value1"}, "test-prop2": {newValue: "value2"}},
-          "set (a)");
-      }).then(() => {
-        return checkGet(areaName, "test-prop1", "value1");
-      }).then(() => {
-        return checkGet(areaName, "test-prop2", "value2");
-      }).then(() => {
-        return storage.get({"test-prop1": undefined, "test-prop2": undefined, "other": "default"});
-      }).then(data => {
-        browser.test.assertEq("value1", data["test-prop1"], "prop1 correct (a)");
-        browser.test.assertEq("value2", data["test-prop2"], "prop2 correct (a)");
-        browser.test.assertEq("default", data["other"], "other correct");
-        return storage.get(["test-prop1", "test-prop2", "other"]);
-      }).then(data => {
-        browser.test.assertEq("value1", data["test-prop1"], "prop1 correct (b)");
-        browser.test.assertEq("value2", data["test-prop2"], "prop2 correct (b)");
-        browser.test.assertFalse("other" in data, "other correct");
-
-        // Remove data in various ways.
-      }).then(() => {
-        return storage.remove("test-prop1");
-      }).then(() => {
-        return checkChanges(areaName, {"test-prop1": {oldValue: "value1"}}, "remove string");
-      }).then(() => {
-        return storage.get(["test-prop1", "test-prop2"]);
-      }).then(data => {
-        browser.test.assertFalse("test-prop1" in data, "prop1 absent (remove string)");
-        browser.test.assertTrue("test-prop2" in data, "prop2 present (remove string)");
-
-        return storage.set({"test-prop1": "value1"});
-      }).then(() => {
-        return checkChanges(areaName, {"test-prop1": {newValue: "value1"}}, "set (c)");
-      }).then(() => {
-        return storage.get(["test-prop1", "test-prop2"]);
-      }).then(data => {
-        browser.test.assertEq(data["test-prop1"], "value1", "prop1 correct (c)");
-        browser.test.assertEq(data["test-prop2"], "value2", "prop2 correct (c)");
-      }).then(() => {
-        return storage.remove(["test-prop1", "test-prop2"]);
-      }).then(() => {
-        return checkChanges(areaName,
-          {"test-prop1": {oldValue: "value1"}, "test-prop2": {oldValue: "value2"}},
-          "remove array");
-      }).then(() => {
-        return storage.get(["test-prop1", "test-prop2"]);
-      }).then(data => {
-        browser.test.assertFalse("test-prop1" in data, "prop1 absent (remove array)");
-        browser.test.assertFalse("test-prop2" in data, "prop2 absent (remove array)");
-
-        // test storage.clear
-      }).then(() => {
-        return storage.set({"test-prop1": "value1", "test-prop2": "value2"});
-      }).then(() => {
-        // Make sure that set() handler happened before we clear the
-        // promise again.
-        return globalChanges;
-      }).then(() => {
-        clearGlobalChanges();
-        return storage.clear();
-      }).then(() => {
-        return checkChanges(areaName,
-          {"test-prop1": {oldValue: "value1"}, "test-prop2": {oldValue: "value2"}},
-          "clear");
-      }).then(() => {
-        return storage.get(["test-prop1", "test-prop2"]);
-      }).then(data => {
-        browser.test.assertFalse("test-prop1" in data, "prop1 absent (clear)");
-        browser.test.assertFalse("test-prop2" in data, "prop2 absent (clear)");
-
-        // Make sure we can store complex JSON data.
-      }).then(() => {
-        // known previous values
-        return storage.set({"test-prop1": "value1", "test-prop2": "value2"});
-      }).then(() => {
-        // Make sure the set() handler landed.
-        return globalChanges;
-      }).then(() => {
-        clearGlobalChanges();
-        return storage.set({
-          "test-prop1": {
-            str: "hello",
-            bool: true,
-            null: null,
-            undef: undefined,
-            obj: {},
-            arr: [1, 2],
-            date: new Date(0),
-            regexp: /regexp/,
-            func: function func() {},
-            window,
-          },
-        });
-      }).then(() => {
-        return storage.set({"test-prop2": function func() {}});
-      }).then(() => globalChanges).then(recentChanges => {
-        browser.test.assertEq("value1", recentChanges["test-prop1"].oldValue, "oldValue correct");
-        browser.test.assertEq("object", typeof(recentChanges["test-prop1"].newValue), "newValue is obj");
-        clearGlobalChanges();
-        return storage.get({"test-prop1": undefined, "test-prop2": undefined});
-      }).then(data => {
-        let obj = data["test-prop1"];
-
-        browser.test.assertEq("hello", obj.str, "string part correct");
-        browser.test.assertEq(true, obj.bool, "bool part correct");
-        browser.test.assertEq(null, obj.null, "null part correct");
-        browser.test.assertEq(undefined, obj.undef, "undefined part correct");
-        browser.test.assertEq(undefined, obj.func, "function part correct");
-        browser.test.assertEq(undefined, obj.window, "window part correct");
-        browser.test.assertEq("1970-01-01T00:00:00.000Z", obj.date, "date part correct");
-        browser.test.assertEq("/regexp/", obj.regexp, "regexp part correct");
-        browser.test.assertEq("object", typeof(obj.obj), "object part correct");
-        browser.test.assertTrue(Array.isArray(obj.arr), "array part present");
-        browser.test.assertEq(1, obj.arr[0], "arr[0] part correct");
-        browser.test.assertEq(2, obj.arr[1], "arr[1] part correct");
-        browser.test.assertEq(2, obj.arr.length, "arr.length part correct");
-
-        obj = data["test-prop2"];
-
-        browser.test.assertEq("[object Object]", {}.toString.call(obj), "function serialized as a plain object");
-        browser.test.assertEq(0, Object.keys(obj).length, "function serialized as an empty object");
-      }).catch(e => {
-        browser.test.fail(`Error: ${e} :: ${e.stack}`);
-        browser.test.notifyFail("storage");
-      });
-    }
-
-    browser.test.onMessage.addListener(msg => {
-      let promise;
-      if (msg === "test-local") {
-        promise = runTests("local");
-      } else if (msg === "test-sync") {
-        promise = runTests("sync");
-      }
-      promise.then(() => browser.test.sendMessage("test-finished"));
-    });
-
-    browser.test.sendMessage("ready");
-  }
-
-  let extensionData = {
-    background: `(${backgroundScript})(${checkGet})`,
-    manifest: {
-      permissions: ["storage"],
-    },
-  };
-
-  Preferences.set(STORAGE_SYNC_PREF, true);
-
-  let extension = ExtensionTestUtils.loadExtension(extensionData);
-  yield extension.startup();
-  yield extension.awaitMessage("ready");
-
-  extension.sendMessage("test-local");
-  yield extension.awaitMessage("test-finished");
-
-  extension.sendMessage("test-sync");
-  yield extension.awaitMessage("test-finished");
-
-  Preferences.reset(STORAGE_SYNC_PREF);
-  yield extension.unload();
-});
deleted file mode 100644
--- a/toolkit/components/extensions/test/xpcshell/test_ext_storage_sync.js
+++ /dev/null
@@ -1,927 +0,0 @@
-/* Any copyright is dedicated to the Public Domain.
- * http://creativecommons.org/publicdomain/zero/1.0/ */
-
-"use strict";
-
-do_get_profile();   // so we can use FxAccounts
-
-Cu.import("resource://testing-common/httpd.js");
-Cu.import("resource://services-common/utils.js");
-Cu.import("resource://gre/modules/ExtensionStorageSync.jsm");
-const {
-  CollectionKeyEncryptionRemoteTransformer,
-  cryptoCollection,
-  idToKey,
-  extensionIdToCollectionId,
-  keyToId,
-} = Cu.import("resource://gre/modules/ExtensionStorageSync.jsm");
-Cu.import("resource://services-sync/engines/extension-storage.js");
-Cu.import("resource://services-sync/keys.js");
-Cu.import("resource://services-sync/util.js");
-
-/* globals BulkKeyBundle, CommonUtils, EncryptionRemoteTransformer */
-/* globals KeyRingEncryptionRemoteTransformer */
-/* globals Utils */
-
-function handleCannedResponse(cannedResponse, request, response) {
-  response.setStatusLine(null, cannedResponse.status.status,
-                         cannedResponse.status.statusText);
-  // send the headers
-  for (let headerLine of cannedResponse.sampleHeaders) {
-    let headerElements = headerLine.split(":");
-    response.setHeader(headerElements[0], headerElements[1].trimLeft());
-  }
-  response.setHeader("Date", (new Date()).toUTCString());
-
-  response.write(cannedResponse.responseBody);
-}
-
-function collectionRecordsPath(collectionId) {
-  return `/buckets/default/collections/${collectionId}/records`;
-}
-
-class KintoServer {
-  constructor() {
-    // Set up an HTTP Server
-    this.httpServer = new HttpServer();
-    this.httpServer.start(-1);
-
-    // Map<CollectionId, Set<Object>> corresponding to the data in the
-    // Kinto server
-    this.collections = new Map();
-
-    // ETag to serve with responses
-    this.etag = 1;
-
-    this.port = this.httpServer.identity.primaryPort;
-    // POST requests we receive from the client go here
-    this.posts = [];
-    // DELETEd buckets will go here.
-    this.deletedBuckets = [];
-    // Anything in here will force the next POST to generate a conflict
-    this.conflicts = [];
-
-    this.installConfigPath();
-    this.installBatchPath();
-    this.installCatchAll();
-  }
-
-  clearPosts() {
-    this.posts = [];
-  }
-
-  getPosts() {
-    return this.posts;
-  }
-
-  getDeletedBuckets() {
-    return this.deletedBuckets;
-  }
-
-  installConfigPath() {
-    const configPath = "/v1/";
-    const responseBody = JSON.stringify({
-      "settings": {"batch_max_requests": 25},
-      "url": `http://localhost:${this.port}/v1/`,
-      "documentation": "https://kinto.readthedocs.org/",
-      "version": "1.5.1",
-      "commit": "cbc6f58",
-      "hello": "kinto",
-    });
-    const configResponse = {
-      "sampleHeaders": [
-        "Access-Control-Allow-Origin: *",
-        "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
-        "Content-Type: application/json; charset=UTF-8",
-        "Server: waitress",
-      ],
-      "status": {status: 200, statusText: "OK"},
-      "responseBody": responseBody,
-    };
-
-    function handleGetConfig(request, response) {
-      if (request.method != "GET") {
-        dump(`ARGH, got ${request.method}\n`);
-      }
-      return handleCannedResponse(configResponse, request, response);
-    }
-
-    this.httpServer.registerPathHandler(configPath, handleGetConfig);
-  }
-
-  installBatchPath() {
-    const batchPath = "/v1/batch";
-
-    function handlePost(request, response) {
-      let bodyStr = CommonUtils.readBytesFromInputStream(request.bodyInputStream);
-      let body = JSON.parse(bodyStr);
-      let defaults = body.defaults;
-      for (let req of body.requests) {
-        let headers = Object.assign({}, defaults && defaults.headers || {}, req.headers);
-        // FIXME: assert auth is "Bearer ...token..."
-        this.posts.push(Object.assign({}, req, {headers}));
-      }
-
-      response.setStatusLine(null, 200, "OK");
-      response.setHeader("Content-Type", "application/json; charset=UTF-8");
-      response.setHeader("Date", (new Date()).toUTCString());
-
-      let postResponse = {
-        responses: body.requests.map(req => {
-          return {
-            path: req.path,
-            status: 201,   // FIXME -- only for new posts??
-            headers: {"ETag": 3000},   // FIXME???
-            body: {"data": Object.assign({}, req.body.data, {last_modified: this.etag}),
-                   "permissions": []},
-          };
-        }),
-      };
-
-      if (this.conflicts.length > 0) {
-        const {collectionId, encrypted} = this.conflicts.shift();
-        this.collections.get(collectionId).add(encrypted);
-        dump(`responding with etag ${this.etag}\n`);
-        postResponse = {
-          responses: body.requests.map(req => {
-            return {
-              path: req.path,
-              status: 412,
-              headers: {"ETag": this.etag}, // is this correct??
-              body: {
-                details: {
-                  existing: encrypted,
-                },
-              },
-            };
-          }),
-        };
-      }
-
-      response.write(JSON.stringify(postResponse));
-
-      //   "sampleHeaders": [
-      //     "Access-Control-Allow-Origin: *",
-      //     "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
-      //     "Server: waitress",
-      //     "Etag: \"4000\""
-      //   ],
-    }
-
-    this.httpServer.registerPathHandler(batchPath, handlePost.bind(this));
-  }
-
-  installCatchAll() {
-    this.httpServer.registerPathHandler("/", (request, response) => {
-      dump(`got request: ${request.method}:${request.path}?${request.queryString}\n`);
-      dump(`${CommonUtils.readBytesFromInputStream(request.bodyInputStream)}\n`);
-    });
-  }
-
-  installCollection(collectionId) {
-    this.collections.set(collectionId, new Set());
-
-    const remoteRecordsPath = "/v1" + collectionRecordsPath(encodeURIComponent(collectionId));
-
-    function handleGetRecords(request, response) {
-      if (request.method != "GET") {
-        do_throw(`only GET is supported on ${remoteRecordsPath}`);
-      }
-
-      response.setStatusLine(null, 200, "OK");
-      response.setHeader("Content-Type", "application/json; charset=UTF-8");
-      response.setHeader("Date", (new Date()).toUTCString());
-      response.setHeader("ETag", this.etag.toString());
-
-      const records = this.collections.get(collectionId);
-      // Can't JSON a Set directly, so convert to Array
-      const data = Array.from(records);
-      for (const record of records) {
-        if (record._onlyOnce) {
-          records.delete(record);
-        }
-      }
-
-      const body = JSON.stringify({
-        "data": data,
-      });
-      response.write(body);
-    }
-
-    this.httpServer.registerPathHandler(remoteRecordsPath, handleGetRecords.bind(this));
-  }
-
-  installDeleteBucket() {
-    this.httpServer.registerPrefixHandler("/v1/buckets/", (request, response) => {
-      if (request.method != "DELETE") {
-        dump(`got a non-delete action on bucket: ${request.method} ${request.path}\n`);
-        return;
-      }
-
-      const noPrefix = request.path.slice("/v1/buckets/".length);
-      // Split without a limit so a trailing path component (e.g.
-      // "bucket/extra") actually ends up in afterBucket; split("/", 1)
-      // would always leave it undefined.
-      const [bucket, afterBucket] = noPrefix.split("/");
-      if (afterBucket && afterBucket != "") {
-        dump(`got a delete for a non-bucket: ${request.method} ${request.path}\n`);
-      }
-
-      this.deletedBuckets.push(bucket);
-      // Fake like this actually deletes the records.
-      for (const [, set] of this.collections) {
-        set.clear();
-      }
-
-      response.write(JSON.stringify({
-        data: {
-          deleted: true,
-          last_modified: 1475161309026,
-          id: "b09f1618-d789-302d-696e-74ec53ee18a8", // FIXME
-        },
-      }));
-    });
-  }
-
-  // Utility function to install a keyring at the start of a test.
-  installKeyRing(keysData, etag, {conflict = false} = {}) {
-    this.installCollection("storage-sync-crypto");
-    const keysRecord = {
-      "id": "keys",
-      "keys": keysData,
-      "last_modified": etag,
-    };
-    this.etag = etag;
-    const methodName = conflict ? "encryptAndAddRecordWithConflict" : "encryptAndAddRecord";
-    this[methodName](new KeyRingEncryptionRemoteTransformer(),
-                     "storage-sync-crypto", keysRecord);
-  }
-
-  encryptAndAddRecord(transformer, collectionId, record) {
-    return transformer.encode(record).then(encrypted => {
-      this.collections.get(collectionId).add(encrypted);
-    });
-  }
-
-  // Like encryptAndAddRecord, but add a flag that will only serve
-  // this record once.
-  //
-  // Since in real life, Kinto only serves a record as part of a changes feed
-  // once, this can be useful for testing complicated syncing logic.
-  encryptAndAddRecordOnlyOnce(transformer, collectionId, record) {
-    return transformer.encode(record).then(encrypted => {
-      encrypted._onlyOnce = true;
-      this.collections.get(collectionId).add(encrypted);
-    });
-  }
-
-  // Conflicts block the next push and then appear in the collection specified.
-  encryptAndAddRecordWithConflict(transformer, collectionId, record) {
-    return transformer.encode(record).then(encrypted => {
-      this.conflicts.push({collectionId, encrypted});
-    });
-  }
-
-  clearCollection(collectionId) {
-    this.collections.get(collectionId).clear();
-  }
-
-  stop() {
-    this.httpServer.stop(() => { });
-  }
-}
-
-// Run a block of code with access to a KintoServer.
-function* withServer(f) {
-  let server = new KintoServer();
-  // Point the sync.storage client to use the test server we've just started.
-  Services.prefs.setCharPref("webextensions.storage.sync.serverURL",
-                             `http://localhost:${server.port}/v1`);
-  try {
-    yield* f(server);
-  } finally {
-    server.stop();
-  }
-}
-
-// Run a block of code with access to both a sync context and a
-// KintoServer. This is meant as a workaround for eslint's refusal to
-// let me have 5 nested callbacks.
-function* withContextAndServer(f) {
-  yield* withSyncContext(function* (context) {
-    yield* withServer(function* (server) {
-      yield* f(context, server);
-    });
-  });
-}
-
-// Run a block of code with fxa mocked out to return a specific user.
-function* withSignedInUser(user, f) {
-  const oldESSFxAccounts = ExtensionStorageSync._fxaService;
-  const oldERTFxAccounts = EncryptionRemoteTransformer.prototype._fxaService;
-  ExtensionStorageSync._fxaService = EncryptionRemoteTransformer.prototype._fxaService = {
-    getSignedInUser() {
-      return Promise.resolve(user);
-    },
-    getOAuthToken() {
-      return Promise.resolve("some-access-token");
-    },
-    sessionStatus() {
-      return Promise.resolve(true);
-    },
-  };
-
-  try {
-    yield* f();
-  } finally {
-    ExtensionStorageSync._fxaService = oldESSFxAccounts;
-    EncryptionRemoteTransformer.prototype._fxaService = oldERTFxAccounts;
-  }
-}
-
-// Some assertions that make it easier to write tests about what was
-// posted and when.
-
-// Assert that the request was made with the correct access token.
-// This should be true of all requests, so this is usually called from
-// another assertion.
-function assertAuthenticatedRequest(post) {
-  equal(post.headers.Authorization, "Bearer some-access-token");
-}
-
-// Assert that this post was made with the correct request headers to
-// create a new resource while protecting against someone else
-// creating it at the same time (in other words, "If-None-Match: *").
-// Also calls assertAuthenticatedRequest(post).
-function assertPostedNewRecord(post) {
-  assertAuthenticatedRequest(post);
-  equal(post.headers["If-None-Match"], "*");
-}
-
-// Assert that this post was made with the correct request headers to
-// update an existing resource while protecting against concurrent
-// modification (in other words, `If-Match: "${etag}"`).
-// Also calls assertAuthenticatedRequest(post).
-function assertPostedUpdatedRecord(post, since) {
-  assertAuthenticatedRequest(post);
-  equal(post.headers["If-Match"], `"${since}"`);
-}
-
-// Assert that this post was an encrypted keyring, and produce the
-// decrypted body. Sanity check the body while we're here.
-const assertPostedEncryptedKeys = Task.async(function* (post) {
-  equal(post.path, collectionRecordsPath("storage-sync-crypto") + "/keys");
-
-  let body = yield new KeyRingEncryptionRemoteTransformer().decode(post.body.data);
-  ok(body.keys, `keys object should be present in decoded body`);
-  ok(body.keys.default, `keys object should have a default key`);
-  return body;
-});
-
-// assertEqual, but for keyring[extensionId] == key.
-function assertKeyRingKey(keyRing, extensionId, expectedKey, message) {
-  if (!message) {
-    message = `expected keyring's key for ${extensionId} to match ${expectedKey.keyPairB64}`;
-  }
-  ok(keyRing.hasKeysFor([extensionId]),
-     `expected keyring to have a key for ${extensionId}\n`);
-  deepEqual(keyRing.keyForCollection(extensionId).keyPairB64, expectedKey.keyPairB64,
-            message);
-}
-
-// Tests using this ID will share keys in local storage, so be careful.
-const extensionId = "{13bdde76-4dc7-11e6-9bdc-54ee758d6342}";
-const extension = {id: extensionId};
-
-const BORING_KB = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef";
-const ANOTHER_KB = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcde0";
-const loggedInUser = {
-  uid: "0123456789abcdef0123456789abcdef",
-  kB: BORING_KB,
-  oauthTokens: {
-    "sync:addon-storage": {
-      token: "some-access-token",
-    },
-  },
-};
-const collectionId = extensionIdToCollectionId(loggedInUser, extensionId);
-
-function uuid() {
-  const uuidgen = Cc["@mozilla.org/uuid-generator;1"].getService(Ci.nsIUUIDGenerator);
-  return uuidgen.generateUUID();
-}
-
-add_task(function* test_key_to_id() {
-  equal(keyToId("foo"), "key-foo");
-  equal(keyToId("my-new-key"), "key-my_2D_new_2D_key");
-  equal(keyToId(""), "key-");
-  equal(keyToId("™"), "key-_2122_");
-  equal(keyToId("\b"), "key-_8_");
-  equal(keyToId("abc\ndef"), "key-abc_A_def");
-  equal(keyToId("Kinto's fancy_string"), "key-Kinto_27_s_20_fancy_5F_string");
-
-  const KEYS = ["foo", "my-new-key", "", "Kinto's fancy_string", "™", "\b"];
-  for (let key of KEYS) {
-    equal(idToKey(keyToId(key)), key);
-  }
-
-  equal(idToKey("hi"), null);
-  equal(idToKey("-key-hi"), null);
-  equal(idToKey("key--abcd"), null);
-  equal(idToKey("key-%"), null);
-  equal(idToKey("key-_HI"), null);
-  equal(idToKey("key-_HI_"), null);
-  equal(idToKey("key-"), "");
-  equal(idToKey("key-1"), "1");
-  equal(idToKey("key-_2D_"), "-");
-});
-
-add_task(function* test_extension_id_to_collection_id() {
-  const newKBUser = Object.assign(loggedInUser, {kB: ANOTHER_KB});
-  const extensionId = "{9419cce6-5435-11e6-84bf-54ee758d6342}";
-  const extensionId2 = "{9419cce6-5435-11e6-84bf-54ee758d6343}";
-
-  // "random" 32-char hex userid
-  equal(extensionIdToCollectionId(loggedInUser, extensionId),
-        "abf4e257dad0c89027f8f25bd196d4d69c100df375655a0c49f4cea7b791ea7d");
-  equal(extensionIdToCollectionId(loggedInUser, extensionId),
-        extensionIdToCollectionId(newKBUser, extensionId));
-  equal(extensionIdToCollectionId(loggedInUser, extensionId2),
-        "6584b0153336fb274912b31a3225c15a92b703cdc3adfe1917c1aa43122a52b8");
-});
-
-add_task(function* ensureKeysFor_posts_new_keys() {
-  const extensionId = uuid();
-  yield* withContextAndServer(function* (context, server) {
-    yield* withSignedInUser(loggedInUser, function* () {
-      server.installCollection("storage-sync-crypto");
-      server.etag = 1000;
-
-      // Prompt ExtensionStorageSync to initialize crypto
-      yield ExtensionStorageSync.get({id: extensionId}, "random-key", context);
-
-      let newKeys = yield ExtensionStorageSync.ensureKeysFor([extensionId]);
-      ok(newKeys.hasKeysFor([extensionId]), `expected a key to be present for ${extensionId}`);
-
-      let posts = server.getPosts();
-      equal(posts.length, 1);
-      const post = posts[0];
-      assertPostedNewRecord(post);
-      const body = yield assertPostedEncryptedKeys(post);
-      ok(body.keys.collections[extensionId], `keys object should have a key for ${extensionId}`);
-    });
-  });
-});
-
-add_task(function* ensureKeysFor_pulls_key() {
-  // ensureKeysFor is implemented by adding a key to our local record
-  // and doing a sync. This means that if the same key exists
-  // remotely, we get a "conflict". Ensure that we handle this
-  // correctly -- we keep the server key (since presumably it's
-  // already been used to encrypt records) and we don't wipe out other
-  // collections' keys.
-  const extensionId = uuid();
-  const extensionId2 = uuid();
-  const DEFAULT_KEY = new BulkKeyBundle("[default]");
-  DEFAULT_KEY.generateRandom();
-  const RANDOM_KEY = new BulkKeyBundle(extensionId);
-  RANDOM_KEY.generateRandom();
-  yield* withContextAndServer(function* (context, server) {
-    yield* withSignedInUser(loggedInUser, function* () {
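-      // A keyring in the shape ExtensionStorageSync uses: a default key plus per-extension collection keys.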
-      const keysData = {
-        "default": DEFAULT_KEY.keyPairB64,
-        "collections": {
-          [extensionId]: RANDOM_KEY.keyPairB64,
-        },
-      };
-      server.installKeyRing(keysData, 999);
-
-      // Prompt ExtensionStorageSync to initialize crypto
-      yield ExtensionStorageSync.get({id: extensionId}, "random-key", context);
-
-      let collectionKeys = yield ExtensionStorageSync.ensureKeysFor([extensionId]);
-      assertKeyRingKey(collectionKeys, extensionId, RANDOM_KEY);
-
-      let posts = server.getPosts();
-      equal(posts.length, 0,
-            "ensureKeysFor shouldn't push when the server keyring has the right key");
-
-      // Another client generates a key for extensionId2
-      const newKey = new BulkKeyBundle(extensionId2);
-      newKey.generateRandom();
-      keysData.collections[extensionId2] = newKey.keyPairB64;
-      server.clearCollection("storage-sync-crypto");
-      server.installKeyRing(keysData, 1000);
-
-      let newCollectionKeys = yield ExtensionStorageSync.ensureKeysFor([extensionId, extensionId2]);
-      assertKeyRingKey(newCollectionKeys, extensionId2, newKey);
-      assertKeyRingKey(newCollectionKeys, extensionId, RANDOM_KEY,
-                       `ensureKeysFor shouldn't lose the old key for ${extensionId}`);
-
-      posts = server.getPosts();
-      equal(posts.length, 0, "ensureKeysFor shouldn't push when updating keys");
-    });
-  });
-});
-
-add_task(function* ensureKeysFor_handles_conflicts() {
-  // Syncing is done through a pull followed by a push of any merged
-  // changes. Accordingly, the only way to have a "true" conflict --
-  // i.e. with the server rejecting a change -- is if
-  // someone pushes changes between our pull and our push. Ensure that
-  // if this happens, we still behave sensibly (keep the remote key).
-  const extensionId = uuid();
-  const DEFAULT_KEY = new BulkKeyBundle("[default]");
-  DEFAULT_KEY.generateRandom();
-  const RANDOM_KEY = new BulkKeyBundle(extensionId);
-  RANDOM_KEY.generateRandom();
-  yield* withContextAndServer(function* (context, server) {
-    yield* withSignedInUser(loggedInUser, function* () {
-      const keysData = {
-        "default": DEFAULT_KEY.keyPairB64,
-        "collections": {
-          [extensionId]: RANDOM_KEY.keyPairB64,
-        },
-      };
-      server.installKeyRing(keysData, 765, {conflict: true});
-
-      // Prompt ExtensionStorageSync to initialize crypto
-      yield ExtensionStorageSync.get({id: extensionId}, "random-key", context);
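-      // Wipe any keyring cached locally by previous tests so the server's keyring is authoritative.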
-      yield cryptoCollection._clear();
-
-      let collectionKeys = yield ExtensionStorageSync.ensureKeysFor([extensionId]);
-      assertKeyRingKey(collectionKeys, extensionId, RANDOM_KEY,
-                       `syncing keyring should keep the server key for ${extensionId}`);
-
-      let posts = server.getPosts();
-      equal(posts.length, 1,
-            "syncing keyring should have tried to post a keyring");
-      const failedPost = posts[0];
-      assertPostedNewRecord(failedPost);
-      let body = yield assertPostedEncryptedKeys(failedPost);
-      // This key will be the one the client generated locally, so
-      // we don't know what its value will be
-      ok(body.keys.collections[extensionId],
-         `decrypted failed post should have a key for ${extensionId}`);
-      notEqual(body.keys.collections[extensionId], RANDOM_KEY.keyPairB64,
-               `decrypted failed post should have a randomly-generated key for ${extensionId}`);
-    });
-  });
-});
-
-add_task(function* checkSyncKeyRing_reuploads_keys() {
-  // Verify that when keys are present, they are reuploaded with the
-  // new kB when we call touchKeys().
-  const extensionId = uuid();
-  let extensionKey;
-  yield* withContextAndServer(function* (context, server) {
-    yield* withSignedInUser(loggedInUser, function* () {
-      server.installCollection("storage-sync-crypto");
-      server.etag = 765;
-
-      // Prompt ExtensionStorageSync to initialize crypto
-      yield ExtensionStorageSync.get({id: extensionId}, "random-key", context);
-      yield cryptoCollection._clear();
-
-      // Do an `ensureKeysFor` to generate some keys.
-      let collectionKeys = yield ExtensionStorageSync.ensureKeysFor([extensionId]);
-      ok(collectionKeys.hasKeysFor([extensionId]),
-         `ensureKeysFor should return a keyring that has a key for ${extensionId}`);
-      extensionKey = collectionKeys.keyForCollection(extensionId).keyPairB64;
-      equal(server.getPosts().length, 1,
-            "generating a key that doesn't exist on the server should post it");
-    });
-
-    // The user changes their password. This is their new kB, with
-    // the last f changed to an e.
-    const NOVEL_KB = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdee";
-    const newUser = Object.assign({}, loggedInUser, {kB: NOVEL_KB});
-    let postedKeys;
-    yield* withSignedInUser(newUser, function* () {
-      yield ExtensionStorageSync.checkSyncKeyRing();
-
-      let posts = server.getPosts();
-      equal(posts.length, 2,
-            "when kB changes, checkSyncKeyRing should post the keyring reencrypted with the new kB");
-      postedKeys = posts[1];
-      assertPostedUpdatedRecord(postedKeys, 765);
-
-      let body = yield assertPostedEncryptedKeys(postedKeys);
-      deepEqual(body.keys.collections[extensionId], extensionKey,
-                `the posted keyring should have the same key for ${extensionId} as the old one`);
-    });
-
-    // Verify that with the old kB, we can't decrypt the record.
-    yield* withSignedInUser(loggedInUser, function* () {
-      let error;
-      try {
-        yield new KeyRingEncryptionRemoteTransformer().decode(postedKeys.body.data);
-      } catch (e) {
-        error = e;
-      }
-      ok(error, "decrypting the keyring with the old kB should fail");
-      ok(Utils.isHMACMismatch(error) || KeyRingEncryptionRemoteTransformer.isOutdatedKB(error),
-         "decrypting the keyring with the old kB should throw an HMAC mismatch");
-    });
-  });
-});
-
-add_task(function* checkSyncKeyRing_overwrites_on_conflict() {
-  // If there is already a record on the server that was encrypted
-  // with a different kB, we wipe the server, clear sync state, and
-  // overwrite it with our keys.
-  const extensionId = uuid();
-  const transformer = new KeyRingEncryptionRemoteTransformer();
-  let extensionKey;
-  yield* withSyncContext(function* (context) {
-    yield* withServer(function* (server) {
-      // The old device has this kB, which is very similar to the
-      // current kB but with the last f changed to an e.
-      const NOVEL_KB = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdee";
-      const oldUser = Object.assign({}, loggedInUser, {kB: NOVEL_KB});
-      server.installCollection("storage-sync-crypto");
-      server.installDeleteBucket();
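-      // Let the fake server accept bucket deletions so the wipe below shows up in getDeletedBuckets().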
-      server.etag = 765;
-      yield* withSignedInUser(oldUser, function* () {
-        const FAKE_KEYRING = {
-          id: "keys",
-          keys: {},
-          uuid: "abcd",
-          kbHash: "abcd",
-        };
-        yield server.encryptAndAddRecord(transformer, "storage-sync-crypto", FAKE_KEYRING);
-      });
-
-      // Now we have this new user with a different kB.
-      yield* withSignedInUser(loggedInUser, function* () {
-        // Prompt ExtensionStorageSync to initialize crypto
-        yield ExtensionStorageSync.get({id: extensionId}, "random-key", context);
-        yield cryptoCollection._clear();
-
-        // Do an `ensureKeysFor` to generate some keys.
-        // This will try to sync, notice that the record is
-        // undecryptable, and clear the server.
-        let collectionKeys = yield ExtensionStorageSync.ensureKeysFor([extensionId]);
-        ok(collectionKeys.hasKeysFor([extensionId]),
-           `ensureKeysFor should always return a keyring with a key for ${extensionId}`);
-        extensionKey = collectionKeys.keyForCollection(extensionId).keyPairB64;
-
-        deepEqual(server.getDeletedBuckets(), ["default"],
-                  "Kinto server should have been wiped when keyring was thrown away");
-
-        let posts = server.getPosts();
-        equal(posts.length, 1,
-             "new keyring should have been uploaded");
-        const postedKeys = posts[0];
-        // The POST was to an empty server, so etag shouldn't be respected
-        equal(postedKeys.headers.Authorization, "Bearer some-access-token",
-              "keyring upload should be authorized");
-        equal(postedKeys.headers["If-None-Match"], "*",
-              "keyring upload should be to empty Kinto server");
-        equal(postedKeys.path, collectionRecordsPath("storage-sync-crypto") + "/keys",
-              "keyring upload should be to keyring path");
-
-        let body = yield new KeyRingEncryptionRemoteTransformer().decode(postedKeys.body.data);
-        ok(body.uuid, "new keyring should have a UUID");
-        notEqual(body.uuid, "abcd",
-                 "new keyring should not have the same UUID as previous keyring");
-        ok(body.keys,
-           "new keyring should have a keys attribute");
-        ok(body.keys.default, "new keyring should have a default key");
-        // We should keep the extension key that was in our uploaded version.
-        deepEqual(extensionKey, body.keys.collections[extensionId],
-                  "ensureKeysFor should have returned keyring with the same key that was uploaded");
-
-        // This should be a no-op; the keys were uploaded as part of ensureKeysFor.
-        yield ExtensionStorageSync.checkSyncKeyRing();
-        equal(server.getPosts().length, 1,
-              "checkSyncKeyRing should not need to post keys after they were reuploaded");
-      });
-    });
-  });
-});
-
-add_task(function* checkSyncKeyRing_flushes_on_uuid_change() {
-  // If we can decrypt the record, but the UUID has changed, that
-  // means another client has wiped the server and reuploaded a
-  // keyring, so reset sync state and reupload everything.
-  const extensionId = uuid();
-  const extension = {id: extensionId};
-  const collectionId = extensionIdToCollectionId(loggedInUser, extensionId);
-  const transformer = new KeyRingEncryptionRemoteTransformer();
-  yield* withSyncContext(function* (context) {
-    yield* withServer(function* (server) {
-      server.installCollection("storage-sync-crypto");
-      server.installCollection(collectionId);
-      server.installDeleteBucket();
-      yield* withSignedInUser(loggedInUser, function* () {
-        // Prompt ExtensionStorageSync to initialize crypto
-        yield ExtensionStorageSync.get(extension, "random-key", context);
-        yield cryptoCollection._clear();
-
-        // Do an `ensureKeysFor` to get access to keys.
-        let collectionKeys = yield ExtensionStorageSync.ensureKeysFor([extensionId]);
-        ok(collectionKeys.hasKeysFor([extensionId]),
-           `ensureKeysFor should always return a keyring that has a key for ${extensionId}`);
-        const extensionKey = collectionKeys.keyForCollection(extensionId).keyPairB64;
-
-        // Set something to make sure that it gets re-uploaded when
-        // uuid changes.
-        yield ExtensionStorageSync.set(extension, {"my-key": 5}, context);
-        yield ExtensionStorageSync.syncAll();
-
-        let posts = server.getPosts();
-        equal(posts.length, 2,
-              "should have posted a new keyring and an extension datum");
-        const postedKeys = posts[0];
-        equal(postedKeys.path, collectionRecordsPath("storage-sync-crypto") + "/keys",
-              "should have posted keyring to /keys");
-
-        let body = yield transformer.decode(postedKeys.body.data);
-        ok(body.uuid,
-           "keyring should have a UUID");
-        ok(body.keys,
-           "keyring should have a keys attribute");
-        ok(body.keys.default,
-           "keyring should have a default key");
-        deepEqual(extensionKey, body.keys.collections[extensionId],
-                  "new keyring should have the same key that we uploaded");
-
-        // Another client comes along and replaces the UUID.
-        // In real life, this would mean changing the keys too, but
-        // this test verifies that just changing the UUID is enough.
-        const newKeyRingData = Object.assign({}, body, {
-          uuid: "abcd",
-          // Technically, last_modified should be served outside the
-          // object, but the transformer will pass it through in
-          // either direction, so this is OK.
-          last_modified: 765,
-        });
-        server.clearCollection("storage-sync-crypto");
-        server.etag = 765;
-        yield server.encryptAndAddRecordOnlyOnce(transformer, "storage-sync-crypto", newKeyRingData);
-
-        // Fake adding another extension just so that the keyring will
-        // really get synced.
-        const newExtension = uuid();
-        const newKeyRing = yield ExtensionStorageSync.ensureKeysFor([newExtension]);
-
-        // This should have detected the UUID change and flushed everything.
-        // The keyring should, however, be the same, since we just
-        // changed the UUID of the previously POSTed one.
-        deepEqual(newKeyRing.keyForCollection(extensionId).keyPairB64, extensionKey,
-                  "ensureKeysFor should have pulled down a new keyring with the same keys");
-
-        // Syncing should reupload the data for the extension.
-        yield ExtensionStorageSync.syncAll();
-        posts = server.getPosts();
-        equal(posts.length, 4,
-              "should have posted keyring for new extension and reuploaded extension data");
-
-        const finalKeyRingPost = posts[2];
-        const reuploadedPost = posts[3];
-
-        equal(finalKeyRingPost.path, collectionRecordsPath("storage-sync-crypto") + "/keys",
-              "keyring for new extension should have been posted to /keys");
-        let finalKeyRing = yield transformer.decode(finalKeyRingPost.body.data);
-        equal(finalKeyRing.uuid, "abcd",
-              "newly uploaded keyring should preserve UUID from replacement keyring");
-
-        // Confirm that the data got reuploaded
-        equal(reuploadedPost.path, collectionRecordsPath(collectionId) + "/key-my_2D_key",
-              "extension data should be posted to path corresponding to its key");
-        let reuploadedData = yield new CollectionKeyEncryptionRemoteTransformer(extensionId).decode(reuploadedPost.body.data);
-        equal(reuploadedData.key, "my-key",
-              "extension data should have a key attribute corresponding to the extension data key");
-        equal(reuploadedData.data, 5,
-              "extension data should have a data attribute corresponding to the extension data value");
-      });
-    });
-  });
-});
-
-add_task(function* test_storage_sync_pulls_changes() {
-  yield* withContextAndServer(function* (context, server) {
-    yield* withSignedInUser(loggedInUser, function* () {
-      let transformer = new CollectionKeyEncryptionRemoteTransformer(extensionId);
-      server.installCollection(collectionId);
-      server.installCollection("storage-sync-crypto");
-
-      let calls = [];
-      yield ExtensionStorageSync.addOnChangedListener(extension, function() {
-        calls.push(arguments);
-      }, context);
-
-      // This has to happen AFTER invoking ExtensionStorageSync so
-      // that it can set up the crypto keys collection.
-      yield ExtensionStorageSync.ensureKeysFor([extensionId]);
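-      // Simulate another client uploading a record; its ID is the keyToId() encoding of "remote-key".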
-      yield server.encryptAndAddRecord(transformer, collectionId, {
-        "id": "key-remote_2D_key",
-        "key": "remote-key",
-        "data": 6,
-      });
-
-      yield ExtensionStorageSync.syncAll();
-      const remoteValue = (yield ExtensionStorageSync.get(extension, "remote-key", context))["remote-key"];
-      equal(remoteValue, 6,
-            "ExtensionStorageSync.get() returns value retrieved from sync");
-
-      equal(calls.length, 1,
-            "syncing calls on-changed listener");
-      deepEqual(calls[0][0], {"remote-key": {newValue: 6}});
-      calls = [];
-
-      // Syncing again doesn't do anything
-      yield ExtensionStorageSync.syncAll();
-
-      equal(calls.length, 0,
-            "syncing again shouldn't call on-changed listener");
-
-      // Updating the server causes us to pull down the new value
-      server.etag = 1000;
-      server.clearCollection(collectionId);
-      yield server.encryptAndAddRecord(transformer, collectionId, {
-        "id": "key-remote_2D_key",
-        "key": "remote-key",
-        "data": 7,
-      });
-
-      yield ExtensionStorageSync.syncAll();
-      const remoteValue2 = (yield ExtensionStorageSync.get(extension, "remote-key", context))["remote-key"];
-      equal(remoteValue2, 7,
-            "ExtensionStorageSync.get() returns value updated from sync");
-
-      equal(calls.length, 1,
-            "syncing calls on-changed listener on update");
-      deepEqual(calls[0][0], {"remote-key": {oldValue: 6, newValue: 7}});
-    });
-  });
-});
-
-add_task(function* test_storage_sync_pushes_changes() {
-  yield* withContextAndServer(function* (context, server) {
-    yield* withSignedInUser(loggedInUser, function* () {
-      let transformer = new CollectionKeyEncryptionRemoteTransformer(extensionId);
-      server.installCollection(collectionId);
-      server.installCollection("storage-sync-crypto");
-      server.etag = 1000;
-
-      yield ExtensionStorageSync.set(extension, {"my-key": 5}, context);
-
-      // install this AFTER we set the key to 5...
-      let calls = [];
-      ExtensionStorageSync.addOnChangedListener(extension, function() {
-        calls.push(arguments);
-      }, context);
-
-      yield ExtensionStorageSync.syncAll();
-      const localValue = (yield ExtensionStorageSync.get(extension, "my-key", context))["my-key"];
-      equal(localValue, 5,
-            "pushing an ExtensionStorageSync value shouldn't change local value");
-
-      let posts = server.getPosts();
-      equal(posts.length, 1,
-            "pushing a value should cause a post to the server");
-      const post = posts[0];
-      assertPostedNewRecord(post);
-      equal(post.path, collectionRecordsPath(collectionId) + "/key-my_2D_key",
-            "pushing a value should have a path corresponding to its id");
-
-      const encrypted = post.body.data;
-      ok(encrypted.ciphertext,
-         "pushing a value should post an encrypted record");
-      ok(!encrypted.data,
-         "pushing a value should not have any plaintext data");
-      equal(encrypted.id, "key-my_2D_key",
-            "pushing a value should use a kinto-friendly record ID");
-
-      const record = yield transformer.decode(encrypted);
-      equal(record.key, "my-key",
-            "when decrypted, a pushed value should have a key field corresponding to its storage.sync key");
-      equal(record.data, 5,
-            "when decrypted, a pushed value should have a data field corresponding to its storage.sync value");
-      equal(record.id, "key-my_2D_key",
-            "when decrypted, a pushed value should have an id field corresponding to its record ID");
-
-      equal(calls.length, 0,
-            "pushing a value shouldn't call the on-changed listener");
-
-      yield ExtensionStorageSync.set(extension, {"my-key": 6}, context);
-      yield ExtensionStorageSync.syncAll();
-
-      // Doesn't push keys because keys were pushed by a previous test.
-      posts = server.getPosts();
-      equal(posts.length, 2,
-            "updating a value should trigger another push");
-      const updatePost = posts[1];
-      assertPostedUpdatedRecord(updatePost, 1000);
-      equal(updatePost.path, collectionRecordsPath(collectionId) + "/key-my_2D_key",
-            "pushing an updated value should go to the same path");
-
-      const updateEncrypted = updatePost.body.data;
-      ok(updateEncrypted.ciphertext,
-         "pushing an updated value should still be encrypted");
-      ok(!updateEncrypted.data,
-         "pushing an updated value should not have any plaintext visible");
-      equal(updateEncrypted.id, "key-my_2D_key",
-            "pushing an updated value should maintain the same ID");
-    });
-  });
-});
--- a/toolkit/components/extensions/test/xpcshell/xpcshell.ini
+++ b/toolkit/components/extensions/test/xpcshell/xpcshell.ini
@@ -1,10 +1,10 @@
 [DEFAULT]
-head = head.js head_sync.js
+head = head.js
 tail =
 firefox-appdir = browser
 skip-if = appname == "thunderbird"
 support-files =
   data/**
 tags = webextensions
 
 [test_csp_custom_policies.js]
@@ -52,17 +52,16 @@ skip-if = release_or_beta
 [test_ext_runtime_sendMessage_no_receiver.js]
 [test_ext_runtime_sendMessage_self.js]
 [test_ext_schemas.js]
 [test_ext_schemas_api_injection.js]
 [test_ext_schemas_async.js]
 [test_ext_schemas_allowed_contexts.js]
 [test_ext_simple.js]
 [test_ext_storage.js]
-[test_ext_storage_sync.js]
 [test_ext_topSites.js]
 skip-if = os == "android"
 [test_getAPILevelForWindow.js]
 [test_ext_legacy_extension_context.js]
 [test_ext_legacy_extension_embedding.js]
 [test_locale_converter.js]
 [test_locale_data.js]
 [test_native_messaging.js]