Bug 1210296 part 2 - Update tests. r=kitcambridge,markh,tcsc
authorEdouard Oger <eoger@fastmail.com>
Mon, 05 Jun 2017 18:49:43 -0400
changeset 607439 e4fe9862f23d76849c28283fdc7cb7c3c9f11bfb
parent 607438 c25b27ba5335e3ed15d5aac70b9ee6b9e0c5b4ca
child 607440 11e51e8118a77a9d9ee0ffc656e3c69c4829b3f2
push id67985
push userbmo:emilio+bugs@crisal.io
push dateWed, 12 Jul 2017 08:36:44 +0000
reviewerskitcambridge, markh, tcsc
bugs1210296
milestone56.0a1
Bug 1210296 part 2 - Update tests. r=kitcambridge,markh,tcsc MozReview-Commit-ID: 41dnyvdlNJv
services/sync/modules-testing/fakeservices.js
services/sync/modules-testing/rotaryengine.js
services/sync/tests/unit/head_errorhandler_common.js
services/sync/tests/unit/head_helpers.js
services/sync/tests/unit/test_addons_engine.js
services/sync/tests/unit/test_addons_reconciler.js
services/sync/tests/unit/test_addons_store.js
services/sync/tests/unit/test_addons_tracker.js
services/sync/tests/unit/test_bookmark_batch_fail.js
services/sync/tests/unit/test_bookmark_decline_undecline.js
services/sync/tests/unit/test_bookmark_duping.js
services/sync/tests/unit/test_bookmark_engine.js
services/sync/tests/unit/test_bookmark_invalid.js
services/sync/tests/unit/test_bookmark_livemarks.js
services/sync/tests/unit/test_bookmark_order.js
services/sync/tests/unit/test_bookmark_places_query_rewriting.js
services/sync/tests/unit/test_bookmark_repair.js
services/sync/tests/unit/test_bookmark_repair_requestor.js
services/sync/tests/unit/test_bookmark_repair_responder.js
services/sync/tests/unit/test_bookmark_smart_bookmarks.js
services/sync/tests/unit/test_bookmark_store.js
services/sync/tests/unit/test_bookmark_tracker.js
services/sync/tests/unit/test_clients_engine.js
services/sync/tests/unit/test_clients_escape.js
services/sync/tests/unit/test_collections_recovery.js
services/sync/tests/unit/test_corrupt_keys.js
services/sync/tests/unit/test_declined.js
services/sync/tests/unit/test_doctor.js
services/sync/tests/unit/test_engine.js
services/sync/tests/unit/test_engine_abort.js
services/sync/tests/unit/test_engine_changes_during_sync.js
services/sync/tests/unit/test_enginemanager.js
services/sync/tests/unit/test_errorhandler_1.js
services/sync/tests/unit/test_errorhandler_2.js
services/sync/tests/unit/test_errorhandler_eol.js
services/sync/tests/unit/test_errorhandler_sync_checkServerError.js
services/sync/tests/unit/test_extension_storage_engine.js
services/sync/tests/unit/test_extension_storage_tracker.js
services/sync/tests/unit/test_form_validator.js
services/sync/tests/unit/test_forms_store.js
services/sync/tests/unit/test_forms_tracker.js
services/sync/tests/unit/test_fxa_node_reassignment.js
services/sync/tests/unit/test_history_store.js
services/sync/tests/unit/test_history_tracker.js
services/sync/tests/unit/test_hmac_error.js
services/sync/tests/unit/test_interval_triggers.js
services/sync/tests/unit/test_node_reassignment.js
services/sync/tests/unit/test_password_engine.js
services/sync/tests/unit/test_password_store.js
services/sync/tests/unit/test_password_tracker.js
services/sync/tests/unit/test_password_validator.js
services/sync/tests/unit/test_places_guid_downgrade.js
services/sync/tests/unit/test_postqueue.js
services/sync/tests/unit/test_prefs_store.js
services/sync/tests/unit/test_prefs_tracker.js
services/sync/tests/unit/test_resource_ua.js
services/sync/tests/unit/test_score_triggers.js
services/sync/tests/unit/test_service_detect_upgrade.js
services/sync/tests/unit/test_service_login.js
services/sync/tests/unit/test_service_startOver.js
services/sync/tests/unit/test_service_startup.js
services/sync/tests/unit/test_service_sync_401.js
services/sync/tests/unit/test_service_sync_locked.js
services/sync/tests/unit/test_service_sync_remoteSetup.js
services/sync/tests/unit/test_service_sync_specified.js
services/sync/tests/unit/test_service_sync_updateEnabledEngines.js
services/sync/tests/unit/test_service_verifyLogin.js
services/sync/tests/unit/test_service_wipeClient.js
services/sync/tests/unit/test_service_wipeServer.js
services/sync/tests/unit/test_syncedtabs.js
services/sync/tests/unit/test_syncengine.js
services/sync/tests/unit/test_syncengine_sync.js
services/sync/tests/unit/test_syncscheduler.js
services/sync/tests/unit/test_tab_engine.js
services/sync/tests/unit/test_tab_store.js
services/sync/tests/unit/test_tab_tracker.js
services/sync/tests/unit/test_telemetry.js
services/sync/tests/unit/test_utils_catch.js
services/sync/tests/unit/test_utils_json.js
services/sync/tests/unit/test_utils_lock.js
services/sync/tests/unit/test_utils_notify.js
services/sync/tps/extensions/tps/resource/auth/fxaccounts.jsm
services/sync/tps/extensions/tps/resource/modules/addons.jsm
services/sync/tps/extensions/tps/resource/tps.jsm
--- a/services/sync/modules-testing/fakeservices.js
+++ b/services/sync/modules-testing/fakeservices.js
@@ -30,35 +30,28 @@ this.FakeFilesystemService = function Fa
   // always throw exceptions when the real ones do. Anyway...)
   for (let name of ["jsonSave", "jsonLoad", "jsonMove", "jsonRemove"]) {
     let origName = "_real_" + name;
     if (!Utils[origName]) {
       Utils[origName] = Utils[name];
     }
   }
 
-  Utils.jsonSave = function jsonSave(filePath, that, obj, callback) {
+  Utils.jsonSave = async function jsonSave(filePath, that, obj) {
     let json = typeof obj == "function" ? obj.call(that) : obj;
     self.fakeContents["weave/" + filePath + ".json"] = JSON.stringify(json);
-    if (callback) {
-      callback.call(that);
-    }
-    return Promise.resolve();
   };
 
-  Utils.jsonLoad = function jsonLoad(filePath, that, cb) {
+  Utils.jsonLoad = async function jsonLoad(filePath, that) {
     let obj;
     let json = self.fakeContents["weave/" + filePath + ".json"];
     if (json) {
       obj = JSON.parse(json);
     }
-    if (cb) {
-      cb.call(that, obj);
-    }
-    return Promise.resolve(obj);
+    return obj;
   };
 
   Utils.jsonMove = function jsonMove(aFrom, aTo, that) {
     const fromPath = "weave/" + aFrom + ".json";
     self.fakeContents["weave/" + aTo + ".json"] = self.fakeContents[fromPath];
     delete self.fakeContents[fromPath];
     return Promise.resolve();
   };
--- a/services/sync/modules-testing/rotaryengine.js
+++ b/services/sync/modules-testing/rotaryengine.js
@@ -34,61 +34,61 @@ Utils.deferGetSet(RotaryRecord, "clearte
 
 this.RotaryStore = function RotaryStore(name, engine) {
   Store.call(this, name, engine);
   this.items = {};
 }
 RotaryStore.prototype = {
   __proto__: Store.prototype,
 
-  create: function create(record) {
+  async create(record) {
     this.items[record.id] = record.denomination;
   },
 
-  remove: function remove(record) {
+  async remove(record) {
     delete this.items[record.id];
   },
 
-  update: function update(record) {
+  async update(record) {
     this.items[record.id] = record.denomination;
   },
 
-  itemExists: function itemExists(id) {
+  async itemExists(id) {
     return (id in this.items);
   },
 
-  createRecord: function createRecord(id, collection) {
+  async createRecord(id, collection) {
     let record = new RotaryRecord(collection, id);
 
     if (!(id in this.items)) {
       record.deleted = true;
       return record;
     }
 
     record.denomination = this.items[id] || "Data for new record: " + id;
     return record;
   },
 
-  changeItemID: function changeItemID(oldID, newID) {
+  async changeItemID(oldID, newID) {
     if (oldID in this.items) {
       this.items[newID] = this.items[oldID];
     }
 
     delete this.items[oldID];
   },
 
-  getAllIDs: function getAllIDs() {
+  async getAllIDs() {
     let ids = {};
     for (let id in this.items) {
       ids[id] = true;
     }
     return ids;
   },
 
-  wipe: function wipe() {
+  async wipe() {
     this.items = {};
   }
 };
 
 this.RotaryTracker = function RotaryTracker(name, engine) {
   Tracker.call(this, name, engine);
 }
 RotaryTracker.prototype = {
@@ -104,18 +104,18 @@ this.RotaryEngine = function RotaryEngin
   this.previousFailed = [];
 }
 RotaryEngine.prototype = {
   __proto__: SyncEngine.prototype,
   _storeObj: RotaryStore,
   _trackerObj: RotaryTracker,
   _recordObj: RotaryRecord,
 
-  _findDupe: function _findDupe(item) {
-    // This is a semaphore used for testing proper reconciling on dupe
+  async _findDupe(item) {
+    // This is a Special Value® used for testing proper reconciling on dupe
     // detection.
     if (item.id == "DUPE_INCOMING") {
       return "DUPE_LOCAL";
     }
 
     for (let [id, value] of Object.entries(this._store.items)) {
       if (item.denomination == value) {
         return id;
--- a/services/sync/tests/unit/head_errorhandler_common.js
+++ b/services/sync/tests/unit/head_errorhandler_common.js
@@ -20,17 +20,17 @@ const EHTestsCommon = {
 
   service_unavailable(request, response) {
     let body = "Service Unavailable";
     response.setStatusLine(request.httpVersion, 503, "Service Unavailable");
     response.setHeader("Retry-After", "42");
     response.bodyOutputStream.write(body, body.length);
   },
 
-  sync_httpd_setup() {
+  async sync_httpd_setup() {
     let global = new ServerWBO("global", {
       syncID: Service.syncID,
       storageVersion: STORAGE_VERSION,
       engines: {clients: {version: Service.clientsEngine.version,
                           syncID: Service.clientsEngine.syncID},
                 catapult: {version: Service.engineManager.get("catapult").version,
                            syncID: Service.engineManager.get("catapult").syncID}}
     });
@@ -78,17 +78,17 @@ const EHTestsCommon = {
 
   CatapultEngine: (function() {
     function CatapultEngine() {
       SyncEngine.call(this, "Catapult", Service);
     }
     CatapultEngine.prototype = {
       __proto__: SyncEngine.prototype,
       exception: null, // tests fill this in
-      _sync: function _sync() {
+      async _sync() {
         if (this.exception) {
           throw this.exception;
         }
       }
     };
 
     return CatapultEngine;
   }()),
--- a/services/sync/tests/unit/head_helpers.js
+++ b/services/sync/tests/unit/head_helpers.js
@@ -13,16 +13,23 @@
 Cu.import("resource://services-common/async.js");
 Cu.import("resource://testing-common/services/common/utils.js");
 Cu.import("resource://testing-common/PlacesTestUtils.jsm");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://gre/modules/XPCOMUtils.jsm");
 Cu.import("resource://gre/modules/PlacesUtils.jsm");
 Cu.import("resource://gre/modules/ObjectUtils.jsm");
 
+add_task(async function head_setup() {
+  // If a test imports Service, make sure it is initialized first.
+  if (this.Service) {
+    await this.Service.promiseInitialized;
+  }
+});
+
 // ================================================
 // Load mocking/stubbing library, sinon
 // docs: http://sinonjs.org/releases/v2.3.2/
 Cu.import("resource://gre/modules/Timer.jsm");
 const {Loader} = Cu.import("resource://gre/modules/commonjs/toolkit/loader.js", {});
 const loader = new Loader.Loader({
   paths: {
     "": "resource://testing-common/",
@@ -379,17 +386,17 @@ function wait_for_pings(expectedPings) {
         resolve(pings);
       }
     };
   });
 }
 
 async function wait_for_ping(callback, allowErrorPings, getFullPing = false) {
   let pingsPromise = wait_for_pings(1);
-  callback();
+  await callback();
   let [record] = await pingsPromise;
   if (allowErrorPings) {
     assert_valid_ping(record);
   } else {
     assert_success_ping(record);
   }
   if (getFullPing) {
     return record;
@@ -402,37 +409,37 @@ async function wait_for_ping(callback, a
 function sync_and_validate_telem(allowErrorPings, getFullPing = false) {
   return wait_for_ping(() => Service.sync(), allowErrorPings, getFullPing);
 }
 
 // Used for the (many) cases where we do a 'partial' sync, where only a single
 // engine is actually synced, but we still want to ensure we're generating a
 // valid ping. Returns a promise that resolves to the ping, or rejects with the
 // thrown error after calling an optional callback.
-function sync_engine_and_validate_telem(engine, allowErrorPings, onError) {
-  return new Promise((resolve, reject) => {
-    let telem = get_sync_test_telemetry();
-    let caughtError = null;
-    // Clear out status, so failures from previous syncs won't show up in the
-    // telemetry ping.
-    let ns = {};
-    Cu.import("resource://services-sync/status.js", ns);
-    ns.Status._engines = {};
-    ns.Status.partial = false;
-    // Ideally we'd clear these out like we do with engines, (probably via
-    // Status.resetSync()), but this causes *numerous* tests to fail, so we just
-    // assume that if no failureReason or engine failures are set, and the
-    // status properties are the same as they were initially, that it's just
-    // a leftover.
-    // This is only an issue since we're triggering the sync of just one engine,
-    // without doing any other parts of the sync.
-    let initialServiceStatus = ns.Status._service;
-    let initialSyncStatus = ns.Status._sync;
+async function sync_engine_and_validate_telem(engine, allowErrorPings, onError) {
+  let telem = get_sync_test_telemetry();
+  let caughtError = null;
+  // Clear out status, so failures from previous syncs won't show up in the
+  // telemetry ping.
+  let ns = {};
+  Cu.import("resource://services-sync/status.js", ns);
+  ns.Status._engines = {};
+  ns.Status.partial = false;
+  // Ideally we'd clear these out like we do with engines, (probably via
+  // Status.resetSync()), but this causes *numerous* tests to fail, so we just
+  // assume that if no failureReason or engine failures are set, and the
+  // status properties are the same as they were initially, that it's just
+  // a leftover.
+  // This is only an issue since we're triggering the sync of just one engine,
+  // without doing any other parts of the sync.
+  let initialServiceStatus = ns.Status._service;
+  let initialSyncStatus = ns.Status._sync;
 
-    let oldSubmit = telem.submit;
+  let oldSubmit = telem.submit;
+  let submitPromise = new Promise((resolve, reject) => {
     telem.submit = function(ping) {
       telem.submit = oldSubmit;
       ping.syncs.forEach(record => {
         if (record && record.status) {
           // did we see anything to lead us to believe that something bad actually happened
           let realProblem = record.failureReason || record.engines.some(e => {
             if (e.failureReason || e.status) {
               return true;
@@ -467,37 +474,38 @@ function sync_engine_and_validate_telem(
         if (onError) {
           onError(ping.syncs[0], ping);
         }
         reject(caughtError);
       } else {
         resolve(ping.syncs[0]);
       }
     }
-    // neuter the scheduler as it interacts badly with some of the tests - the
-    // engine being synced usually isn't the registered engine, so we see
-    // scored incremented and not removed, which schedules unexpected syncs.
-    let oldObserve = Service.scheduler.observe;
-    Service.scheduler.observe = () => {};
+  });
+  // neuter the scheduler as it interacts badly with some of the tests - the
+  // engine being synced usually isn't the registered engine, so we see
+  // scored incremented and not removed, which schedules unexpected syncs.
+  let oldObserve = Service.scheduler.observe;
+  Service.scheduler.observe = () => {};
+  try {
+    Svc.Obs.notify("weave:service:sync:start");
     try {
-      Svc.Obs.notify("weave:service:sync:start");
-      try {
-        engine.sync();
-      } catch (e) {
-        caughtError = e;
-      }
-      if (caughtError) {
-        Svc.Obs.notify("weave:service:sync:error", caughtError);
-      } else {
-        Svc.Obs.notify("weave:service:sync:finish");
-      }
-    } finally {
-      Service.scheduler.observe = oldObserve;
+      await engine.sync();
+    } catch (e) {
+      caughtError = e;
     }
-  });
+    if (caughtError) {
+      Svc.Obs.notify("weave:service:sync:error", caughtError);
+    } else {
+      Svc.Obs.notify("weave:service:sync:finish");
+    }
+  } finally {
+    Service.scheduler.observe = oldObserve;
+  }
+  return submitPromise;
 }
 
 // Returns a promise that resolves once the specified observer notification
 // has fired.
 function promiseOneObserver(topic, callback) {
   return new Promise((resolve, reject) => {
     let observer = function(subject, data) {
       Svc.Obs.remove(topic, observer);
@@ -505,37 +513,31 @@ function promiseOneObserver(topic, callb
     }
     Svc.Obs.add(topic, observer)
   });
 }
 
 function promiseStopServer(server) {
   return new Promise(resolve => server.stop(resolve));
 }
-
-function promiseNextTick() {
-  return new Promise(resolve => {
-    Utils.nextTick(resolve);
-  });
-}
 // Avoid an issue where `client.name2` containing unicode characters causes
 // a number of tests to fail, due to them assuming that we do not need to utf-8
 // encode or decode data sent through the mocked server (see bug 1268912).
 // We stash away the original implementation so test_utils_misc.js can test it.
 Utils._orig_getDefaultDeviceName = Utils.getDefaultDeviceName;
 Utils.getDefaultDeviceName = function() {
   return "Test device name";
 };
 
-function registerRotaryEngine() {
+async function registerRotaryEngine() {
   let {RotaryEngine} =
     Cu.import("resource://testing-common/services/sync/rotaryengine.js", {});
   Service.engineManager.clear();
 
-  Service.engineManager.register(RotaryEngine);
+  await Service.engineManager.register(RotaryEngine);
   let engine = Service.engineManager.get("rotary");
   engine.enabled = true;
 
   return { engine, tracker: engine._tracker };
 }
 
 // Set the validation prefs to attempt validation every time to avoid non-determinism.
 function enableValidationPrefs() {
--- a/services/sync/tests/unit/test_addons_engine.js
+++ b/services/sync/tests/unit/test_addons_engine.js
@@ -8,120 +8,138 @@ Cu.import("resource://gre/modules/Prefer
 Cu.import("resource://gre/modules/Services.jsm");
 Cu.import("resource://services-common/async.js");
 Cu.import("resource://services-sync/addonsreconciler.js");
 Cu.import("resource://services-sync/engines/addons.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
-var prefs = new Preferences();
+const prefs = new Preferences();
 prefs.set("extensions.getAddons.get.url",
           "http://localhost:8888/search/guid:%IDS%");
 prefs.set("extensions.install.requireSecureOrigin", false);
 
-loadAddonTestFunctions();
-startupManager();
-
-var engineManager = Service.engineManager;
+let engine;
+let reconciler;
+let tracker;
 
-engineManager.register(AddonsEngine);
-var engine = engineManager.get("addons");
-var reconciler = engine._reconciler;
-var tracker = engine._tracker;
-
-function advance_test() {
+async function resetReconciler() {
   reconciler._addons = {};
   reconciler._changes = [];
 
-  let cb = Async.makeSpinningCallback();
-  reconciler.saveState(null, cb);
-  cb.wait();
+  await reconciler.saveState();
 
   tracker.clearChangedIDs();
-  run_next_test();
 }
 
+add_task(async function setup() {
+  initTestLogging("Trace");
+  Log.repository.getLogger("Sync.Engine.Addons").level =
+    Log.Level.Trace;
+  Log.repository.getLogger("Sync.Store.Addons").level = Log.Level.Trace;
+  Log.repository.getLogger("Sync.Tracker.Addons").level =
+    Log.Level.Trace;
+  Log.repository.getLogger("Sync.AddonsRepository").level =
+    Log.Level.Trace;
+
+  loadAddonTestFunctions();
+  startupManager();
+
+  await Service.engineManager.register(AddonsEngine);
+  engine = Service.engineManager.get("addons");
+  reconciler = engine._reconciler;
+  tracker = engine._tracker;
+
+  reconciler.startListening();
+
+  // Don't flush to disk in the middle of an event listener!
+  // This causes test hangs on WinXP.
+  reconciler._shouldPersist = false;
+
+  await resetReconciler();
+});
+
 // This is a basic sanity test for the unit test itself. If this breaks, the
 // add-ons API likely changed upstream.
-add_test(function test_addon_install() {
+add_task(async function test_addon_install() {
   _("Ensure basic add-on APIs work as expected.");
 
   let install = getAddonInstall("test_bootstrap1_1");
   do_check_neq(install, null);
   do_check_eq(install.type, "extension");
   do_check_eq(install.name, "Test Bootstrap 1");
 
-  advance_test();
+  await resetReconciler();
 });
 
-add_test(function test_find_dupe() {
+add_task(async function test_find_dupe() {
   _("Ensure the _findDupe() implementation is sane.");
 
   // This gets invoked at the top of sync, which is bypassed by this
   // test, so we do it manually.
-  engine._refreshReconcilerState();
+  await engine._refreshReconcilerState();
 
   let addon = installAddon("test_bootstrap1_1");
 
   let record = {
     id:            Utils.makeGUID(),
     addonID:       addon.id,
     enabled:       true,
     applicationID: Services.appinfo.ID,
     source:        "amo"
   };
 
-  let dupe = engine._findDupe(record);
+  let dupe = await engine._findDupe(record);
   do_check_eq(addon.syncGUID, dupe);
 
   record.id = addon.syncGUID;
-  dupe = engine._findDupe(record);
+  dupe = await engine._findDupe(record);
   do_check_eq(null, dupe);
 
   uninstallAddon(addon);
-  advance_test();
+  await resetReconciler();
 });
 
-add_test(function test_get_changed_ids() {
+add_task(async function test_get_changed_ids() {
   _("Ensure getChangedIDs() has the appropriate behavior.");
 
   _("Ensure getChangedIDs() returns an empty object by default.");
-  let changes = engine.getChangedIDs();
+  let changes = await engine.getChangedIDs();
   do_check_eq("object", typeof(changes));
   do_check_eq(0, Object.keys(changes).length);
 
   _("Ensure tracker changes are populated.");
   let now = new Date();
   let changeTime = now.getTime() / 1000;
   let guid1 = Utils.makeGUID();
   tracker.addChangedID(guid1, changeTime);
 
-  changes = engine.getChangedIDs();
+  changes = await engine.getChangedIDs();
   do_check_eq("object", typeof(changes));
   do_check_eq(1, Object.keys(changes).length);
   do_check_true(guid1 in changes);
   do_check_eq(changeTime, changes[guid1]);
 
   tracker.clearChangedIDs();
 
   _("Ensure reconciler changes are populated.");
   let addon = installAddon("test_bootstrap1_1");
   tracker.clearChangedIDs(); // Just in case.
-  changes = engine.getChangedIDs();
+  changes = await engine.getChangedIDs();
   do_check_eq("object", typeof(changes));
   do_check_eq(1, Object.keys(changes).length);
   do_check_true(addon.syncGUID in changes);
   _("Change time: " + changeTime + ", addon change: " + changes[addon.syncGUID]);
   do_check_true(changes[addon.syncGUID] >= changeTime);
 
   let oldTime = changes[addon.syncGUID];
   let guid2 = addon.syncGUID;
   uninstallAddon(addon);
-  changes = engine.getChangedIDs();
+  changes = await engine.getChangedIDs();
   do_check_eq(1, Object.keys(changes).length);
   do_check_true(guid2 in changes);
   do_check_true(changes[guid2] > oldTime);
 
   _("Ensure non-syncable add-ons aren't picked up by reconciler changes.");
   reconciler._addons  = {};
   reconciler._changes = [];
   let record = {
@@ -132,21 +150,21 @@ add_test(function test_get_changed_ids()
     modified:       new Date(),
     type:           "UNSUPPORTED",
     scope:          0,
     foreignInstall: false
   };
   reconciler.addons["DUMMY"] = record;
   reconciler._addChange(record.modified, CHANGE_INSTALLED, record);
 
-  changes = engine.getChangedIDs();
+  changes = await engine.getChangedIDs();
   _(JSON.stringify(changes));
   do_check_eq(0, Object.keys(changes).length);
 
-  advance_test();
+  await resetReconciler();
 });
 
 add_task(async function test_disabled_install_semantics() {
   _("Ensure that syncing a disabled add-on preserves proper state.");
 
   // This is essentially a test for bug 712542, which snuck into the original
   // add-on sync drop. It ensures that when an add-on is installed that the
   // disabled state and incoming syncGUID is preserved, even on the next sync.
@@ -189,63 +207,44 @@ add_task(async function test_disabled_in
     enabled:       false,
     deleted:       false,
     source:        "amo",
   });
   let wbo = new ServerWBO(id, record, now - 2);
   server.insertWBO(USER, "addons", wbo);
 
   _("Performing sync of add-ons engine.");
-  engine._sync();
+  await engine._sync();
 
   // At this point the non-restartless extension should be staged for install.
 
   // Don't need this server any more.
   await promiseStopServer(amoServer);
 
   // We ensure the reconciler has recorded the proper ID and enabled state.
   let addon = reconciler.getAddonStateFromSyncGUID(id);
   do_check_neq(null, addon);
   do_check_eq(false, addon.enabled);
 
   // We fake an app restart and perform another sync, just to make sure things
   // are sane.
   restartManager();
 
-  engine._sync();
+  await engine._sync();
 
   // The client should not upload a new record. The old record should be
   // retained and unmodified.
   let collection = server.getCollection(USER, "addons");
   do_check_eq(1, collection.count());
 
   let payload = collection.payloads()[0];
   do_check_neq(null, collection.wbo(id));
   do_check_eq(ADDON_ID, payload.addonID);
   do_check_false(payload.enabled);
 
-  promiseStopServer(server);
+  await promiseStopServer(server);
 });
 
 add_test(function cleanup() {
   // There's an xpcom-shutdown hook for this, but let's give this a shot.
   reconciler.stopListening();
   run_next_test();
 });
-
-function run_test() {
-  initTestLogging("Trace");
-  Log.repository.getLogger("Sync.Engine.Addons").level =
-    Log.Level.Trace;
-  Log.repository.getLogger("Sync.Store.Addons").level = Log.Level.Trace;
-  Log.repository.getLogger("Sync.Tracker.Addons").level =
-    Log.Level.Trace;
-  Log.repository.getLogger("Sync.AddonsRepository").level =
-    Log.Level.Trace;
-
-  reconciler.startListening();
-
-  // Don't flush to disk in the middle of an event listener!
-  // This causes test hangs on WinXP.
-  reconciler._shouldPersist = false;
-
-  advance_test();
-}
--- a/services/sync/tests/unit/test_addons_reconciler.js
+++ b/services/sync/tests/unit/test_addons_reconciler.js
@@ -7,63 +7,58 @@ Cu.import("resource://gre/modules/AddonM
 Cu.import("resource://services-sync/addonsreconciler.js");
 Cu.import("resource://services-sync/engines/addons.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 
 loadAddonTestFunctions();
 startupManager();
 
-function run_test() {
+add_task(async function run_test() {
   initTestLogging("Trace");
   Log.repository.getLogger("Sync.AddonsReconciler").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.AddonsReconciler").level =
     Log.Level.Trace;
 
   Svc.Prefs.set("engine.addons", true);
-  Service.engineManager.register(AddonsEngine);
+  await Service.engineManager.register(AddonsEngine);
+});
 
-  run_next_test();
-}
-
-add_test(function test_defaults() {
+add_task(async function test_defaults() {
   _("Ensure new objects have reasonable defaults.");
 
   let reconciler = new AddonsReconciler();
+  await reconciler.ensureStateLoaded();
 
   do_check_false(reconciler._listening);
   do_check_eq("object", typeof(reconciler.addons));
   do_check_eq(0, Object.keys(reconciler.addons).length);
   do_check_eq(0, reconciler._changes.length);
   do_check_eq(0, reconciler._listeners.length);
-
-  run_next_test();
 });
 
-add_test(function test_load_state_empty_file() {
+add_task(async function test_load_state_empty_file() {
   _("Ensure loading from a missing file results in defaults being set.");
 
   let reconciler = new AddonsReconciler();
-
-  reconciler.loadState(null, function(error, loaded) {
-    do_check_eq(null, error);
-    do_check_false(loaded);
+  await reconciler.ensureStateLoaded();
 
-    do_check_eq("object", typeof(reconciler.addons));
-    do_check_eq(0, Object.keys(reconciler.addons).length);
-    do_check_eq(0, reconciler._changes.length);
+  let loaded = await reconciler.loadState();
+  do_check_false(loaded);
 
-    run_next_test();
-  });
+  do_check_eq("object", typeof(reconciler.addons));
+  do_check_eq(0, Object.keys(reconciler.addons).length);
+  do_check_eq(0, reconciler._changes.length);
 });
 
-add_test(function test_install_detection() {
+add_task(async function test_install_detection() {
   _("Ensure that add-on installation results in appropriate side-effects.");
 
   let reconciler = new AddonsReconciler();
+  await reconciler.ensureStateLoaded();
   reconciler.startListening();
 
   let before = new Date();
   let addon = installAddon("test_bootstrap1_1");
   let after = new Date();
 
   do_check_eq(1, Object.keys(reconciler.addons).length);
   do_check_true(addon.id in reconciler.addons);
@@ -86,24 +81,23 @@ add_test(function test_install_detection
 
   do_check_eq(1, reconciler._changes.length);
   let change = reconciler._changes[0];
   do_check_true(change[0] >= before && change[1] <= after);
   do_check_eq(CHANGE_INSTALLED, change[1]);
   do_check_eq(addon.id, change[2]);
 
   uninstallAddon(addon);
-
-  run_next_test();
 });
 
-add_test(function test_uninstall_detection() {
+add_task(async function test_uninstall_detection() {
   _("Ensure that add-on uninstallation results in appropriate side-effects.");
 
   let reconciler = new AddonsReconciler();
+  await reconciler.ensureStateLoaded();
   reconciler.startListening();
 
   reconciler._addons = {};
   reconciler._changes = [];
 
   let addon = installAddon("test_bootstrap1_1");
   let id = addon.id;
 
@@ -115,53 +109,46 @@ add_test(function test_uninstall_detecti
 
   let record = reconciler.addons[id];
   do_check_false(record.installed);
 
   do_check_eq(1, reconciler._changes.length);
   let change = reconciler._changes[0];
   do_check_eq(CHANGE_UNINSTALLED, change[1]);
   do_check_eq(id, change[2]);
-
-  run_next_test();
 });
 
-add_test(function test_load_state_future_version() {
+add_task(async function test_load_state_future_version() {
   _("Ensure loading a file from a future version results in no data loaded.");
 
   const FILENAME = "TEST_LOAD_STATE_FUTURE_VERSION";
 
   let reconciler = new AddonsReconciler();
+  await reconciler.ensureStateLoaded();
 
   // First we populate our new file.
   let state = {version: 100, addons: {foo: {}}, changes: [[1, 1, "foo"]]};
-  let cb = Async.makeSyncCallback();
 
   // jsonSave() expects an object with ._log, so we give it a reconciler
   // instance.
-  Utils.jsonSave(FILENAME, reconciler, state, cb);
-  Async.waitForSyncCallback(cb);
-
-  reconciler.loadState(FILENAME, function(error, loaded) {
-    do_check_eq(null, error);
-    do_check_false(loaded);
+  await Utils.jsonSave(FILENAME, reconciler, state);
 
-    do_check_eq("object", typeof(reconciler.addons));
-    do_check_eq(1, Object.keys(reconciler.addons).length);
-    do_check_eq(1, reconciler._changes.length);
+  let loaded = await reconciler.loadState(FILENAME);
+  do_check_false(loaded);
 
-    run_next_test();
-  });
+  do_check_eq("object", typeof(reconciler.addons));
+  do_check_eq(0, Object.keys(reconciler.addons).length);
+  do_check_eq(0, reconciler._changes.length);
 });
 
-add_test(function test_prune_changes_before_date() {
+add_task(async function test_prune_changes_before_date() {
   _("Ensure that old changes are pruned properly.");
 
   let reconciler = new AddonsReconciler();
-  reconciler._ensureStateLoaded();
+  await reconciler.ensureStateLoaded();
   reconciler._changes = [];
 
   let now = new Date();
   const HOUR_MS = 1000 * 60 * 60;
 
   _("Ensure pruning an empty changes array works.");
   reconciler.pruneChangesBeforeDate(now);
   do_check_eq(0, reconciler._changes.length);
@@ -184,11 +171,9 @@ add_test(function test_prune_changes_bef
   do_check_neq(undefined, reconciler._changes[0]);
   do_check_eq(young, reconciler._changes[0][0]);
   do_check_eq("bar", reconciler._changes[0][2]);
 
   _("Ensure pruning all changes works.");
   reconciler._changes.push([old, CHANGE_INSTALLED, "foo"]);
   reconciler.pruneChangesBeforeDate(now);
   do_check_eq(0, reconciler._changes.length);
-
-  run_next_test();
 });
--- a/services/sync/tests/unit/test_addons_store.js
+++ b/services/sync/tests/unit/test_addons_store.js
@@ -9,17 +9,17 @@ Cu.import("resource://services-sync/addo
 Cu.import("resource://services-sync/engines/addons.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 Cu.import("resource://gre/modules/FileUtils.jsm");
 
 const HTTP_PORT = 8888;
 
-var prefs = new Preferences();
+const prefs = new Preferences();
 
 prefs.set("extensions.getAddons.get.url", "http://localhost:8888/search/guid:%IDS%");
 prefs.set("extensions.install.requireSecureOrigin", false);
 
 const SYSTEM_ADDON_ID = "system1@tests.mozilla.org";
 let systemAddonFile;
 
 // The system add-on must be installed before AddonManager is started.
@@ -34,21 +34,20 @@ function loadSystemAddon() {
   // used by system add-ons.
   registerDirectory("XREAppFeat", distroDir);
 }
 
 loadAddonTestFunctions();
 loadSystemAddon();
 startupManager();
 
-Service.engineManager.register(AddonsEngine);
-var engine     = Service.engineManager.get("addons");
-var tracker    = engine._tracker;
-var store      = engine._store;
-var reconciler = engine._reconciler;
+let engine;
+let tracker;
+let store;
+let reconciler;
 
 /**
  * Create a AddonsRec for this application with the fields specified.
  *
  * @param  id       Sync GUID of record
  * @param  addonId  ID of add-on
  * @param  enabled  Boolean whether record is enabled
  * @param  deleted  Boolean whether record was deleted
@@ -98,174 +97,167 @@ function createAndStartHTTPServer(port) 
 // it missed.
 function checkReconcilerUpToDate(addon) {
   let stateBefore = Object.assign({}, store.reconciler.addons[addon.id]);
   store.reconciler.rectifyStateFromAddon(addon);
   let stateAfter = store.reconciler.addons[addon.id];
   deepEqual(stateBefore, stateAfter);
 }
 
-function run_test() {
+add_task(async function setup() {
   initTestLogging("Trace");
   Log.repository.getLogger("Sync.Engine.Addons").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.Tracker.Addons").level = Log.Level.Trace;
-  Log.repository.getLogger("Sync.AddonsRepository").level =
-    Log.Level.Trace;
+  Log.repository.getLogger("Sync.AddonsRepository").level = Log.Level.Trace;
+
+  await Service.engineManager.register(AddonsEngine);
+  engine     = Service.engineManager.get("addons");
+  tracker    = engine._tracker;
+  store      = engine._store;
+  reconciler = engine._reconciler;
 
   reconciler.startListening();
 
   // Don't flush to disk in the middle of an event listener!
   // This causes test hangs on WinXP.
   reconciler._shouldPersist = false;
+});
 
-  run_next_test();
-}
-
-add_test(function test_remove() {
+add_task(async function test_remove() {
   _("Ensure removing add-ons from deleted records works.");
 
   let addon = installAddon("test_bootstrap1_1");
   let record = createRecordForThisApp(addon.syncGUID, addon.id, true, true);
 
-  let failed = store.applyIncomingBatch([record]);
+  let failed = await store.applyIncomingBatch([record]);
   do_check_eq(0, failed.length);
 
   let newAddon = getAddonFromAddonManagerByID(addon.id);
   do_check_eq(null, newAddon);
-
-  run_next_test();
 });
 
-add_test(function test_apply_enabled() {
+add_task(async function test_apply_enabled() {
   _("Ensures that changes to the userEnabled flag apply.");
 
   let addon = installAddon("test_bootstrap1_1");
   do_check_true(addon.isActive);
   do_check_false(addon.userDisabled);
 
   _("Ensure application of a disable record works as expected.");
   let records = [];
   records.push(createRecordForThisApp(addon.syncGUID, addon.id, false, false));
-  let failed = store.applyIncomingBatch(records);
+  let failed = await store.applyIncomingBatch(records);
   do_check_eq(0, failed.length);
   addon = getAddonFromAddonManagerByID(addon.id);
   do_check_true(addon.userDisabled);
   checkReconcilerUpToDate(addon);
   records = [];
 
   _("Ensure enable record works as expected.");
   records.push(createRecordForThisApp(addon.syncGUID, addon.id, true, false));
-  failed = store.applyIncomingBatch(records);
+  failed = await store.applyIncomingBatch(records);
   do_check_eq(0, failed.length);
   addon = getAddonFromAddonManagerByID(addon.id);
   do_check_false(addon.userDisabled);
   checkReconcilerUpToDate(addon);
   records = [];
 
   _("Ensure enabled state updates don't apply if the ignore pref is set.");
   records.push(createRecordForThisApp(addon.syncGUID, addon.id, false, false));
   Svc.Prefs.set("addons.ignoreUserEnabledChanges", true);
-  failed = store.applyIncomingBatch(records);
+  failed = await store.applyIncomingBatch(records);
   do_check_eq(0, failed.length);
   addon = getAddonFromAddonManagerByID(addon.id);
   do_check_false(addon.userDisabled);
   records = [];
 
   uninstallAddon(addon);
   Svc.Prefs.reset("addons.ignoreUserEnabledChanges");
-  run_next_test();
 });
 
-add_test(function test_apply_enabled_appDisabled() {
+add_task(async function test_apply_enabled_appDisabled() {
   _("Ensures that changes to the userEnabled flag apply when the addon is appDisabled.");
 
   let addon = installAddon("test_install3"); // this addon is appDisabled by default.
   do_check_true(addon.appDisabled);
   do_check_false(addon.isActive);
   do_check_false(addon.userDisabled);
 
   _("Ensure application of a disable record works as expected.");
   store.reconciler.pruneChangesBeforeDate(Date.now() + 10);
   store.reconciler._changes = [];
   let records = [];
   records.push(createRecordForThisApp(addon.syncGUID, addon.id, false, false));
-  let failed = store.applyIncomingBatch(records);
+  let failed = await store.applyIncomingBatch(records);
   do_check_eq(0, failed.length);
   addon = getAddonFromAddonManagerByID(addon.id);
   do_check_true(addon.userDisabled);
   checkReconcilerUpToDate(addon);
   records = [];
 
   _("Ensure enable record works as expected.");
   records.push(createRecordForThisApp(addon.syncGUID, addon.id, true, false));
-  failed = store.applyIncomingBatch(records);
+  failed = await store.applyIncomingBatch(records);
   do_check_eq(0, failed.length);
   addon = getAddonFromAddonManagerByID(addon.id);
   do_check_false(addon.userDisabled);
   checkReconcilerUpToDate(addon);
   records = [];
 
   uninstallAddon(addon);
-  run_next_test();
 });
 
-add_test(function test_ignore_different_appid() {
+add_task(async function test_ignore_different_appid() {
   _("Ensure that incoming records with a different application ID are ignored.");
 
   // We test by creating a record that should result in an update.
   let addon = installAddon("test_bootstrap1_1");
   do_check_false(addon.userDisabled);
 
   let record = createRecordForThisApp(addon.syncGUID, addon.id, false, false);
   record.applicationID = "FAKE_ID";
 
-  let failed = store.applyIncomingBatch([record]);
+  let failed = await store.applyIncomingBatch([record]);
   do_check_eq(0, failed.length);
 
   let newAddon = getAddonFromAddonManagerByID(addon.id);
   do_check_false(newAddon.userDisabled);
 
   uninstallAddon(addon);
-
-  run_next_test();
 });
 
-add_test(function test_ignore_unknown_source() {
+add_task(async function test_ignore_unknown_source() {
   _("Ensure incoming records with unknown source are ignored.");
 
   let addon = installAddon("test_bootstrap1_1");
 
   let record = createRecordForThisApp(addon.syncGUID, addon.id, false, false);
   record.source = "DUMMY_SOURCE";
 
-  let failed = store.applyIncomingBatch([record]);
+  let failed = await store.applyIncomingBatch([record]);
   do_check_eq(0, failed.length);
 
   let newAddon = getAddonFromAddonManagerByID(addon.id);
   do_check_false(newAddon.userDisabled);
 
   uninstallAddon(addon);
-
-  run_next_test();
 });
 
-add_test(function test_apply_uninstall() {
+add_task(async function test_apply_uninstall() {
   _("Ensures that uninstalling an add-on from a record works.");
 
   let addon = installAddon("test_bootstrap1_1");
 
   let records = [];
   records.push(createRecordForThisApp(addon.syncGUID, addon.id, true, true));
-  let failed = store.applyIncomingBatch(records);
+  let failed = await store.applyIncomingBatch(records);
   do_check_eq(0, failed.length);
 
   addon = getAddonFromAddonManagerByID(addon.id);
   do_check_eq(null, addon);
-
-  run_next_test();
 });
 
 add_test(function test_addon_syncability() {
   _("Ensure isAddonSyncable functions properly.");
 
   Svc.Prefs.set("addons.trustedSourceHostnames",
                 "addons.mozilla.org,other.example.com");
 
@@ -375,17 +367,17 @@ add_test(function test_ignore_hotfixes()
   uninstallAddon(addon);
 
   extensionPrefs.reset("hotfix.id");
 
   run_next_test();
 });
 
 
-add_test(function test_get_all_ids() {
+add_task(async function test_get_all_ids() {
   _("Ensures that getAllIDs() returns an appropriate set.");
 
   _("Installing two addons.");
   // XXX - this test seems broken - at this point, before we've installed the
   // addons below, store.getAllIDs() returns all addons installed by previous
   // tests, even though those tests uninstalled the addon.
   // So if any tests above ever add a new addon ID, they are going to need to
   // be added here too.
@@ -394,202 +386,196 @@ add_test(function test_get_all_ids() {
   let addon2 = installAddon("test_bootstrap1_1");
   let addon3 = installAddon("test_install3");
 
   _("Ensure they're syncable.");
   do_check_true(store.isAddonSyncable(addon1));
   do_check_true(store.isAddonSyncable(addon2));
   do_check_true(store.isAddonSyncable(addon3));
 
-  let ids = store.getAllIDs();
+  let ids = await store.getAllIDs();
 
   do_check_eq("object", typeof(ids));
   do_check_eq(3, Object.keys(ids).length);
   do_check_true(addon1.syncGUID in ids);
   do_check_true(addon2.syncGUID in ids);
   do_check_true(addon3.syncGUID in ids);
 
   addon1.install.cancel();
   uninstallAddon(addon2);
   uninstallAddon(addon3);
-
-  run_next_test();
 });
 
-add_test(function test_change_item_id() {
+add_task(async function test_change_item_id() {
   _("Ensures that changeItemID() works properly.");
 
   let addon = installAddon("test_bootstrap1_1");
 
   let oldID = addon.syncGUID;
   let newID = Utils.makeGUID();
 
-  store.changeItemID(oldID, newID);
+  await store.changeItemID(oldID, newID);
 
   let newAddon = getAddonFromAddonManagerByID(addon.id);
   do_check_neq(null, newAddon);
   do_check_eq(newID, newAddon.syncGUID);
 
   uninstallAddon(newAddon);
-
-  run_next_test();
 });
 
-add_test(function test_create() {
+add_task(async function test_create() {
   _("Ensure creating/installing an add-on from a record works.");
 
   let server = createAndStartHTTPServer(HTTP_PORT);
 
   let addon = installAddon("test_bootstrap1_1");
   let id = addon.id;
   uninstallAddon(addon);
 
   let guid = Utils.makeGUID();
   let record = createRecordForThisApp(guid, id, true, false);
 
-  let failed = store.applyIncomingBatch([record]);
+  let failed = await store.applyIncomingBatch([record]);
   do_check_eq(0, failed.length);
 
   let newAddon = getAddonFromAddonManagerByID(id);
   do_check_neq(null, newAddon);
   do_check_eq(guid, newAddon.syncGUID);
   do_check_false(newAddon.userDisabled);
 
   uninstallAddon(newAddon);
 
-  server.stop(run_next_test);
+  await promiseStopServer(server);
 });
 
-add_test(function test_create_missing_search() {
+add_task(async function test_create_missing_search() {
   _("Ensures that failed add-on searches are handled gracefully.");
 
   let server = createAndStartHTTPServer(HTTP_PORT);
 
   // The handler for this ID is not installed, so a search should 404.
   const id = "missing@tests.mozilla.org";
   let guid = Utils.makeGUID();
   let record = createRecordForThisApp(guid, id, true, false);
 
-  let failed = store.applyIncomingBatch([record]);
+  let failed = await store.applyIncomingBatch([record]);
   do_check_eq(1, failed.length);
   do_check_eq(guid, failed[0]);
 
   let addon = getAddonFromAddonManagerByID(id);
   do_check_eq(null, addon);
 
-  server.stop(run_next_test);
+  await promiseStopServer(server);
 });
 
-add_test(function test_create_bad_install() {
+add_task(async function test_create_bad_install() {
   _("Ensures that add-ons without a valid install are handled gracefully.");
 
   let server = createAndStartHTTPServer(HTTP_PORT);
 
   // The handler returns a search result but the XPI will 404.
   const id = "missing-xpi@tests.mozilla.org";
   let guid = Utils.makeGUID();
   let record = createRecordForThisApp(guid, id, true, false);
 
-  /* let failed = */ store.applyIncomingBatch([record]);
+  /* let failed = */ await store.applyIncomingBatch([record]);
   // This addon had no source URI so was skipped - but it's not treated as
   // failure.
   // XXX - this test isn't testing what we thought it was. Previously the addon
   // was not being installed due to requireSecureURL checking *before* we'd
   // attempted to get the XPI.
   // With requireSecureURL disabled we do see a download failure, but the addon
   // *does* get added to |failed|.
   // FTR: onDownloadFailed() is called with ERROR_NETWORK_FAILURE, so it's going
   // to be tricky to distinguish a 404 from other transient network errors
   // where we do want the addon to end up in |failed|.
   // This is being tracked in bug 1284778.
   // do_check_eq(0, failed.length);
 
   let addon = getAddonFromAddonManagerByID(id);
   do_check_eq(null, addon);
 
-  server.stop(run_next_test);
+  await promiseStopServer(server);
 });
 
-add_test(function test_ignore_system() {
+add_task(async function test_ignore_system() {
   _("Ensure we ignore system addons");
   // Our system addon should not appear in getAllIDs
-  engine._refreshReconcilerState();
+  await engine._refreshReconcilerState();
   let num = 0;
-  for (let guid in store.getAllIDs()) {
+  let ids = await store.getAllIDs();
+  for (let guid in ids) {
     num += 1;
     let addon = reconciler.getAddonStateFromSyncGUID(guid);
     do_check_neq(addon.id, SYSTEM_ADDON_ID);
   }
-  do_check_true(num > 1, "should have seen at least one.")
-  run_next_test();
+  do_check_true(num > 1, "should have seen at least one.");
 });
 
-add_test(function test_incoming_system() {
+add_task(async function test_incoming_system() {
   _("Ensure we handle incoming records that refer to a system addon");
   // eg, loop initially had a normal addon but it was then "promoted" to be a
   // system addon but wanted to keep the same ID. The server record exists due
   // to this.
 
   // before we start, ensure the system addon isn't disabled.
   do_check_false(getAddonFromAddonManagerByID(SYSTEM_ADDON_ID).userDisabled);
 
   // Now simulate an incoming record with the same ID as the system addon,
   // but flagged as disabled - it should not be applied.
   let server = createAndStartHTTPServer(HTTP_PORT);
   // We make the incoming record flag the system addon as disabled - it should
   // be ignored.
   let guid = Utils.makeGUID();
   let record = createRecordForThisApp(guid, SYSTEM_ADDON_ID, false, false);
 
-  let failed = store.applyIncomingBatch([record]);
+  let failed = await store.applyIncomingBatch([record]);
   do_check_eq(0, failed.length);
 
   // The system addon should still not be userDisabled.
   do_check_false(getAddonFromAddonManagerByID(SYSTEM_ADDON_ID).userDisabled);
 
-  server.stop(run_next_test);
+  await promiseStopServer(server);
 });
 
-add_test(function test_wipe() {
+add_task(async function test_wipe() {
   _("Ensures that wiping causes add-ons to be uninstalled.");
 
   let addon1 = installAddon("test_bootstrap1_1");
 
-  store.wipe();
+  await store.wipe();
 
   let addon = getAddonFromAddonManagerByID(addon1.id);
   do_check_eq(null, addon);
-
-  run_next_test();
 });
 
-add_test(function test_wipe_and_install() {
+add_task(async function test_wipe_and_install() {
   _("Ensure wipe followed by install works.");
 
   // This tests the reset sync flow where remote data is replaced by local. The
   // receiving client will see a wipe followed by a record which should undo
   // the wipe.
   let installed = installAddon("test_bootstrap1_1");
 
   let record = createRecordForThisApp(installed.syncGUID, installed.id, true,
                                       false);
 
-  store.wipe();
+  await store.wipe();
 
   let deleted = getAddonFromAddonManagerByID(installed.id);
   do_check_null(deleted);
 
   // Re-applying the record can require re-fetching the XPI.
   let server = createAndStartHTTPServer(HTTP_PORT);
 
-  store.applyIncoming(record);
+  await store.applyIncoming(record);
 
   let fetched = getAddonFromAddonManagerByID(record.addonID);
   do_check_true(!!fetched);
 
-  server.stop(run_next_test);
+  await promiseStopServer(server);
 });
 
 add_test(function cleanup() {
   // There's an xpcom-shutdown hook for this, but let's give this a shot.
   reconciler.stopListening();
   run_next_test();
 });
--- a/services/sync/tests/unit/test_addons_tracker.js
+++ b/services/sync/tests/unit/test_addons_tracker.js
@@ -8,113 +8,113 @@ Cu.import("resource://services-sync/engi
 Cu.import("resource://services-sync/constants.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 
 loadAddonTestFunctions();
 startupManager();
 Svc.Prefs.set("engine.addons", true);
 
-Service.engineManager.register(AddonsEngine);
-var engine     = Service.engineManager.get("addons");
-var reconciler = engine._reconciler;
-var store      = engine._store;
-var tracker    = engine._tracker;
-
-// Don't write out by default.
-tracker.persistChangedIDs = false;
+let engine;
+let reconciler;
+let store;
+let tracker;
 
 const addon1ID = "addon1@tests.mozilla.org";
 
-function cleanup_and_advance() {
+async function cleanup() {
   Svc.Obs.notify("weave:engine:stop-tracking");
   tracker.stopTracking();
 
   tracker.resetScore();
   tracker.clearChangedIDs();
 
   reconciler._addons = {};
   reconciler._changes = [];
-  let cb = Async.makeSpinningCallback();
-  reconciler.saveState(null, cb);
-  cb.wait();
-
-  run_next_test();
+  await reconciler.saveState();
 }
 
-function run_test() {
+add_task(async function setup() {
   initTestLogging("Trace");
   Log.repository.getLogger("Sync.Engine.Addons").level = Log.Level.Trace;
-  Log.repository.getLogger("Sync.AddonsReconciler").level =
-    Log.Level.Trace;
+  Log.repository.getLogger("Sync.AddonsReconciler").level = Log.Level.Trace;
+
+  await Service.engineManager.register(AddonsEngine);
+  engine     = Service.engineManager.get("addons");
+  reconciler = engine._reconciler;
+  store      = engine._store;
+  tracker    = engine._tracker;
 
-  cleanup_and_advance();
-}
+  // Don't write out by default.
+  tracker.persistChangedIDs = false;
 
-add_test(function test_empty() {
+  await cleanup();
+});
+
+add_task(async function test_empty() {
   _("Verify the tracker is empty to start with.");
 
   do_check_eq(0, Object.keys(tracker.changedIDs).length);
   do_check_eq(0, tracker.score);
 
-  cleanup_and_advance();
+  await cleanup();
 });
 
-add_test(function test_not_tracking() {
+add_task(async function test_not_tracking() {
   _("Ensures the tracker doesn't do anything when it isn't tracking.");
 
   let addon = installAddon("test_bootstrap1_1");
   uninstallAddon(addon);
 
   do_check_eq(0, Object.keys(tracker.changedIDs).length);
   do_check_eq(0, tracker.score);
 
-  cleanup_and_advance();
+  await cleanup();
 });
 
-add_test(function test_track_install() {
+add_task(async function test_track_install() {
   _("Ensure that installing an add-on notifies tracker.");
 
   reconciler.startListening();
 
   Svc.Obs.notify("weave:engine:start-tracking");
 
   do_check_eq(0, tracker.score);
   let addon = installAddon("test_bootstrap1_1");
   let changed = tracker.changedIDs;
 
   do_check_eq(1, Object.keys(changed).length);
   do_check_true(addon.syncGUID in changed);
   do_check_eq(SCORE_INCREMENT_XLARGE, tracker.score);
 
   uninstallAddon(addon);
-  cleanup_and_advance();
+  await cleanup();
 });
 
-add_test(function test_track_uninstall() {
+add_task(async function test_track_uninstall() {
   _("Ensure that uninstalling an add-on notifies tracker.");
 
   reconciler.startListening();
 
   let addon = installAddon("test_bootstrap1_1");
   let guid = addon.syncGUID;
   do_check_eq(0, tracker.score);
 
   Svc.Obs.notify("weave:engine:start-tracking");
 
   uninstallAddon(addon);
   let changed = tracker.changedIDs;
   do_check_eq(1, Object.keys(changed).length);
   do_check_true(guid in changed);
   do_check_eq(SCORE_INCREMENT_XLARGE, tracker.score);
 
-  cleanup_and_advance();
+  await cleanup();
 });
 
-add_test(function test_track_user_disable() {
+add_task(async function test_track_user_disable() {
   _("Ensure that tracker sees disabling of add-on");
 
   reconciler.startListening();
 
   let addon = installAddon("test_bootstrap1_1");
   do_check_false(addon.userDisabled);
   do_check_false(addon.appDisabled);
   do_check_true(addon.isActive);
@@ -144,34 +144,34 @@ add_test(function test_track_user_disabl
   Async.waitForSyncCallback(cb);
 
   let changed = tracker.changedIDs;
   do_check_eq(1, Object.keys(changed).length);
   do_check_true(addon.syncGUID in changed);
   do_check_eq(SCORE_INCREMENT_XLARGE, tracker.score);
 
   uninstallAddon(addon);
-  cleanup_and_advance();
+  await cleanup();
 });
 
-add_test(function test_track_enable() {
+add_task(async function test_track_enable() {
   _("Ensure that enabling a disabled add-on notifies tracker.");
 
   reconciler.startListening();
 
   let addon = installAddon("test_bootstrap1_1");
   addon.userDisabled = true;
-  store._sleep(0);
+  await Async.promiseYield();
 
   do_check_eq(0, tracker.score);
 
   Svc.Obs.notify("weave:engine:start-tracking");
   addon.userDisabled = false;
-  store._sleep(0);
+  await Async.promiseYield();
 
   let changed = tracker.changedIDs;
   do_check_eq(1, Object.keys(changed).length);
   do_check_true(addon.syncGUID in changed);
   do_check_eq(SCORE_INCREMENT_XLARGE, tracker.score);
 
   uninstallAddon(addon);
-  cleanup_and_advance();
+  await cleanup();
 });
--- a/services/sync/tests/unit/test_bookmark_batch_fail.js
+++ b/services/sync/tests/unit/test_bookmark_batch_fail.js
@@ -1,22 +1,23 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 _("Making sure a failing sync reports a useful error");
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://services-sync/service.js");
 
-function run_test() {
+add_task(async function run_test() {
   let engine = new BookmarksEngine(Service);
-  engine._syncStartup = function() {
+  await engine.initialize();
+  engine._syncStartup = async function() {
     throw "FAIL!";
   };
 
   try {
     _("Try calling the sync that should throw right away");
-    engine._sync();
+    await engine._sync();
     do_throw("Should have failed sync!");
   } catch (ex) {
     _("Making sure what we threw ended up as the exception:", ex);
     do_check_eq(ex, "FAIL!");
   }
-}
+});
--- a/services/sync/tests/unit/test_bookmark_decline_undecline.js
+++ b/services/sync/tests/unit/test_bookmark_decline_undecline.js
@@ -6,19 +6,20 @@ Cu.import("resource://gre/modules/Bookma
 Cu.import("resource://gre/modules/Log.jsm");
 Cu.import("resource://services-sync/constants.js");
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
-initTestLogging("Trace");
-
-Service.engineManager.register(BookmarksEngine);
+add_task(async function setup() {
+  initTestLogging("Trace");
+  await Service.engineManager.register(BookmarksEngine);
+});
 
 // A stored reference to the collection won't be valid after disabling.
 function getBookmarkWBO(server, guid) {
   let coll = server.user("foo").collection("bookmarks");
   if (!coll) {
     return null;
   }
   return coll.wbo(guid);
@@ -33,25 +34,25 @@ add_task(async function test_decline_und
     let { guid: bzGuid } = await PlacesUtils.bookmarks.insert({
       parentGuid: PlacesUtils.bookmarks.menuGuid,
       url: "https://bugzilla.mozilla.org",
       index: PlacesUtils.bookmarks.DEFAULT_INDEX,
       title: "bugzilla",
     });
 
     ok(!getBookmarkWBO(server, bzGuid), "Shouldn't have been uploaded yet");
-    Service.sync();
+    await Service.sync();
     ok(getBookmarkWBO(server, bzGuid), "Should be present on server");
 
     engine.enabled = false;
-    Service.sync();
+    await Service.sync();
     ok(!getBookmarkWBO(server, bzGuid), "Shouldn't be present on server anymore");
 
     engine.enabled = true;
-    Service.sync();
+    await Service.sync();
     ok(getBookmarkWBO(server, bzGuid), "Should be present on server again");
 
   } finally {
     await PlacesSyncUtils.bookmarks.reset();
     await promiseStopServer(server);
   }
 });
 
--- a/services/sync/tests/unit/test_bookmark_duping.js
+++ b/services/sync/tests/unit/test_bookmark_duping.js
@@ -6,50 +6,51 @@ Cu.import("resource://services-common/as
 Cu.import("resource://gre/modules/Log.jsm");
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 Cu.import("resource://services-sync/bookmark_validator.js");
 
-
-initTestLogging("Trace");
-
 const bms = PlacesUtils.bookmarks;
-
-Service.engineManager.register(BookmarksEngine);
+let engine;
+let store;
 
-const engine = new BookmarksEngine(Service);
-const store = engine._store;
-store._log.level = Log.Level.Trace;
-engine._log.level = Log.Level.Trace;
+add_task(async function setup() {
+  initTestLogging("Trace");
+  await Service.engineManager.register(BookmarksEngine);
+  engine = Service.engineManager.get("bookmarks");
+  store = engine._store;
+  store._log.level = Log.Level.Trace;
+  engine._log.level = Log.Level.Trace;
+});
 
-async function setup() {
+async function sharedSetup() {
  let server = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let collection = server.user("foo").collection("bookmarks");
 
   Svc.Obs.notify("weave:engine:start-tracking");   // We skip usual startup...
 
   return { server, collection };
 }
 
 async function cleanup(server) {
   Svc.Obs.notify("weave:engine:stop-tracking");
   let promiseStartOver = promiseOneObserver("weave:service:start-over:finish");
-  Service.startOver();
+  await Service.startOver();
   await promiseStartOver;
   await promiseStopServer(server);
   await bms.eraseEverything();
 }
 
 async function syncIdToId(syncId) {
-  let guid = await PlacesSyncUtils.bookmarks.syncIdToGuid(syncId);
+  let guid = PlacesSyncUtils.bookmarks.syncIdToGuid(syncId);
   return PlacesUtils.promiseItemId(guid);
 }
 
 async function getFolderChildrenIDs(folderId) {
   let folderSyncId = PlacesSyncUtils.bookmarks.guidToSyncId(await PlacesUtils.promiseItemGuid(folderId));
   let syncIds = await PlacesSyncUtils.bookmarks.fetchChildSyncIds(folderSyncId);
   return Promise.all(syncIds.map(async (syncId) => await syncIdToId(syncId)));
 }
@@ -116,24 +117,24 @@ async function validate(collection, expe
     do_print("Local bookmark tree:\n" + JSON.stringify(tree, undefined, 2));
     ok(false);
   }
 }
 
 add_task(async function test_dupe_bookmark() {
   _("Ensure that a bookmark we consider a dupe is handled correctly.");
 
-  let { server, collection } = await this.setup();
+  let { server, collection } = await this.sharedSetup();
 
   try {
     // The parent folder and one bookmark in it.
     let {id: folder1_id, guid: folder1_guid } = await createFolder(bms.toolbarFolder, "Folder 1");
     let {id: localId, guid: bmk1_guid} = await createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
 
-    engine.sync();
+    await engine.sync();
 
     // We've added the bookmark, its parent (folder1) plus "menu", "toolbar", "unfiled", and "mobile".
     equal(collection.count(), 6);
     equal((await getFolderChildrenIDs(folder1_id)).length, 1);
 
     // Now create a new incoming record that looks alot like a dupe.
     let newGUID = Utils.makeGUID();
     let to_apply = {
@@ -160,17 +161,17 @@ add_task(async function test_dupe_bookma
         equal(source, PlacesUtils.bookmarks.SOURCE_SYNC);
         onItemChangedObserved = true;
       }
     };
     PlacesUtils.bookmarks.addObserver(obs, false);
 
     _("Syncing so new dupe record is processed");
     engine.lastSync = engine.lastSync - 5;
-    engine.sync();
+    await engine.sync();
 
     // We should have logically deleted the dupe record.
     equal(collection.count(), 7);
     ok(getServerRecord(collection, bmk1_guid).deleted);
     // and physically removed from the local store.
     await promiseNoLocalItem(bmk1_guid);
     // Parent should still only have 1 item.
     equal((await getFolderChildrenIDs(folder1_id)).length, 1);
@@ -187,28 +188,28 @@ add_task(async function test_dupe_bookma
   } finally {
     await cleanup(server);
   }
 });
 
 add_task(async function test_dupe_reparented_bookmark() {
   _("Ensure that a bookmark we consider a dupe from a different parent is handled correctly");
 
-  let { server, collection } = await this.setup();
+  let { server, collection } = await this.sharedSetup();
 
   try {
     // The parent folder and one bookmark in it.
     let {id: folder1_id, guid: folder1_guid } = await createFolder(bms.toolbarFolder, "Folder 1");
     let {guid: bmk1_guid} = await createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
     // Another parent folder *with the same name*
     let {id: folder2_id, guid: folder2_guid } = await createFolder(bms.toolbarFolder, "Folder 1");
 
     do_print(`folder1_guid=${folder1_guid}, folder2_guid=${folder2_guid}, bmk1_guid=${bmk1_guid}`);
 
-    engine.sync();
+    await engine.sync();
 
     // We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile".
     equal(collection.count(), 7);
     equal((await getFolderChildrenIDs(folder1_id)).length, 1);
     equal((await getFolderChildrenIDs(folder2_id)).length, 0);
 
     // Now create a new incoming record that looks alot like a dupe of the
     // item in folder1_guid, but with a record that points to folder2_guid.
@@ -221,17 +222,17 @@ add_task(async function test_dupe_repare
       parentName: "Folder 1",
       parentid: folder2_guid,
     };
 
     collection.insert(newGUID, encryptPayload(to_apply), Date.now() / 1000 + 500);
 
     _("Syncing so new dupe record is processed");
     engine.lastSync = engine.lastSync - 5;
-    engine.sync();
+    await engine.sync();
 
     // We should have logically deleted the dupe record.
     equal(collection.count(), 8);
     ok(getServerRecord(collection, bmk1_guid).deleted);
     // and physically removed from the local store.
     await promiseNoLocalItem(bmk1_guid);
     // The original folder no longer has the item
     equal((await getFolderChildrenIDs(folder1_id)).length, 0);
@@ -253,28 +254,28 @@ add_task(async function test_dupe_repare
   } finally {
     await cleanup(server);
   }
 });
 
 add_task(async function test_dupe_reparented_locally_changed_bookmark() {
   _("Ensure that a bookmark with local changes we consider a dupe from a different parent is handled correctly");
 
-  let { server, collection } = await this.setup();
+  let { server, collection } = await this.sharedSetup();
 
   try {
     // The parent folder and one bookmark in it.
     let {id: folder1_id, guid: folder1_guid } = await createFolder(bms.toolbarFolder, "Folder 1");
     let {guid: bmk1_guid} = await createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
     // Another parent folder *with the same name*
     let {id: folder2_id, guid: folder2_guid } = await createFolder(bms.toolbarFolder, "Folder 1");
 
     do_print(`folder1_guid=${folder1_guid}, folder2_guid=${folder2_guid}, bmk1_guid=${bmk1_guid}`);
 
-    engine.sync();
+    await engine.sync();
 
     // We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile".
     equal(collection.count(), 7);
     equal((await getFolderChildrenIDs(folder1_id)).length, 1);
     equal((await getFolderChildrenIDs(folder2_id)).length, 0);
 
     // Now create a new incoming record that looks alot like a dupe of the
     // item in folder1_guid, but with a record that points to folder2_guid.
@@ -298,17 +299,17 @@ add_task(async function test_dupe_repare
     await PlacesTestUtils.setBookmarkSyncFields({
       guid: bmk1_guid,
       syncChangeCounter: 1,
       lastModified: Date.now() + (deltaSeconds + 10) * 1000,
     });
 
     _("Syncing so new dupe record is processed");
     engine.lastSync = engine.lastSync - 5;
-    engine.sync();
+    await engine.sync();
 
     // We should have logically deleted the dupe record.
     equal(collection.count(), 8);
     ok(getServerRecord(collection, bmk1_guid).deleted);
     // and physically removed from the local store.
     await promiseNoLocalItem(bmk1_guid);
     // The original folder still longer has the item
     equal((await getFolderChildrenIDs(folder1_id)).length, 1);
@@ -331,28 +332,28 @@ add_task(async function test_dupe_repare
     await cleanup(server);
   }
 });
 
 add_task(async function test_dupe_reparented_to_earlier_appearing_parent_bookmark() {
   _("Ensure that a bookmark we consider a dupe from a different parent that " +
     "appears in the same sync before the dupe item");
 
-  let { server, collection } = await this.setup();
+  let { server, collection } = await this.sharedSetup();
 
   try {
     // The parent folder and one bookmark in it.
     let {id: folder1_id, guid: folder1_guid } = await createFolder(bms.toolbarFolder, "Folder 1");
     let {guid: bmk1_guid} = await createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
     // One more folder we'll use later.
     let {guid: folder2_guid} = await createFolder(bms.toolbarFolder, "A second folder");
 
     do_print(`folder1=${folder1_guid}, bmk1=${bmk1_guid} folder2=${folder2_guid}`);
 
-    engine.sync();
+    await engine.sync();
 
     // We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile".
     equal(collection.count(), 7);
     equal((await getFolderChildrenIDs(folder1_id)).length, 1);
 
     let newGUID = Utils.makeGUID();
     let newParentGUID = Utils.makeGUID();
 
@@ -389,47 +390,47 @@ add_task(async function test_dupe_repare
       parentName: "Folder 1",
       parentid: newParentGUID,
       tags: [],
     }), Date.now() / 1000 + 500);
 
 
     _("Syncing so new records are processed.");
     engine.lastSync = engine.lastSync - 5;
-    engine.sync();
+    await engine.sync();
 
     // Everything should be parented correctly.
     equal((await getFolderChildrenIDs(folder1_id)).length, 0);
-    let newParentID = store.idForGUID(newParentGUID);
-    let newID = store.idForGUID(newGUID);
+    let newParentID = await store.idForGUID(newParentGUID);
+    let newID = await store.idForGUID(newGUID);
     deepEqual(await getFolderChildrenIDs(newParentID), [newID]);
 
     // Make sure the validator thinks everything is hunky-dory.
     await validate(collection);
   } finally {
     await cleanup(server);
   }
 });
 
 add_task(async function test_dupe_reparented_to_later_appearing_parent_bookmark() {
   _("Ensure that a bookmark we consider a dupe from a different parent that " +
     "doesn't exist locally as we process the child, but does appear in the same sync");
 
-  let { server, collection } = await this.setup();
+  let { server, collection } = await this.sharedSetup();
 
   try {
     // The parent folder and one bookmark in it.
     let {id: folder1_id, guid: folder1_guid } = await createFolder(bms.toolbarFolder, "Folder 1");
     let {guid: bmk1_guid} = await createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
     // One more folder we'll use later.
     let {guid: folder2_guid} = await createFolder(bms.toolbarFolder, "A second folder");
 
     do_print(`folder1=${folder1_guid}, bmk1=${bmk1_guid} folder2=${folder2_guid}`);
 
-    engine.sync();
+    await engine.sync();
 
     // We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile".
     equal(collection.count(), 7);
     equal((await getFolderChildrenIDs(folder1_id)).length, 1);
 
     // Now create a new incoming record that looks alot like a dupe of the
     // item in folder1_guid, but with a record that points to a parent with the
     // same name, but a non-existing local ID.
@@ -465,48 +466,48 @@ add_task(async function test_dupe_repare
       parentName: "Bookmarks Toolbar",
       parentid: "toolbar",
       children: [newParentGUID],
       tags: [],
     }), Date.now() / 1000 + 500);
 
     _("Syncing so out-of-order records are processed.");
     engine.lastSync = engine.lastSync - 5;
-    engine.sync();
+    await engine.sync();
 
     // The intended parent did end up existing, so it should be parented
     // correctly after de-duplication.
     equal((await getFolderChildrenIDs(folder1_id)).length, 0);
-    let newParentID = store.idForGUID(newParentGUID);
-    let newID = store.idForGUID(newGUID);
+    let newParentID = await store.idForGUID(newParentGUID);
+    let newID = await store.idForGUID(newGUID);
     deepEqual(await getFolderChildrenIDs(newParentID), [newID]);
 
     // Make sure the validator thinks everything is hunky-dory.
     await validate(collection);
   } finally {
     await cleanup(server);
   }
 });
 
 add_task(async function test_dupe_reparented_to_future_arriving_parent_bookmark() {
   _("Ensure that a bookmark we consider a dupe from a different parent that " +
     "doesn't exist locally and doesn't appear in this Sync is handled correctly");
 
-  let { server, collection } = await this.setup();
+  let { server, collection } = await this.sharedSetup();
 
   try {
     // The parent folder and one bookmark in it.
     let {id: folder1_id, guid: folder1_guid } = await createFolder(bms.toolbarFolder, "Folder 1");
     let {guid: bmk1_guid} = await createBookmark(folder1_id, "http://getfirefox.com/", "Get Firefox!");
     // One more folder we'll use later.
     let {guid: folder2_guid} = await createFolder(bms.toolbarFolder, "A second folder");
 
     do_print(`folder1=${folder1_guid}, bmk1=${bmk1_guid} folder2=${folder2_guid}`);
 
-    engine.sync();
+    await engine.sync();
 
     // We've added the bookmark, 2 folders plus "menu", "toolbar", "unfiled", and "mobile".
     equal(collection.count(), 7);
     equal((await getFolderChildrenIDs(folder1_id)).length, 1);
 
     // Now create a new incoming record that looks alot like a dupe of the
     // item in folder1_guid, but with a record that points to a parent with the
     // same name, but a non-existing local ID.
@@ -521,34 +522,34 @@ add_task(async function test_dupe_repare
       parentName: "Folder 1",
       parentid: newParentGUID,
       tags: [],
       dateAdded: Date.now() - 10000
     }), Date.now() / 1000 + 500);
 
     _("Syncing so new dupe record is processed");
     engine.lastSync = engine.lastSync - 5;
-    engine.sync();
+    await engine.sync();
 
     // We should have logically deleted the dupe record.
     equal(collection.count(), 8);
     ok(getServerRecord(collection, bmk1_guid).deleted);
     // and physically removed from the local store.
     await promiseNoLocalItem(bmk1_guid);
     // The intended parent doesn't exist, so it remains in the original folder
     equal((await getFolderChildrenIDs(folder1_id)).length, 1);
 
     // The record for folder1 on the server should reference the new GUID.
     let serverRecord1 = getServerRecord(collection, folder1_guid);
     ok(!serverRecord1.children.includes(bmk1_guid));
     ok(serverRecord1.children.includes(newGUID));
 
     // As the incoming parent is missing the item should have been annotated
     // with that missing parent.
-    equal(PlacesUtils.annotations.getItemAnnotation(store.idForGUID(newGUID), "sync/parent"),
+    equal(PlacesUtils.annotations.getItemAnnotation((await store.idForGUID(newGUID)), "sync/parent"),
           newParentGUID);
 
     // Check the validator. Sadly, this is known to cause a mismatch between
     // the server and client views of the tree.
     let expected = [
       // We haven't fixed the incoming record that referenced the missing parent.
       { name: "orphans", count: 1 },
     ];
@@ -577,22 +578,22 @@ add_task(async function test_dupe_repare
       children: [newParentGUID],
       tags: [],
       dateAdded: Date.now() - 11000,
     }), Date.now() / 1000 + 500);
 
 
     _("Syncing so missing parent appears");
     engine.lastSync = engine.lastSync - 5;
-    engine.sync();
+    await engine.sync();
 
     // The intended parent now does exist, so it should have been reparented.
     equal((await getFolderChildrenIDs(folder1_id)).length, 0);
-    let newParentID = store.idForGUID(newParentGUID);
-    let newID = store.idForGUID(newGUID);
+    let newParentID = await store.idForGUID(newParentGUID);
+    let newID = await store.idForGUID(newGUID);
     deepEqual(await getFolderChildrenIDs(newParentID), [newID]);
 
     // validation now has different errors :(
     expected = [
       // The validator reports multipleParents because:
       // * The incoming record newParentGUID still (and correctly) references
       //   newGUID as a child.
       // * Our original Folder1 was updated to include newGUID when it
@@ -609,23 +610,23 @@ add_task(async function test_dupe_repare
     await cleanup(server);
   }
 });
 
 add_task(async function test_dupe_empty_folder() {
   _("Ensure that an empty folder we consider a dupe is handled correctly.");
   // Empty folders aren't particularly interesting in practice (as that seems
   // an edge-case) but duping folders with items is broken - bug 1293163.
-  let { server, collection } = await this.setup();
+  let { server, collection } = await this.sharedSetup();
 
   try {
     // The folder we will end up duping away.
     let {guid: folder1_guid } = await createFolder(bms.toolbarFolder, "Folder 1");
 
-    engine.sync();
+    await engine.sync();
 
     // We've added 1 folder, "menu", "toolbar", "unfiled", and "mobile".
     equal(collection.count(), 5);
 
     // Now create new incoming records that looks alot like a dupe of "Folder 1".
     let newFolderGUID = Utils.makeGUID();
     collection.insert(newFolderGUID, encryptPayload({
       id: newFolderGUID,
@@ -633,17 +634,17 @@ add_task(async function test_dupe_empty_
       title: "Folder 1",
       parentName: "Bookmarks Toolbar",
       parentid: "toolbar",
       children: [],
     }), Date.now() / 1000 + 500);
 
     _("Syncing so new dupe records are processed");
     engine.lastSync = engine.lastSync - 5;
-    engine.sync();
+    await engine.sync();
 
     await validate(collection);
 
     // Collection now has one additional record - the logically deleted dupe.
     equal(collection.count(), 6);
     // original folder should be logically deleted.
     ok(getServerRecord(collection, folder1_guid).deleted);
     await promiseNoLocalItem(folder1_guid);
--- a/services/sync/tests/unit/test_bookmark_engine.js
+++ b/services/sync/tests/unit/test_bookmark_engine.js
@@ -34,26 +34,28 @@ async function fetchAllSyncIds() {
   }
   return syncIds;
 }
 
 add_task(async function test_delete_invalid_roots_from_server() {
   _("Ensure that we delete the Places and Reading List roots from the server.");
 
   let engine  = new BookmarksEngine(Service);
+  await engine.initialize();
   let store   = engine._store;
   let server = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let collection = server.user("foo").collection("bookmarks");
 
   Svc.Obs.notify("weave:engine:start-tracking");
 
   try {
-    collection.insert("places", encryptPayload(store.createRecord("places").cleartext));
+    let placesRecord = await store.createRecord("places");
+    collection.insert("places", encryptPayload(placesRecord.cleartext));
 
     let listBmk = new Bookmark("bookmarks", Utils.makeGUID());
     listBmk.bmkUri = "https://example.com";
     listBmk.title = "Example reading list entry";
     listBmk.parentName = "Reading List";
     listBmk.parentid = "readinglist";
     collection.insert(listBmk.id, encryptPayload(listBmk.cleartext));
 
@@ -71,24 +73,24 @@ add_task(async function test_delete_inva
     newBmk.parentid = "toolbar";
     collection.insert(newBmk.id, encryptPayload(newBmk.cleartext));
 
     deepEqual(collection.keys().sort(), ["places", "readinglist", listBmk.id, newBmk.id].sort(),
       "Should store Places root, reading list items, and new bookmark on server");
 
     await sync_engine_and_validate_telem(engine, false);
 
-    ok(!store.itemExists("readinglist"), "Should not apply Reading List root");
-    ok(!store.itemExists(listBmk.id), "Should not apply items in Reading List");
-    ok(store.itemExists(newBmk.id), "Should apply new bookmark");
+    ok(!(await store.itemExists("readinglist")), "Should not apply Reading List root");
+    ok(!(await store.itemExists(listBmk.id)), "Should not apply items in Reading List");
+    ok((await store.itemExists(newBmk.id)), "Should apply new bookmark");
 
     deepEqual(collection.keys().sort(), ["menu", "mobile", "toolbar", "unfiled", newBmk.id].sort(),
       "Should remove Places root and reading list items from server; upload local roots");
   } finally {
-    store.wipe();
+    await store.wipe();
     Svc.Prefs.resetBranch("");
     Service.recordManager.clearCache();
     await promiseStopServer(server);
     Svc.Obs.notify("weave:engine:stop-tracking");
   }
 });
 
 add_task(async function bad_record_allIDs() {
@@ -119,39 +121,41 @@ add_task(async function bad_record_allID
   await PlacesSyncUtils.bookmarks.reset();
   await promiseStopServer(server);
 });
 
 add_task(async function test_processIncoming_error_orderChildren() {
   _("Ensure that _orderChildren() is called even when _processIncoming() throws an error.");
 
   let engine = new BookmarksEngine(Service);
+  await engine.initialize();
   let store  = engine._store;
   let server = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let collection = server.user("foo").collection("bookmarks");
 
   try {
 
     let folder1_id = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.toolbarFolder, "Folder 1", 0);
-    let folder1_guid = store.GUIDForId(folder1_id);
+    let folder1_guid = await store.GUIDForId(folder1_id);
 
     let fxuri = Utils.makeURI("http://getfirefox.com/");
     let tburi = Utils.makeURI("http://getthunderbird.com/");
 
     let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
       folder1_id, fxuri, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
     let bmk2_id = PlacesUtils.bookmarks.insertBookmark(
       folder1_id, tburi, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Thunderbird!");
 
     // Create a server record for folder1 where we flip the order of
     // the children.
-    let folder1_payload = store.createRecord(folder1_guid).cleartext;
+    let folder1_record = await store.createRecord(folder1_guid);
+    let folder1_payload = folder1_record.cleartext;
     folder1_payload.children.reverse();
     collection.insert(folder1_guid, encryptPayload(folder1_payload));
 
     // Create a bogus record that when synced down will provoke a
     // network error which in turn provokes an exception in _processIncoming.
     const BOGUS_GUID = "zzzzzzzzzzzz";
     let bogus_record = collection.insert(BOGUS_GUID, "I'm a bogus record!");
     bogus_record.get = function get() {
@@ -167,58 +171,60 @@ add_task(async function test_processInco
     try {
       await sync_engine_and_validate_telem(engine, true)
     } catch (ex) {
       error = ex;
     }
     ok(!!error);
 
     // Verify that the bookmark order has been applied.
-    let new_children = store.createRecord(folder1_guid).children;
+    folder1_record = await store.createRecord(folder1_guid);
+    let new_children = folder1_record.children;
     do_check_eq(new_children.length, 2);
     do_check_eq(new_children[0], folder1_payload.children[0]);
     do_check_eq(new_children[1], folder1_payload.children[1]);
 
     do_check_eq(PlacesUtils.bookmarks.getItemIndex(bmk1_id), 1);
     do_check_eq(PlacesUtils.bookmarks.getItemIndex(bmk2_id), 0);
 
   } finally {
-    store.wipe();
+    await store.wipe();
     Svc.Prefs.resetBranch("");
     Service.recordManager.clearCache();
     await PlacesSyncUtils.bookmarks.reset();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_restorePromptsReupload() {
   _("Ensure that restoring from a backup will reupload all records.");
   let engine = new BookmarksEngine(Service);
+  await engine.initialize();
   let store  = engine._store;
   let server = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let collection = server.user("foo").collection("bookmarks");
 
   Svc.Obs.notify("weave:engine:start-tracking");   // We skip usual startup...
 
   try {
 
     let folder1_id = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.toolbarFolder, "Folder 1", 0);
-    let folder1_guid = store.GUIDForId(folder1_id);
+    let folder1_guid = await store.GUIDForId(folder1_id);
     _("Folder 1: " + folder1_id + ", " + folder1_guid);
 
     let fxuri = Utils.makeURI("http://getfirefox.com/");
     let tburi = Utils.makeURI("http://getthunderbird.com/");
 
     _("Create a single record.");
     let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
       folder1_id, fxuri, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
-    let bmk1_guid = store.GUIDForId(bmk1_id);
+    let bmk1_guid = await store.GUIDForId(bmk1_id);
     _("Get Firefox!: " + bmk1_id + ", " + bmk1_guid);
 
 
     let dirSvc = Cc["@mozilla.org/file/directory_service;1"]
       .getService(Ci.nsIProperties);
 
     let backupFile = dirSvc.get("TmpD", Ci.nsILocalFile);
 
@@ -226,17 +232,17 @@ add_task(async function test_restoreProm
     backupFile.append("t_b_e_" + Date.now() + ".json");
 
     _("Backing up to file " + backupFile.path);
     await BookmarkJSONUtils.exportToFile(backupFile.path);
 
     _("Create a different record and sync.");
     let bmk2_id = PlacesUtils.bookmarks.insertBookmark(
       folder1_id, tburi, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Thunderbird!");
-    let bmk2_guid = store.GUIDForId(bmk2_id);
+    let bmk2_guid = await store.GUIDForId(bmk2_id);
     _("Get Thunderbird!: " + bmk2_id + ", " + bmk2_guid);
 
     PlacesUtils.bookmarks.removeItem(bmk1_id);
 
     let error;
     try {
       await sync_engine_and_validate_telem(engine, false);
     } catch (ex) {
@@ -259,17 +265,17 @@ add_task(async function test_restoreProm
     _("Ensure we have the bookmarks we expect locally.");
     let guids = await fetchAllSyncIds();
     _("GUIDs: " + JSON.stringify([...guids]));
     let found = false;
     let count = 0;
     let newFX;
     for (let guid of guids) {
       count++;
-      let id = store.idForGUID(guid, true);
+      let id = await store.idForGUID(guid, true);
       // Only one bookmark, so _all_ should be Firefox!
       if (PlacesUtils.bookmarks.getItemType(id) == PlacesUtils.bookmarks.TYPE_BOOKMARK) {
         let uri = PlacesUtils.bookmarks.getBookmarkURI(id);
         _("Found URI " + uri.spec + " for GUID " + guid);
         do_check_eq(uri.spec, fxuri.spec);
         newFX = guid;   // Save the new GUID after restore.
         found = true;   // Only runs if the above check passes.
       }
@@ -308,17 +314,17 @@ add_task(async function test_restoreProm
     do_check_eq(bookmarkWBOs[0].bmkUri, fxuri.spec);
     do_check_eq(bookmarkWBOs[0].title, "Get Firefox!");
 
     _("Our old friend Folder 1 is still in play.");
     do_check_eq(folderWBOs.length, 1);
     do_check_eq(folderWBOs[0].title, "Folder 1");
 
   } finally {
-    store.wipe();
+    await store.wipe();
     Svc.Prefs.resetBranch("");
     Service.recordManager.clearCache();
     await PlacesSyncUtils.bookmarks.reset();
     await promiseStopServer(server);
   }
 });
 
 function FakeRecord(constructor, r) {
@@ -357,112 +363,115 @@ add_task(async function test_mismatched_
       ["HCRq40Rnxhrd", "YeyWCV1RVsYw", "GCceVZMhvMbP", "sYi2hevdArlF",
        "vjbZlPlSyGY8", "UtjUhVyrpeG6", "rVq8WMG2wfZI", "Lx0tcy43ZKhZ",
        "oT74WwV8_j4P", "IztsItWVSo3-"],
     "parentid": "toolbar"
   };
   newRecord.cleartext = newRecord;
 
   let engine = new BookmarksEngine(Service);
+  await engine.initialize();
   let store  = engine._store;
   let server = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
-  _("GUID: " + store.GUIDForId(6, true));
+  _("GUID: " + (await store.GUIDForId(6, true)));
 
   try {
     let bms = PlacesUtils.bookmarks;
     let oldR = new FakeRecord(BookmarkFolder, oldRecord);
     let newR = new FakeRecord(Livemark, newRecord);
     oldR.parentid = PlacesUtils.bookmarks.toolbarGuid;
     newR.parentid = PlacesUtils.bookmarks.toolbarGuid;
 
-    store.applyIncoming(oldR);
+    await store.applyIncoming(oldR);
     _("Applied old. It's a folder.");
-    let oldID = store.idForGUID(oldR.id);
+    let oldID = await store.idForGUID(oldR.id);
     _("Old ID: " + oldID);
     do_check_eq(bms.getItemType(oldID), bms.TYPE_FOLDER);
     do_check_false(PlacesUtils.annotations
                               .itemHasAnnotation(oldID, PlacesUtils.LMANNO_FEEDURI));
 
-    store.applyIncoming(newR);
-    let newID = store.idForGUID(newR.id);
+    await store.applyIncoming(newR);
+    let newID = await store.idForGUID(newR.id);
     _("New ID: " + newID);
 
     _("Applied new. It's a livemark.");
     do_check_eq(bms.getItemType(newID), bms.TYPE_FOLDER);
     do_check_true(PlacesUtils.annotations
                              .itemHasAnnotation(newID, PlacesUtils.LMANNO_FEEDURI));
 
   } finally {
-    store.wipe();
+    await store.wipe();
     Svc.Prefs.resetBranch("");
     Service.recordManager.clearCache();
     await PlacesSyncUtils.bookmarks.reset();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_bookmark_guidMap_fail() {
   _("Ensure that failures building the GUID map cause early death.");
 
   let engine = new BookmarksEngine(Service);
+  await engine.initialize();
   let store = engine._store;
 
   let server = serverForFoo(engine);
   let coll   = server.user("foo").collection("bookmarks");
   await SyncTestingInfrastructure(server);
 
   // Add one item to the server.
   let itemID = PlacesUtils.bookmarks.createFolder(
     PlacesUtils.bookmarks.toolbarFolder, "Folder 1", 0);
-  let itemGUID    = store.GUIDForId(itemID);
-  let itemPayload = store.createRecord(itemGUID).cleartext;
+  let itemGUID = await store.GUIDForId(itemID);
+  let itemRecord = await store.createRecord(itemGUID);
+  let itemPayload = itemRecord.cleartext;
   coll.insert(itemGUID, encryptPayload(itemPayload));
 
   engine.lastSync = 1;   // So we don't back up.
 
   // Make building the GUID map fail.
 
   let pbt = PlacesUtils.promiseBookmarksTree;
   PlacesUtils.promiseBookmarksTree = function() { return Promise.reject("Nooo"); };
 
-  // Ensure that we throw when accessing _guidMap.
-  engine._syncStartup();
+  // Ensure that we throw when calling getGuidMap().
+  await engine._syncStartup();
   _("No error.");
-  do_check_false(engine._guidMapFailed);
 
   _("We get an error if building _guidMap fails in use.");
   let err;
   try {
-    _(engine._guidMap);
+    _(await engine.getGuidMap());
   } catch (ex) {
     err = ex;
   }
   do_check_eq(err.code, Engine.prototype.eEngineAbortApplyIncoming);
   do_check_eq(err.cause, "Nooo");
 
   _("We get an error and abort during processIncoming.");
   err = undefined;
   try {
-    engine._processIncoming();
+    await engine._processIncoming();
   } catch (ex) {
     err = ex;
   }
   do_check_eq(err, "Nooo");
 
   PlacesUtils.promiseBookmarksTree = pbt;
   await PlacesSyncUtils.bookmarks.reset();
   await promiseStopServer(server);
 });
 
 add_task(async function test_bookmark_tag_but_no_uri() {
   _("Ensure that a bookmark record with tags, but no URI, doesn't throw an exception.");
 
   let engine = new BookmarksEngine(Service);
+  await engine.initialize();
   let store = engine._store;
 
   // We're simply checking that no exception is thrown, so
   // no actual checks in this test.
 
   await PlacesSyncUtils.bookmarks.insert({
     kind: PlacesSyncUtils.bookmarks.KINDS.BOOKMARK,
     syncId: Utils.makeGUID(),
@@ -489,42 +498,43 @@ add_task(async function test_bookmark_ta
     parentid:    "toolbar",
     id:          Utils.makeGUID(),
     description: "",
     tags:        ["foo"],
     title:       "Taggy tag",
     type:        "folder"
   });
 
-  store.create(record);
+  await store.create(record);
   record.tags = ["bar"];
-  store.update(record);
+  await store.update(record);
 });
 
 add_task(async function test_misreconciled_root() {
   _("Ensure that we don't reconcile an arbitrary record with a root.");
 
   let engine = new BookmarksEngine(Service);
+  await engine.initialize();
   let store = engine._store;
   let server = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   // Log real hard for this test.
   store._log.trace = store._log.debug;
   engine._log.trace = engine._log.debug;
 
-  engine._syncStartup();
+  await engine._syncStartup();
 
   // Let's find out where the toolbar is right now.
-  let toolbarBefore = store.createRecord("toolbar", "bookmarks");
-  let toolbarIDBefore = store.idForGUID("toolbar");
+  let toolbarBefore = await store.createRecord("toolbar", "bookmarks");
+  let toolbarIDBefore = await store.idForGUID("toolbar");
   do_check_neq(-1, toolbarIDBefore);
 
   let parentGUIDBefore = toolbarBefore.parentid;
-  let parentIDBefore = store.idForGUID(parentGUIDBefore);
+  let parentIDBefore = await store.idForGUID(parentGUIDBefore);
   do_check_neq(-1, parentIDBefore);
   do_check_eq("string", typeof(parentGUIDBefore));
 
   _("Current parent: " + parentGUIDBefore + " (" + parentIDBefore + ").");
 
   let to_apply = {
     id: "zzzzzzzzzzzz",
     type: "folder",
@@ -538,32 +548,33 @@ add_task(async function test_misreconcil
   let rec = new FakeRecord(BookmarkFolder, to_apply);
 
   _("Applying record.");
   store.applyIncoming(rec);
 
   // Ensure that afterwards, toolbar is still there.
   // As of 2012-12-05, this only passes because Places doesn't use "toolbar" as
   // the real GUID, instead using a generated one. Sync does the translation.
-  let toolbarAfter = store.createRecord("toolbar", "bookmarks");
+  let toolbarAfter = await store.createRecord("toolbar", "bookmarks");
   let parentGUIDAfter = toolbarAfter.parentid;
-  let parentIDAfter = store.idForGUID(parentGUIDAfter);
-  do_check_eq(store.GUIDForId(toolbarIDBefore), "toolbar");
+  let parentIDAfter = await store.idForGUID(parentGUIDAfter);
+  do_check_eq((await store.GUIDForId(toolbarIDBefore)), "toolbar");
   do_check_eq(parentGUIDBefore, parentGUIDAfter);
   do_check_eq(parentIDBefore, parentIDAfter);
 
   await store.wipe();
   await PlacesSyncUtils.bookmarks.reset();
   await promiseStopServer(server);
 });
 
 add_task(async function test_sync_dateAdded() {
   await Service.recordManager.clearCache();
   await PlacesSyncUtils.bookmarks.reset();
   let engine = new BookmarksEngine(Service);
+  await engine.initialize();
   let store  = engine._store;
   let server = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let collection = server.user("foo").collection("bookmarks");
 
   // TODO: Avoid random orange (bug 1374599), this is only necessary
   // intermittently - reset the last sync date so that we'll get all bookmarks.
@@ -629,18 +640,18 @@ add_task(async function test_sync_dateAd
     item6.bmkUri = "https://example.com/6";
     item6.title = "asdf6";
     item6.parentName = "Bookmarks Toolbar";
     item6.parentid = "toolbar";
     const item6LastModified = (now - oneYearMS) / 1000;
     collection.insert(item6GUID, encryptPayload(item6.cleartext), item6LastModified);
 
     let origBuildWeakReuploadMap = engine.buildWeakReuploadMap;
-    engine.buildWeakReuploadMap = set => {
-      let fullMap = origBuildWeakReuploadMap.call(engine, set);
+    engine.buildWeakReuploadMap = async (set) => {
+      let fullMap = await origBuildWeakReuploadMap.call(engine, set);
       fullMap.delete(item6GUID);
       return fullMap;
     };
 
     await sync_engine_and_validate_telem(engine, false);
 
     let record1 = await store.createRecord(item1GUID);
     let record2 = await store.createRecord(item2GUID);
@@ -705,17 +716,17 @@ add_task(async function test_sync_dateAd
 
     let newerRecord2 = await store.createRecord(item2GUID);
     equal(newerRecord2.dateAdded, newRecord2.dateAdded,
       "dateAdded update should be ignored for later date if we know an earlier one ");
 
 
 
   } finally {
-    store.wipe();
+    await store.wipe();
     Svc.Prefs.resetBranch("");
     Service.recordManager.clearCache();
     await PlacesSyncUtils.bookmarks.reset();
     await promiseStopServer(server);
   }
 });
 
 function run_test() {
--- a/services/sync/tests/unit/test_bookmark_invalid.js
+++ b/services/sync/tests/unit/test_bookmark_invalid.js
@@ -1,19 +1,25 @@
 Cu.import("resource://gre/modules/Log.jsm");
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 
-Service.engineManager.register(BookmarksEngine);
+let engine;
+let store;
+let tracker;
 
-var engine = Service.engineManager.get("bookmarks");
-var store = engine._store;
-var tracker = engine._tracker;
+add_task(async function setup() {
+  initTestLogging("Trace");
+  await Service.engineManager.register(BookmarksEngine);
+  engine = Service.engineManager.get("bookmarks");
+  store = engine._store;
+  tracker = engine._tracker;
+});
 
 add_task(async function test_ignore_invalid_uri() {
   _("Ensure that we don't die with invalid bookmarks.");
 
   // First create a valid bookmark.
   let bmid = PlacesUtils.bookmarks.insertBookmark(PlacesUtils.unfiledBookmarksFolderId,
                                                   Services.io.newURI("http://example.com/"),
                                                   PlacesUtils.bookmarks.DEFAULT_INDEX,
@@ -25,17 +31,17 @@ add_task(async function test_ignore_inva
       `UPDATE moz_places SET url = :url, url_hash = hash(:url)
        WHERE id = (SELECT b.fk FROM moz_bookmarks b
        WHERE b.id = :id LIMIT 1)`,
       { id: bmid, url: "<invalid url>" });
   });
 
   // Ensure that this doesn't throw even though the DB is now in a bad state (a
   // bookmark has an illegal url).
-  engine._buildGUIDMap();
+  await engine._buildGUIDMap();
 });
 
 add_task(async function test_ignore_missing_uri() {
   _("Ensure that we don't die with a bookmark referencing an invalid bookmark id.");
 
   // First create a valid bookmark.
   let bmid = PlacesUtils.bookmarks.insertBookmark(PlacesUtils.unfiledBookmarksFolderId,
                                                   Services.io.newURI("http://example.com/"),
@@ -47,15 +53,10 @@ add_task(async function test_ignore_miss
     await db.execute(
       `UPDATE moz_bookmarks SET fk = 999999
        WHERE id = :id`
       , { id: bmid });
   });
 
   // Ensure that this doesn't throw even though the DB is now in a bad state (a
   // bookmark has an illegal url).
-  engine._buildGUIDMap();
+  await engine._buildGUIDMap();
 });
-
-function run_test() {
-  initTestLogging("Trace");
-  run_next_test();
-}
--- a/services/sync/tests/unit/test_bookmark_livemarks.js
+++ b/services/sync/tests/unit/test_bookmark_livemarks.js
@@ -6,21 +6,21 @@ Cu.import("resource://services-sync/reco
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://testing-common/services/common/utils.js");
 
 const DESCRIPTION_ANNO = "bookmarkProperties/description";
 
-var engine = Service.engineManager.get("bookmarks");
-var store = engine._store;
+let engine;
+let store;
 
 // Record borrowed from Bug 631361.
-var record631361 = {
+const record631361 = {
   id: "M5bwUKK8hPyF",
   index: 150,
   modified: 1296768176.49,
   payload:
   {"id": "M5bwUKK8hPyF",
    "type": "livemark",
    "siteUri": "http://www.bbc.co.uk/go/rss/int/news/-/news/",
    "feedUri": "http://fxfeeds.mozilla.com/en-US/firefox/headlines.xml",
@@ -47,87 +47,79 @@ var record631361 = {
       "E3H04Wn2RfSi", "eaSIMI6kSrcz", "rtkRxFoG5Vqi", "dectkUglV0Dz",
       "B4vUE0BE15No", "qgQFW5AQrgB0", "SxAXvwOhu8Zi", "0S6cRPOg-5Z2",
       "zcZZBGeLnaWW", "B0at8hkQqVZQ", "sgPtgGulbP66", "lwtwGHSCPYaQ",
       "mNTdpgoRZMbW", "-L8Vci6CbkJY", "bVzudKSQERc1", "Gxl9lb4DXsmL",
       "3Qr13GucOtEh"]},
   collection: "bookmarks"
 };
 
-// Clean up after other tests. Only necessary in XULRunner.
-store.wipe();
-
 function makeLivemark(p, mintGUID) {
   let b = new Livemark("bookmarks", p.id);
   // Copy here, because tests mutate the contents.
   b.cleartext = TestingUtils.deepCopy(p);
 
   if (mintGUID)
     b.id = Utils.makeGUID();
 
   return b;
 }
 
-
-function run_test() {
+add_task(async function setup() {
   initTestLogging("Trace");
   Log.repository.getLogger("Sync.Engine.Bookmarks").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.Store.Bookmarks").level  = Log.Level.Trace;
 
-  run_next_test();
-}
+  engine = Service.engineManager.get("bookmarks");
+  store = engine._store;
+});
 
-add_test(function test_livemark_descriptions() {
+add_task(async function test_livemark_descriptions() {
   let record = record631361.payload;
 
-  function doRecord(r) {
+  async function doRecord(r) {
     store._childrenToOrder = {};
-    store.applyIncoming(r);
-    store._orderChildren();
+    await store.applyIncoming(r);
+    await store._orderChildren();
     delete store._childrenToOrder;
   }
 
   // Attempt to provoke an error by messing around with the description.
   record.description = null;
-  doRecord(makeLivemark(record));
+  await doRecord(makeLivemark(record));
   record.description = "";
-  doRecord(makeLivemark(record));
+  await doRecord(makeLivemark(record));
 
   // Attempt to provoke an error by adding a bad description anno.
-  let id = store.idForGUID(record.id);
+  let id = await store.idForGUID(record.id);
   PlacesUtils.annotations.setItemAnnotation(id, DESCRIPTION_ANNO, "", 0,
                                             PlacesUtils.annotations.EXPIRE_NEVER);
-
-  run_next_test();
 });
 
-add_test(function test_livemark_invalid() {
+add_task(async function test_livemark_invalid() {
   _("Livemarks considered invalid by nsLivemarkService are skipped.");
 
   _("Parent is unknown. Will be set to unfiled.");
   let lateParentRec = makeLivemark(record631361.payload, true);
   let parentGUID = Utils.makeGUID();
   lateParentRec.parentid = parentGUID;
-  do_check_eq(-1, store.idForGUID(parentGUID));
+  do_check_eq(-1, (await store.idForGUID(parentGUID)));
 
-  store.create(lateParentRec);
-  let recID = store.idForGUID(lateParentRec.id, true);
+  await store.create(lateParentRec);
+  let recID = await store.idForGUID(lateParentRec.id, true);
   do_check_true(recID > 0);
   do_check_eq(PlacesUtils.bookmarks.getFolderIdForItem(recID),
               PlacesUtils.bookmarks.unfiledBookmarksFolder);
 
   _("No feed URI, which is invalid. Will be skipped.");
   let noFeedURIRec = makeLivemark(record631361.payload, true);
   delete noFeedURIRec.cleartext.feedUri;
-  store.create(noFeedURIRec);
+  await store.create(noFeedURIRec);
   // No exception, but no creation occurs.
-  do_check_eq(-1, store.idForGUID(noFeedURIRec.id, true));
+  do_check_eq(-1, (await store.idForGUID(noFeedURIRec.id, true)));
 
   _("Parent is a Livemark. Will be skipped.");
   let lmParentRec = makeLivemark(record631361.payload, true);
-  lmParentRec.parentid = store.GUIDForId(recID);
-  store.create(lmParentRec);
+  lmParentRec.parentid = await store.GUIDForId(recID);
+  await store.create(lmParentRec);
   // No exception, but no creation occurs.
-  do_check_eq(-1, store.idForGUID(lmParentRec.id, true));
-
-  // Clear event loop.
-  Utils.nextTick(run_next_test);
+  do_check_eq(-1, (await store.idForGUID(lmParentRec.id, true)));
 });
--- a/services/sync/tests/unit/test_bookmark_order.js
+++ b/services/sync/tests/unit/test_bookmark_order.js
@@ -4,23 +4,19 @@
 _("Making sure after processing incoming bookmarks, they show up in the right order");
 Cu.import("resource://gre/modules/Log.jsm");
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://services-sync/main.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
-function run_test() {
-  Svc.Prefs.set("log.logger.engine.bookmarks", "Trace");
-  initTestLogging("Trace");
-  Log.repository.getLogger("Sqlite").level = Log.Level.Info;
-
-  run_next_test();
-}
+Svc.Prefs.set("log.logger.engine.bookmarks", "Trace");
+initTestLogging("Trace");
+Log.repository.getLogger("Sqlite").level = Log.Level.Info;
 
 function serverForFoo(engine) {
   generateNewKeys(Service.collectionKeys);
 
   let clientsEngine = Service.clientsEngine;
   return serverForUsers({"foo": "password"}, {
     meta: {
       global: {
@@ -185,18 +181,18 @@ add_task(async function test_local_order
         guid: guids.mdn,
         index: 5,
       }],
     }, {
       guid: guids.tb,
       index: 2,
     }], "Should use local order as base if remote is older");
   } finally {
-    engine.wipeClient();
-    Service.startOver();
+    await engine.wipeClient();
+    await Service.startOver();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_remote_order_newer() {
   let engine = Service.engineManager.get("bookmarks");
 
   let server = serverForFoo(engine);
@@ -230,28 +226,28 @@ add_task(async function test_remote_orde
         guid: guids.customize,
         index: 5,
       }],
     }, {
       guid: guids.fx,
       index: 2,
     }], "Should use remote order as base if local is older");
   } finally {
-    engine.wipeClient();
-    Service.startOver();
+    await engine.wipeClient();
+    await Service.startOver();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_bookmark_order() {
   let engine = new BookmarksEngine(Service);
   let store = engine._store;
 
   _("Starting with a clean slate of no bookmarks");
-  store.wipe();
+  await store.wipe();
   await assertBookmarksTreeMatches("", [{
     guid: PlacesUtils.bookmarks.menuGuid,
     index: 0,
   }, {
     guid: PlacesUtils.bookmarks.toolbarGuid,
     index: 1,
   }, {
     // Index 2 is the tags root. (Root indices depend on the order of the
@@ -279,17 +275,17 @@ add_task(async function test_bookmark_or
     bmFolder.title = name;
     bmFolder.parentid = parent || "unfiled";
     bmFolder.children = children;
     return bmFolder;
   }
 
   async function apply(record) {
     store._childrenToOrder = {};
-    store.applyIncoming(record);
+    await store.applyIncoming(record);
     await store._orderChildren();
     delete store._childrenToOrder;
   }
   let id10 = "10_aaaaaaaaa";
   _("basic add first bookmark");
   await apply(bookmark(id10, ""));
   await assertBookmarksTreeMatches("", [{
     guid: PlacesUtils.bookmarks.menuGuid,
--- a/services/sync/tests/unit/test_bookmark_places_query_rewriting.js
+++ b/services/sync/tests/unit/test_bookmark_places_query_rewriting.js
@@ -1,59 +1,59 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 _("Rewrite place: URIs.");
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 
-var engine = new BookmarksEngine(Service);
-var store = engine._store;
+let engine = new BookmarksEngine(Service);
+let store = engine._store;
 
 function makeTagRecord(id, uri) {
   let tagRecord = new BookmarkQuery("bookmarks", id);
   tagRecord.queryId = "MagicTags";
   tagRecord.parentName = "Bookmarks Toolbar";
   tagRecord.bmkUri = uri;
   tagRecord.title = "tagtag";
   tagRecord.folderName = "bar";
   tagRecord.parentid = PlacesUtils.bookmarks.toolbarGuid;
   return tagRecord;
 }
 
-function run_test() {
+add_task(async function run_test() {
   initTestLogging("Trace");
   Log.repository.getLogger("Sync.Engine.Bookmarks").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.Store.Bookmarks").level = Log.Level.Trace;
 
   let uri = "place:folder=499&type=7&queryType=1";
   let tagRecord = makeTagRecord("abcdefabcdef", uri);
 
   _("Type: " + tagRecord.type);
   _("Folder name: " + tagRecord.folderName);
-  store.applyIncoming(tagRecord);
+  await store.applyIncoming(tagRecord);
 
   let tags = PlacesUtils.getFolderContents(PlacesUtils.tagsFolderId).root;
   let tagID;
   try {
     for (let i = 0; i < tags.childCount; ++i) {
       let child = tags.getChild(i);
       if (child.title == "bar") {
         tagID = child.itemId;
       }
     }
   } finally {
     tags.containerOpen = false;
   }
 
   _("Tag ID: " + tagID);
-  let insertedRecord = store.createRecord("abcdefabcdef", "bookmarks");
+  let insertedRecord = await store.createRecord("abcdefabcdef", "bookmarks");
   do_check_eq(insertedRecord.bmkUri, uri.replace("499", tagID));
 
   _("... but not if the type is wrong.");
   let wrongTypeURI = "place:folder=499&type=2&queryType=1";
   let wrongTypeRecord = makeTagRecord("fedcbafedcba", wrongTypeURI);
-  store.applyIncoming(wrongTypeRecord);
+  await store.applyIncoming(wrongTypeRecord);
 
-  insertedRecord = store.createRecord("fedcbafedcba", "bookmarks");
+  insertedRecord = await store.createRecord("fedcbafedcba", "bookmarks");
   do_check_eq(insertedRecord.bmkUri, wrongTypeURI);
-}
+});
--- a/services/sync/tests/unit/test_bookmark_repair.js
+++ b/services/sync/tests/unit/test_bookmark_repair.js
@@ -8,44 +8,49 @@ Cu.import("resource://gre/modules/osfile
 Cu.import("resource://services-sync/bookmark_repair.js");
 Cu.import("resource://services-sync/constants.js");
 Cu.import("resource://services-sync/doctor.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/engines/clients.js");
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
+initTestLogging("Trace");
+Log.repository.getLogger("Sync.Engine.Bookmarks").level = Log.Level.Trace;
+Log.repository.getLogger("Sync.Engine.Clients").level = Log.Level.Trace;
+Log.repository.getLogger("Sqlite").level = Log.Level.Info; // less noisy
+
 const LAST_BOOKMARK_SYNC_PREFS = [
   "bookmarks.lastSync",
   "bookmarks.lastSyncLocal",
 ];
 
 const BOOKMARK_REPAIR_STATE_PREFS = [
   "client.GUID",
   "doctor.lastRepairAdvance",
   ...LAST_BOOKMARK_SYNC_PREFS,
   ...Object.values(BookmarkRepairRequestor.PREF).map(name =>
     `repairs.bookmarks.${name}`
   ),
 ];
 
-initTestLogging("Trace");
-Log.repository.getLogger("Sync.Engine.Bookmarks").level = Log.Level.Trace
-Log.repository.getLogger("Sync.Engine.Clients").level = Log.Level.Trace
-Log.repository.getLogger("Sqlite").level = Log.Level.Info; // less noisy
+let clientsEngine;
+let bookmarksEngine;
+let recordedEvents = [];
 
-let clientsEngine = Service.clientsEngine;
-let bookmarksEngine = Service.engineManager.get("bookmarks");
-
-generateNewKeys(Service.collectionKeys);
+add_task(async function setup() {
+  clientsEngine = Service.clientsEngine;
+  bookmarksEngine = Service.engineManager.get("bookmarks");
 
-var recordedEvents = [];
-Service.recordTelemetryEvent = (object, method, value, extra = undefined) => {
-  recordedEvents.push({ object, method, value, extra });
-};
+  generateNewKeys(Service.collectionKeys);
+
+  Service.recordTelemetryEvent = (object, method, value, extra = undefined) => {
+    recordedEvents.push({ object, method, value, extra });
+  };
+});
 
 function checkRecordedEvents(expected, message) {
   deepEqual(recordedEvents, expected, message);
   // and clear the list so future checks are easier to write.
   recordedEvents = [];
 }
 
 // Backs up and resets all preferences to their default values. Returns a
@@ -71,18 +76,18 @@ async function promiseValidationDone(exp
   let summary = validationResult.problems.getSummary();
   let actual = summary.filter(({name, count}) => count);
   actual.sort((a, b) => String(a.name).localeCompare(b.name));
   expected.sort((a, b) => String(a.name).localeCompare(b.name));
   deepEqual(actual, expected);
 }
 
 async function cleanup(server) {
-  bookmarksEngine._store.wipe();
-  clientsEngine._store.wipe();
+  await bookmarksEngine._store.wipe();
+  await clientsEngine._store.wipe();
   Svc.Prefs.resetBranch("");
   Service.recordManager.clearCache();
   await promiseStopServer(server);
 }
 
 add_task(async function test_bookmark_repair_integration() {
   enableValidationPrefs();
 
@@ -93,17 +98,17 @@ add_task(async function test_bookmark_re
 
   let user = server.user("foo");
 
   let initialID = Service.clientsEngine.localID;
   let remoteID = Utils.makeGUID();
   try {
 
     _("Syncing to initialize crypto etc.");
-    Service.sync();
+    await Service.sync();
 
     _("Create remote client record");
     server.insertWBO("foo", "clients", new ServerWBO(remoteID, encryptPayload({
       id: remoteID,
       name: "Remote client",
       type: "desktop",
       commands: [],
       version: "54",
@@ -119,42 +124,42 @@ add_task(async function test_bookmark_re
     let bookmarkInfo = await PlacesUtils.bookmarks.insert({
       parentGuid: folderInfo.guid,
       url: "http://getfirefox.com/",
       title: "Get Firefox!",
     });
 
     _(`Upload ${folderInfo.guid} and ${bookmarkInfo.guid} to server`);
     let validationPromise = promiseValidationDone([]);
-    Service.sync();
+    await Service.sync();
     equal(clientsEngine.stats.numClients, 2, "Clients collection should have 2 records");
     await validationPromise;
     checkRecordedEvents([], "Should not start repair after first sync");
 
     _("Back up last sync timestamps for remote client");
     let restoreRemoteLastBookmarkSync = backupPrefs(LAST_BOOKMARK_SYNC_PREFS);
 
     _(`Delete ${bookmarkInfo.guid} locally and on server`);
     // Now we will reach into the server and hard-delete the bookmark
     user.collection("bookmarks").remove(bookmarkInfo.guid);
     // And delete the bookmark, but cheat by telling places that Sync did
     // it, so we don't end up with a tombstone.
     await PlacesUtils.bookmarks.remove(bookmarkInfo.guid, {
       source: PlacesUtils.bookmarks.SOURCE_SYNC,
     });
-    deepEqual(bookmarksEngine.pullNewChanges(), {},
+    deepEqual((await bookmarksEngine.pullNewChanges()), {},
       `Should not upload tombstone for ${bookmarkInfo.guid}`);
 
     // sync again - we should have a few problems...
     _("Sync again to trigger repair");
     validationPromise = promiseValidationDone([
       {"name": "missingChildren", "count": 1},
       {"name": "structuralDifferences", "count": 1},
     ]);
-    Service.sync();
+    await Service.sync();
     await validationPromise;
     let flowID = Svc.Prefs.get("repairs.bookmarks.flowID");
     checkRecordedEvents([{
       object: "repair",
       method: "started",
       value: undefined,
       extra: {
         flowID,
@@ -175,42 +180,43 @@ add_task(async function test_bookmark_re
       extra: {
         deviceID: Service.identity.hashedDeviceID(remoteID),
         flowID,
         numIDs: "1",
       },
     }], "Should record telemetry events for repair request");
 
     // We should have started a repair with our second client.
-    equal(clientsEngine.getClientCommands(remoteID).length, 1,
+    equal((await clientsEngine.getClientCommands(remoteID)).length, 1,
       "Should queue repair request for remote client after repair");
     _("Sync to send outgoing repair request");
-    Service.sync();
-    equal(clientsEngine.getClientCommands(remoteID).length, 0,
+    await Service.sync();
+    equal((await clientsEngine.getClientCommands(remoteID)).length, 0,
       "Should send repair request to remote client after next sync");
     checkRecordedEvents([],
       "Should not record repair telemetry after sending repair request");
 
     _("Back up repair state to restore later");
     let restoreInitialRepairState = backupPrefs(BOOKMARK_REPAIR_STATE_PREFS);
 
     // so now let's take over the role of that other client!
     _("Create new clients engine pretending to be remote client");
     let remoteClientsEngine = Service.clientsEngine = new ClientEngine(Service);
+    await remoteClientsEngine.initialize();
     remoteClientsEngine.localID = remoteID;
 
     _("Restore missing bookmark");
     // Pretend Sync wrote the bookmark, so that we upload it as part of the
     // repair instead of the sync.
     bookmarkInfo.source = PlacesUtils.bookmarks.SOURCE_SYNC;
     await PlacesUtils.bookmarks.insert(bookmarkInfo);
     restoreRemoteLastBookmarkSync();
 
     _("Sync as remote client");
-    Service.sync();
+    await Service.sync();
     checkRecordedEvents([{
       object: "processcommand",
       method: "repairRequest",
       value: undefined,
       extra: {
         flowID,
       },
     }, {
@@ -235,24 +241,24 @@ add_task(async function test_bookmark_re
       value: undefined,
       extra: {
         flowID,
         numIDs: "1",
       }
     }], "Should record telemetry events for repair response");
 
     // We should queue the repair response for the initial client.
-    equal(remoteClientsEngine.getClientCommands(initialID).length, 1,
+    equal((await remoteClientsEngine.getClientCommands(initialID)).length, 1,
       "Should queue repair response for initial client after repair");
     ok(user.collection("bookmarks").wbo(bookmarkInfo.guid),
       "Should upload missing bookmark");
 
     _("Sync to upload bookmark and send outgoing repair response");
-    Service.sync();
-    equal(remoteClientsEngine.getClientCommands(initialID).length, 0,
+    await Service.sync();
+    equal((await remoteClientsEngine.getClientCommands(initialID)).length, 0,
       "Should send repair response to initial client after next sync");
     checkRecordedEvents([],
       "Should not record repair telemetry after sending repair response");
     ok(!Services.prefs.prefHasUserValue("services.sync.repairs.bookmarks.state"),
       "Remote client should not be repairing");
 
     _("Pretend to be initial client again");
     Service.clientsEngine = clientsEngine;
@@ -262,17 +268,17 @@ add_task(async function test_bookmark_re
       source: PlacesUtils.bookmarks.SOURCE_SYNC,
     });
     restoreInitialRepairState();
     ok(Services.prefs.prefHasUserValue("services.sync.repairs.bookmarks.state"),
       "Initial client should still be repairing");
 
     _("Sync as initial client");
     let revalidationPromise = promiseValidationDone([]);
-    Service.sync();
+    await Service.sync();
     let restoredBookmarkInfo = await PlacesUtils.bookmarks.fetch(bookmarkInfo.guid);
     ok(restoredBookmarkInfo, "Missing bookmark should be downloaded to initial client");
     checkRecordedEvents([{
       object: "processcommand",
       method: "repairResponse",
       value: undefined,
       extra: {
         flowID,
@@ -296,32 +302,33 @@ add_task(async function test_bookmark_re
       },
     }]);
     await revalidationPromise;
     ok(!Services.prefs.prefHasUserValue("services.sync.repairs.bookmarks.state"),
       "Should clear repair pref after successfully completing repair");
   } finally {
     await cleanup(server);
     clientsEngine = Service.clientsEngine = new ClientEngine(Service);
+    await clientsEngine.initialize();
   }
 });
 
 add_task(async function test_repair_client_missing() {
   enableValidationPrefs();
 
   _("Ensure that a record missing from the client only will get re-downloaded from the server");
 
   let server = serverForFoo(bookmarksEngine);
   await SyncTestingInfrastructure(server);
 
   let remoteID = Utils.makeGUID();
   try {
 
     _("Syncing to initialize crypto etc.");
-    Service.sync();
+    await Service.sync();
 
     _("Create remote client record");
     server.insertWBO("foo", "clients", new ServerWBO(remoteID, encryptPayload({
       id: remoteID,
       name: "Remote client",
       type: "desktop",
       commands: [],
       version: "54",
@@ -331,45 +338,45 @@ add_task(async function test_repair_clie
     let bookmarkInfo = await PlacesUtils.bookmarks.insert({
       parentGuid: PlacesUtils.bookmarks.toolbarGuid,
       url: "http://getfirefox.com/",
       title: "Get Firefox!",
     });
 
     let validationPromise = promiseValidationDone([]);
     _("Syncing.");
-    Service.sync();
+    await Service.sync();
     // should have 2 clients
     equal(clientsEngine.stats.numClients, 2)
     await validationPromise;
 
     // Delete the bookmark localy, but cheat by telling places that Sync did
     // it, so Sync still thinks we have it.
     await PlacesUtils.bookmarks.remove(bookmarkInfo.guid, {
       source: PlacesUtils.bookmarks.SOURCE_SYNC,
     });
     // sanity check we aren't going to sync this removal.
-    do_check_empty(bookmarksEngine.pullNewChanges());
+    do_check_empty((await bookmarksEngine.pullNewChanges()));
     // sanity check that the bookmark is not there anymore
     do_check_false(await PlacesUtils.bookmarks.fetch(bookmarkInfo.guid));
 
     // sync again - we should have a few problems...
     _("Syncing again.");
     validationPromise = promiseValidationDone([
       {"name": "clientMissing", "count": 1},
       {"name": "structuralDifferences", "count": 1},
     ]);
-    Service.sync();
+    await Service.sync();
     await validationPromise;
 
     // We shouldn't have started a repair with our second client.
-    equal(clientsEngine.getClientCommands(remoteID).length, 0);
+    equal((await clientsEngine.getClientCommands(remoteID)).length, 0);
 
     // Trigger a sync (will request the missing item)
-    Service.sync();
+    await Service.sync();
 
     // And we got our bookmark back
     do_check_true(await PlacesUtils.bookmarks.fetch(bookmarkInfo.guid));
   } finally {
     await cleanup(server);
   }
 });
 
@@ -382,17 +389,17 @@ add_task(async function test_repair_serv
   await SyncTestingInfrastructure(server);
 
   let user = server.user("foo");
 
   let remoteID = Utils.makeGUID();
   try {
 
     _("Syncing to initialize crypto etc.");
-    Service.sync();
+    await Service.sync();
 
     _("Create remote client record");
     server.insertWBO("foo", "clients", new ServerWBO(remoteID, encryptPayload({
       id: remoteID,
       name: "Remote client",
       type: "desktop",
       commands: [],
       version: "54",
@@ -402,38 +409,38 @@ add_task(async function test_repair_serv
     let bookmarkInfo = await PlacesUtils.bookmarks.insert({
       parentGuid: PlacesUtils.bookmarks.toolbarGuid,
       url: "http://getfirefox.com/",
       title: "Get Firefox!",
     });
 
     let validationPromise = promiseValidationDone([]);
     _("Syncing.");
-    Service.sync();
+    await Service.sync();
     // should have 2 clients
     equal(clientsEngine.stats.numClients, 2)
     await validationPromise;
 
     // Now we will reach into the server and hard-delete the bookmark
     user.collection("bookmarks").wbo(bookmarkInfo.guid).delete();
 
     // sync again - we should have a few problems...
     _("Syncing again.");
     validationPromise = promiseValidationDone([
       {"name": "serverMissing", "count": 1},
       {"name": "missingChildren", "count": 1},
     ]);
-    Service.sync();
+    await Service.sync();
     await validationPromise;
 
     // We shouldn't have started a repair with our second client.
-    equal(clientsEngine.getClientCommands(remoteID).length, 0);
+    equal((await clientsEngine.getClientCommands(remoteID)).length, 0);
 
     // Trigger a sync (will upload the missing item)
-    Service.sync();
+    await Service.sync();
 
     // And the server got our bookmark back
     do_check_true(user.collection("bookmarks").wbo(bookmarkInfo.guid));
   } finally {
     await cleanup(server);
   }
 });
 
@@ -444,17 +451,17 @@ add_task(async function test_repair_serv
 
   let server = serverForFoo(bookmarksEngine);
   await SyncTestingInfrastructure(server);
 
   let remoteID = Utils.makeGUID();
   try {
 
     _("Syncing to initialize crypto etc.");
-    Service.sync();
+    await Service.sync();
 
     _("Create remote client record");
     server.insertWBO("foo", "clients", new ServerWBO(remoteID, encryptPayload({
       id: remoteID,
       name: "Remote client",
       type: "desktop",
       commands: [],
       version: "54",
@@ -464,17 +471,17 @@ add_task(async function test_repair_serv
     let bookmarkInfo = await PlacesUtils.bookmarks.insert({
       parentGuid: PlacesUtils.bookmarks.toolbarGuid,
       url: "http://getfirefox.com/",
       title: "Get Firefox!",
     });
 
     let validationPromise = promiseValidationDone([]);
     _("Syncing.");
-    Service.sync();
+    await Service.sync();
     // should have 2 clients
     equal(clientsEngine.stats.numClients, 2)
     await validationPromise;
 
     // Now we will reach into the server and create a tombstone for that bookmark
     server.insertWBO("foo", "bookmarks", new ServerWBO(bookmarkInfo.guid, encryptPayload({
       id: bookmarkInfo.guid,
       deleted: true,
@@ -482,23 +489,23 @@ add_task(async function test_repair_serv
 
     // sync again - we should have a few problems...
     _("Syncing again.");
     validationPromise = promiseValidationDone([
       {"name": "serverDeleted", "count": 1},
       {"name": "deletedChildren", "count": 1},
       {"name": "orphans", "count": 1}
     ]);
-    Service.sync();
+    await Service.sync();
     await validationPromise;
 
     // We shouldn't have started a repair with our second client.
-    equal(clientsEngine.getClientCommands(remoteID).length, 0);
+    equal((await clientsEngine.getClientCommands(remoteID)).length, 0);
 
     // Trigger a sync (will upload the missing item)
-    Service.sync();
+    await Service.sync();
 
     // And the client deleted our bookmark
     do_check_true(!(await PlacesUtils.bookmarks.fetch(bookmarkInfo.guid)));
   } finally {
     await cleanup(server);
   }
 });
--- a/services/sync/tests/unit/test_bookmark_repair_requestor.js
+++ b/services/sync/tests/unit/test_bookmark_repair_requestor.js
@@ -23,23 +23,23 @@ class MockClientsEngine {
   get remoteClients() {
     return Object.values(this._clientList);
   }
 
   remoteClient(id) {
     return this._clientList[id];
   }
 
-  sendCommand(command, args, clientID) {
+  async sendCommand(command, args, clientID) {
     let cc = this._sentCommands[clientID] || [];
     cc.push({ command, args });
     this._sentCommands[clientID] = cc;
   }
 
-  getClientCommands(clientID) {
+  async getClientCommands(clientID) {
     return this._sentCommands[clientID] || [];
   }
 }
 
 class MockIdentity {
   hashedDeviceID(did) {
     return did; // don't hash it to make testing easier.
   }
@@ -93,17 +93,17 @@ add_task(async function test_requestor_n
         {parent: "x", child: "b"},
         {parent: "x", child: "c"}
       ],
       orphans: [],
     }
   }
   let flowID = Utils.makeGUID();
 
-  requestor.startRepairs(validationInfo, flowID);
+  await requestor.startRepairs(validationInfo, flowID);
   // there are no clients, so we should end up in "finished" (which we need to
   // check via telemetry)
   deepEqual(mockService._recordedEvents, [
     { object: "repair",
       method: "started",
       value: undefined,
       extra: { flowID, numIDs: 3 },
     },
@@ -124,36 +124,36 @@ add_task(async function test_requestor_o
         {parent: "x", child: "a"},
         {parent: "x", child: "b"},
         {parent: "x", child: "c"}
       ],
       orphans: [],
     }
   }
   let flowID = Utils.makeGUID();
-  requestor.startRepairs(validationInfo, flowID);
+  await requestor.startRepairs(validationInfo, flowID);
   // the command should now be outgoing.
   checkOutgoingCommand(mockService, "client-a");
 
   checkState(BookmarkRepairRequestor.STATE.SENT_REQUEST);
   // asking it to continue stays in that state until we timeout or the command
   // is removed.
-  requestor.continueRepairs();
+  await requestor.continueRepairs();
   checkState(BookmarkRepairRequestor.STATE.SENT_REQUEST);
 
   // now pretend that client synced.
   mockService.clientsEngine._sentCommands = {};
-  requestor.continueRepairs();
+  await requestor.continueRepairs();
   checkState(BookmarkRepairRequestor.STATE.SENT_SECOND_REQUEST);
   // the command should be outgoing again.
   checkOutgoingCommand(mockService, "client-a");
 
   // pretend that client synced again without writing a command.
   mockService.clientsEngine._sentCommands = {};
-  requestor.continueRepairs();
+  await requestor.continueRepairs();
   // There are no more clients, so we've given up.
 
   checkRepairFinished();
   deepEqual(mockService._recordedEvents, [
     { object: "repair",
       method: "started",
       value: undefined,
       extra: { flowID, numIDs: 3 },
@@ -185,27 +185,27 @@ add_task(async function test_requestor_o
         {parent: "x", child: "a"},
         {parent: "x", child: "b"},
         {parent: "x", child: "c"}
       ],
       orphans: [],
     }
   }
   let flowID = Utils.makeGUID();
-  requestor.startRepairs(validationInfo, flowID);
+  await requestor.startRepairs(validationInfo, flowID);
   // the command should now be outgoing.
   checkOutgoingCommand(mockService, "client-a");
 
   checkState(BookmarkRepairRequestor.STATE.SENT_REQUEST);
 
   // pretend we are now in the future.
   let theFuture = Date.now() + 300000000;
   requestor._now = () => theFuture;
 
-  requestor.continueRepairs();
+  await requestor.continueRepairs();
 
   // We should be finished as we gave up in disgust.
   checkRepairFinished();
   deepEqual(mockService._recordedEvents, [
     { object: "repair",
       method: "started",
       value: undefined,
       extra: { flowID, numIDs: 3 },
@@ -237,17 +237,17 @@ add_task(async function test_requestor_l
   let validationInfo = {
     problems: {
       missingChildren: [
         { parent: "x", child: "a" },
       ],
       orphans: [],
     }
   }
-  requestor.startRepairs(validationInfo, Utils.makeGUID());
+  await requestor.startRepairs(validationInfo, Utils.makeGUID());
   // the repair command should be outgoing to the most-recent client.
   checkOutgoingCommand(mockService, "client-late");
   checkState(BookmarkRepairRequestor.STATE.SENT_REQUEST);
   // and this test is done - reset the repair.
   requestor.prefs.resetBranch();
 });
 
 add_task(async function test_requestor_client_vanishes() {
@@ -262,40 +262,40 @@ add_task(async function test_requestor_c
         {parent: "x", child: "a"},
         {parent: "x", child: "b"},
         {parent: "x", child: "c"}
       ],
       orphans: [],
     }
   }
   let flowID = Utils.makeGUID();
-  requestor.startRepairs(validationInfo, flowID);
+  await requestor.startRepairs(validationInfo, flowID);
   // the command should now be outgoing.
   checkOutgoingCommand(mockService, "client-a");
 
   checkState(BookmarkRepairRequestor.STATE.SENT_REQUEST);
 
   mockService.clientsEngine._sentCommands = {};
   // Now let's pretend the client vanished.
   delete mockService.clientsEngine._clientList["client-a"];
 
-  requestor.continueRepairs();
+  await requestor.continueRepairs();
   // We should have moved on to client-b.
   checkState(BookmarkRepairRequestor.STATE.SENT_REQUEST);
   checkOutgoingCommand(mockService, "client-b");
 
   // Now let's pretend client B wrote all missing IDs.
   let response = {
     collection: "bookmarks",
     request: "upload",
     flowID: requestor._flowID,
     clientID: "client-b",
     ids: ["a", "b", "c"],
   }
-  requestor.continueRepairs(response);
+  await requestor.continueRepairs(response);
 
   // We should be finished as we got all our IDs.
   checkRepairFinished();
   deepEqual(mockService._recordedEvents, [
     { object: "repair",
       method: "started",
       value: undefined,
       extra: { flowID, numIDs: 3 },
@@ -340,45 +340,45 @@ add_task(async function test_requestor_s
         {parent: "x", child: "a"},
         {parent: "x", child: "b"},
         {parent: "x", child: "c"}
       ],
       orphans: [],
     }
   }
   let flowID = Utils.makeGUID();
-  requestor.startRepairs(validationInfo, flowID);
+  await requestor.startRepairs(validationInfo, flowID);
   // the command should now be outgoing.
   checkOutgoingCommand(mockService, "client-a");
 
   checkState(BookmarkRepairRequestor.STATE.SENT_REQUEST);
 
   mockService.clientsEngine._sentCommands = {};
   // Now let's pretend the client wrote a response.
   let response = {
     collection: "bookmarks",
     request: "upload",
     clientID: "client-a",
     flowID: requestor._flowID,
     ids: ["a", "b"],
   }
-  requestor.continueRepairs(response);
+  await requestor.continueRepairs(response);
   // We should have moved on to client 2.
   checkState(BookmarkRepairRequestor.STATE.SENT_REQUEST);
   checkOutgoingCommand(mockService, "client-b");
 
   // Now let's pretend client B write the missing ID.
   response = {
     collection: "bookmarks",
     request: "upload",
     clientID: "client-b",
     flowID: requestor._flowID,
     ids: ["c"],
   }
-  requestor.continueRepairs(response);
+  await requestor.continueRepairs(response);
 
   // We should be finished as we got all our IDs.
   checkRepairFinished();
   deepEqual(mockService._recordedEvents, [
     { object: "repair",
       method: "started",
       value: undefined,
       extra: { flowID, numIDs: 3 },
@@ -436,17 +436,17 @@ add_task(async function test_requestor_a
         {parent: "x", child: "b"},
         {parent: "x", child: "c"}
       ],
       orphans: [],
     }
   }
   let flowID = Utils.makeGUID();
 
-  ok(!requestor.startRepairs(validationInfo, flowID),
+  ok(!(await requestor.startRepairs(validationInfo, flowID)),
      "Shouldn't start repairs");
   equal(mockService._recordedEvents.length, 1);
   equal(mockService._recordedEvents[0].method, "aborted");
 });
 
 add_task(async function test_requestor_already_repairing_continue() {
   let clientB = makeClientRecord("client-b")
   let mockService = new MockService({
@@ -460,17 +460,17 @@ add_task(async function test_requestor_a
         {parent: "x", child: "a"},
         {parent: "x", child: "b"},
         {parent: "x", child: "c"}
       ],
       orphans: [],
     }
   }
   let flowID = Utils.makeGUID();
-  requestor.startRepairs(validationInfo, flowID);
+  await requestor.startRepairs(validationInfo, flowID);
   // the command should now be outgoing.
   checkOutgoingCommand(mockService, "client-a");
 
   checkState(BookmarkRepairRequestor.STATE.SENT_REQUEST);
   mockService.clientsEngine._sentCommands = {};
 
   // Now let's pretend the client wrote a response (it doesn't matter what's in here)
   let response = {
@@ -483,17 +483,17 @@ add_task(async function test_requestor_a
 
   // and another client also started a request
   clientB.commands = [{
     args: [{ collection: "bookmarks", flowID: "asdf" }],
     command: "repairRequest",
   }];
 
 
-  requestor.continueRepairs(response);
+  await requestor.continueRepairs(response);
 
   // We should have aborted now
   checkRepairFinished();
   const expected = [
     { method: "started",
       object: "repair",
       value: undefined,
       extra: { flowID, numIDs: "3" },
--- a/services/sync/tests/unit/test_bookmark_repair_responder.js
+++ b/services/sync/tests/unit/test_bookmark_repair_responder.js
@@ -11,58 +11,61 @@ Cu.import("resource://services-sync/serv
 Cu.import("resource://services-sync/bookmark_repair.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
 initTestLogging("Trace");
 Log.repository.getLogger("Sync.Engine.Bookmarks").level = Log.Level.Trace;
 // sqlite logging generates lots of noise and typically isn't helpful here.
 Log.repository.getLogger("Sqlite").level = Log.Level.Error;
 
+// Disable validation so that we don't try to automatically repair the server
+// when we sync.
+Svc.Prefs.set("engine.bookmarks.validation.enabled", false);
+
 // stub telemetry so we can easily check the right things are recorded.
 var recordedEvents = [];
-Service.recordTelemetryEvent = (object, method, value, extra = undefined) => {
-  recordedEvents.push({ object, method, value, extra });
-};
 
 function checkRecordedEvents(expected) {
   deepEqual(recordedEvents, expected);
   // and clear the list so future checks are easier to write.
   recordedEvents = [];
 }
 
 function getServerBookmarks(server) {
   return server.user("foo").collection("bookmarks");
 }
 
-async function setup() {
-  let bookmarksEngine = Service.engineManager.get("bookmarks");
-
+async function makeServer() {
   let server = serverForFoo(bookmarksEngine);
   await SyncTestingInfrastructure(server);
-
-  // Disable validation so that we don't try to automatically repair the server
-  // when we sync.
-  Svc.Prefs.set("engine.bookmarks.validation.enabled", false);
-
   return server;
 }
 
 async function cleanup(server) {
   await promiseStopServer(server);
   await PlacesSyncUtils.bookmarks.wipe();
-  Svc.Prefs.reset("engine.bookmarks.validation.enabled");
   // clear keys so when each test finds a different server it accepts its keys.
   Service.collectionKeys.clear();
 }
 
+let bookmarksEngine;
+
+add_task(async function setup() {
+  bookmarksEngine = Service.engineManager.get("bookmarks");
+
+  Service.recordTelemetryEvent = (object, method, value, extra = undefined) => {
+    recordedEvents.push({ object, method, value, extra });
+  };
+});
+
 add_task(async function test_responder_error() {
-  let server = await setup();
+  let server = await makeServer();
 
   // sync so the collection is created.
-  Service.sync();
+  await Service.sync();
 
   let request = {
     request: "upload",
     ids: [Utils.makeGUID()],
     flowID: Utils.makeGUID(),
   }
   let responder = new BookmarkRepairResponder();
   // mock the responder to simulate an error.
@@ -81,20 +84,20 @@ add_task(async function test_responder_e
       }
     },
   ]);
 
   await cleanup(server);
 });
 
 add_task(async function test_responder_no_items() {
-  let server = await setup();
+  let server = await makeServer();
 
   // sync so the collection is created.
-  Service.sync();
+  await Service.sync();
 
   let request = {
     request: "upload",
     ids: [Utils.makeGUID()],
     flowID: Utils.makeGUID(),
   }
   let responder = new BookmarkRepairResponder();
   await responder.repair(request, null);
@@ -107,26 +110,26 @@ add_task(async function test_responder_n
     },
   ]);
 
   await cleanup(server);
 });
 
 // One item requested and we have it locally, but it's not yet on the server.
 add_task(async function test_responder_upload() {
-  let server = await setup();
+  let server = await makeServer();
 
   // Pretend we've already synced this bookmark, so that we can ensure it's
   // uploaded in response to our repair request.
   let bm = await PlacesUtils.bookmarks.insert({ parentGuid: PlacesUtils.bookmarks.unfiledGuid,
                                                 title: "Get Firefox",
                                                 url: "http://getfirefox.com/",
                                                 source: PlacesUtils.bookmarks.SOURCES.SYNC });
 
-  Service.sync();
+  await Service.sync();
   deepEqual(getServerBookmarks(server).keys().sort(), [
     "menu",
     "mobile",
     "toolbar",
     "unfiled",
   ], "Should only upload roots on first sync");
 
   let request = {
@@ -140,17 +143,17 @@ add_task(async function test_responder_u
   checkRecordedEvents([
     { object: "repairResponse",
       method: "uploading",
       value: undefined,
       extra: {flowID: request.flowID, numIDs: "1"},
     },
   ]);
 
-  Service.sync();
+  await Service.sync();
   deepEqual(getServerBookmarks(server).keys().sort(), [
     "menu",
     "mobile",
     "toolbar",
     "unfiled",
     bm.guid,
   ].sort(), "Should upload requested bookmark on second sync");
 
@@ -163,24 +166,24 @@ add_task(async function test_responder_u
   ]);
 
   await cleanup(server);
 });
 
 // One item requested and we have it locally and it's already on the server.
 // As it was explicitly requested, we should upload it.
 add_task(async function test_responder_item_exists_locally() {
-  let server = await setup();
+  let server = await makeServer();
 
   let bm = await PlacesUtils.bookmarks.insert({ parentGuid: PlacesUtils.bookmarks.unfiledGuid,
                                                 title: "Get Firefox",
                                                 url: "http://getfirefox.com/" });
   // first sync to get the item on the server.
   _("Syncing to get item on the server");
-  Service.sync();
+  await Service.sync();
 
   // issue a repair request for it.
   let request = {
     request: "upload",
     ids: [bm.guid],
     flowID: Utils.makeGUID(),
   }
   let responder = new BookmarkRepairResponder();
@@ -191,57 +194,57 @@ add_task(async function test_responder_i
     { object: "repairResponse",
       method: "uploading",
       value: undefined,
       extra: {flowID: request.flowID, numIDs: "1"},
     },
   ]);
 
   _("Syncing to do the upload.");
-  Service.sync();
+  await Service.sync();
 
   checkRecordedEvents([
     { object: "repairResponse",
       method: "finished",
       value: undefined,
       extra: {flowID: request.flowID, numIDs: "1"},
     },
   ]);
   await cleanup(server);
 });
 
 add_task(async function test_responder_tombstone() {
-  let server = await setup();
+  let server = await makeServer();
 
   // TODO: Request an item for which we have a tombstone locally. Decide if
   // we want to store tombstones permanently for this. In the integration
   // test, we can also try requesting a deleted child or ancestor.
 
   // For now, we'll handle this identically to `test_responder_missing_items`.
   // Bug 1343103 is a follow-up to better handle this.
   await cleanup(server);
 });
 
 add_task(async function test_responder_missing_items() {
-  let server = await setup();
+  let server = await makeServer();
 
   let fxBmk = await PlacesUtils.bookmarks.insert({
     parentGuid: PlacesUtils.bookmarks.unfiledGuid,
     title: "Get Firefox",
     url: "http://getfirefox.com/",
   });
   let tbBmk = await PlacesUtils.bookmarks.insert({
     parentGuid: PlacesUtils.bookmarks.unfiledGuid,
     title: "Get Thunderbird",
     url: "http://getthunderbird.com/",
     // Pretend we've already synced Thunderbird.
     source: PlacesUtils.bookmarks.SOURCES.SYNC,
   });
 
-  Service.sync();
+  await Service.sync();
   deepEqual(getServerBookmarks(server).keys().sort(), [
     "menu",
     "mobile",
     "toolbar",
     "unfiled",
     fxBmk.guid,
   ].sort(), "Should upload roots and Firefox on first sync");
 
@@ -258,17 +261,17 @@ add_task(async function test_responder_m
     { object: "repairResponse",
       method: "uploading",
       value: undefined,
       extra: {flowID: request.flowID, numIDs: "2"},
     },
   ]);
 
   _("Sync after requesting IDs");
-  Service.sync();
+  await Service.sync();
   deepEqual(getServerBookmarks(server).keys().sort(), [
     "menu",
     "mobile",
     "toolbar",
     "unfiled",
     fxBmk.guid,
     tbBmk.guid,
   ].sort(), "Second sync should upload Thunderbird; skip nonexistent");
@@ -280,19 +283,19 @@ add_task(async function test_responder_m
       extra: {flowID: request.flowID, numIDs: "2"},
     },
   ]);
 
   await cleanup(server);
 });
 
 add_task(async function test_non_syncable() {
-  let server = await setup();
+  let server = await makeServer();
 
-  Service.sync(); // to create the collections on the server.
+  await Service.sync(); // to create the collections on the server.
 
   // Creates the left pane queries as a side effect.
   let leftPaneId = PlacesUIUtils.leftPaneFolderId;
   _(`Left pane root ID: ${leftPaneId}`);
   await PlacesTestUtils.promiseAsyncUpdates();
 
   // A child folder of the left pane root, containing queries for the menu,
   // toolbar, and unfiled queries.
@@ -325,17 +328,17 @@ add_task(async function test_non_syncabl
       method: "uploading",
       value: undefined,
       // Tombstones for the 2 items we requested and for bookmarksMenu
       extra: {flowID: request.flowID, numIDs: "3"},
     },
   ]);
 
   _("Sync to upload tombstones for items");
-  Service.sync();
+  await Service.sync();
 
   let toolbarQueryId = PlacesUIUtils.leftPaneQueries.BookmarksToolbar;
   let menuQueryId = PlacesUIUtils.leftPaneQueries.BookmarksMenu;
   let queryGuids = [
     allBookmarksGuid,
     await PlacesUtils.promiseItemGuid(toolbarQueryId),
     await PlacesUtils.promiseItemGuid(menuQueryId),
     unfiledQueryGuid,
@@ -371,17 +374,17 @@ add_task(async function test_non_syncabl
       extra: {flowID: request.flowID, numIDs: "3"},
     },
   ]);
 
   await cleanup(server);
 });
 
 add_task(async function test_folder_descendants() {
-  let server = await setup();
+  let server = await makeServer();
 
   let parentFolder = await PlacesUtils.bookmarks.insert({
     type: PlacesUtils.bookmarks.TYPE_FOLDER,
     parentGuid: PlacesUtils.bookmarks.menuGuid,
     title: "Parent folder",
   });
   let childFolder = await PlacesUtils.bookmarks.insert({
     type: PlacesUtils.bookmarks.TYPE_FOLDER,
@@ -399,17 +402,17 @@ add_task(async function test_folder_desc
   // the repair because we explicitly request its ID.
   let childSiblingBmk = await PlacesUtils.bookmarks.insert({
     parentGuid: parentFolder.guid,
     title: "Get Thunderbird",
     url: "http://getthunderbird.com",
   });
 
   _("Initial sync to upload roots and parent folder");
-  Service.sync();
+  await Service.sync();
 
   let initialSyncIds = [
     "menu",
     "mobile",
     "toolbar",
     "unfiled",
     parentFolder.guid,
     existingChildBmk.guid,
@@ -441,17 +444,17 @@ add_task(async function test_folder_desc
     kind: "bookmark",
     syncId: Utils.makeGUID(),
     parentSyncId: childFolder.guid,
     title: "Mozilla",
     url: "https://mozilla.org",
   });
 
   _("Sync again; server contents shouldn't change");
-  Service.sync();
+  await Service.sync();
   deepEqual(getServerBookmarks(server).keys().sort(), initialSyncIds,
     "Second sync should not upload missing bookmarks");
 
   // This assumes the parent record on the server is correct, and the server
   // is just missing the children. This isn't a correct assumption if the
   // parent's `children` array is wrong, or if the parent and children disagree.
   _("Request missing bookmarks");
   let request = {
@@ -474,17 +477,17 @@ add_task(async function test_folder_desc
     { object: "repairResponse",
       method: "uploading",
       value: undefined,
       extra: {flowID: request.flowID, numIDs: "5"},
     },
   ]);
 
   _("Sync after requesting repair; should upload missing records");
-  Service.sync();
+  await Service.sync();
   deepEqual(getServerBookmarks(server).keys().sort(), [
     ...initialSyncIds,
     childBmk.syncId,
     grandChildBmk.syncId,
     grandChildSiblingBmk.syncId,
   ].sort(), "Third sync should upload requested items");
 
   checkRecordedEvents([
@@ -495,17 +498,17 @@ add_task(async function test_folder_desc
     },
   ]);
 
   await cleanup(server);
 });
 
 // Error handling.
 add_task(async function test_aborts_unknown_request() {
-  let server = await setup();
+  let server = await makeServer();
 
   let request = {
     request: "not-upload",
     ids: [],
     flowID: Utils.makeGUID(),
   }
   let responder = new BookmarkRepairResponder();
   await responder.repair(request, null);
@@ -516,8 +519,12 @@ add_task(async function test_aborts_unkn
       value: undefined,
       extra: { flowID: request.flowID,
                reason: "Don't understand request type 'not-upload'",
              },
     },
   ]);
   await cleanup(server);
 });
+
+add_task(async function teardown() {
+  Svc.Prefs.reset("engine.bookmarks.validation.enabled");
+});
--- a/services/sync/tests/unit/test_bookmark_smart_bookmarks.js
+++ b/services/sync/tests/unit/test_bookmark_smart_bookmarks.js
@@ -4,27 +4,18 @@
 Cu.import("resource://gre/modules/Log.jsm");
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
 const SMART_BOOKMARKS_ANNO = "Places/SmartBookmark";
-var IOService = Cc["@mozilla.org/network/io-service;1"]
+const IOService = Cc["@mozilla.org/network/io-service;1"]
                 .getService(Ci.nsIIOService);
-("http://www.mozilla.com", null, null);
-
-
-Service.engineManager.register(BookmarksEngine);
-var engine = Service.engineManager.get("bookmarks");
-var store = engine._store;
-
-// Clean up after other tests. Only necessary in XULRunner.
-store.wipe();
 
 function newSmartBookmark(parent, uri, position, title, queryID) {
   let id = PlacesUtils.bookmarks.insertBookmark(parent, uri, position, title);
   PlacesUtils.annotations.setItemAnnotation(id, SMART_BOOKMARKS_ANNO,
                                             queryID, 0,
                                             PlacesUtils.annotations.EXPIRE_NEVER);
   return id;
 }
@@ -40,16 +31,25 @@ function smartBookmarkCount() {
 function clearBookmarks() {
   _("Cleaning up existing items.");
   PlacesUtils.bookmarks.removeFolderChildren(PlacesUtils.bookmarks.bookmarksMenuFolder);
   PlacesUtils.bookmarks.removeFolderChildren(PlacesUtils.bookmarks.tagsFolder);
   PlacesUtils.bookmarks.removeFolderChildren(PlacesUtils.bookmarks.toolbarFolder);
   PlacesUtils.bookmarks.removeFolderChildren(PlacesUtils.bookmarks.unfiledBookmarksFolder);
 }
 
+let engine;
+let store;
+
+add_task(async function setup() {
+  await Service.engineManager.register(BookmarksEngine);
+  engine = Service.engineManager.get("bookmarks");
+  store = engine._store;
+});
+
 // Verify that Places smart bookmarks have their annotation uploaded and
 // handled locally.
 add_task(async function test_annotation_uploaded() {
   let server = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let startCount = smartBookmarkCount();
 
@@ -74,22 +74,22 @@ add_task(async function test_annotation_
   _("New item ID: " + mostVisitedID);
   do_check_true(!!mostVisitedID);
 
   let annoValue = PlacesUtils.annotations.getItemAnnotation(mostVisitedID,
                                               SMART_BOOKMARKS_ANNO);
   _("Anno: " + annoValue);
   do_check_eq("MostVisited", annoValue);
 
-  let guid = store.GUIDForId(mostVisitedID);
+  let guid = await store.GUIDForId(mostVisitedID);
   _("GUID: " + guid);
   do_check_true(!!guid);
 
   _("Create record object and verify that it's sane.");
-  let record = store.createRecord(guid);
+  let record = await store.createRecord(guid);
   do_check_true(record instanceof Bookmark);
   do_check_true(record instanceof BookmarkQuery);
 
   do_check_eq(record.bmkUri, uri.spec);
 
   _("Make sure the new record carries with it the annotation.");
   do_check_eq("MostVisited", record.queryId);
 
@@ -121,46 +121,46 @@ add_task(async function test_annotation_
 
     // "Clear" by changing attributes: if we delete it, apparently it sticks
     // around as a deleted record...
     PlacesUtils.bookmarks.setItemTitle(mostVisitedID, "Not Most Visited");
     PlacesUtils.bookmarks.changeBookmarkURI(
       mostVisitedID, Utils.makeURI("http://something/else"));
     PlacesUtils.annotations.removeItemAnnotation(mostVisitedID,
                                                  SMART_BOOKMARKS_ANNO);
-    store.wipe();
-    engine.resetClient();
+    await store.wipe();
+    await engine.resetClient();
     do_check_eq(smartBookmarkCount(), startCount);
 
     _("Sync. Verify that the downloaded record carries the annotation.");
     await sync_engine_and_validate_telem(engine, false);
 
     _("Verify that the Places DB now has an annotated bookmark.");
     _("Our count has increased again.");
     do_check_eq(smartBookmarkCount(), startCount + 1);
 
     _("Find by GUID and verify that it's annotated.");
-    let newID = store.idForGUID(serverGUID);
+    let newID = await store.idForGUID(serverGUID);
     let newAnnoValue = PlacesUtils.annotations.getItemAnnotation(
       newID, SMART_BOOKMARKS_ANNO);
     do_check_eq(newAnnoValue, "MostVisited");
     do_check_eq(PlacesUtils.bookmarks.getBookmarkURI(newID).spec, uri.spec);
 
     _("Test updating.");
-    let newRecord = store.createRecord(serverGUID);
+    let newRecord = await store.createRecord(serverGUID);
     do_check_eq(newRecord.queryId, newAnnoValue);
     newRecord.queryId = "LeastVisited";
-    store.update(newRecord);
+    await store.update(newRecord);
     do_check_eq("LeastVisited", PlacesUtils.annotations.getItemAnnotation(
       newID, SMART_BOOKMARKS_ANNO));
 
 
   } finally {
     // Clean up.
-    store.wipe();
+    await store.wipe();
     Svc.Prefs.resetBranch("");
     Service.recordManager.clearCache();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_smart_bookmarks_duped() {
   let server = serverForFoo(engine);
@@ -168,50 +168,50 @@ add_task(async function test_smart_bookm
 
   let parent = PlacesUtils.toolbarFolderId;
   let uri =
     Utils.makeURI("place:sort=" +
                   Ci.nsINavHistoryQueryOptions.SORT_BY_VISITCOUNT_DESCENDING +
                   "&maxResults=10");
   let title = "Most Visited";
   let mostVisitedID = newSmartBookmark(parent, uri, -1, title, "MostVisited");
-  let mostVisitedGUID = store.GUIDForId(mostVisitedID);
+  let mostVisitedGUID = await store.GUIDForId(mostVisitedID);
 
-  let record = store.createRecord(mostVisitedGUID);
+  let record = await store.createRecord(mostVisitedGUID);
 
   _("Prepare sync.");
   try {
-    engine._syncStartup();
+    await engine._syncStartup();
 
     _("Verify that mapDupe uses the anno, discovering a dupe regardless of URI.");
-    do_check_eq(mostVisitedGUID, engine._mapDupe(record));
+    do_check_eq(mostVisitedGUID, (await engine._mapDupe(record)));
 
     record.bmkUri = "http://foo/";
-    do_check_eq(mostVisitedGUID, engine._mapDupe(record));
+    do_check_eq(mostVisitedGUID, (await engine._mapDupe(record)));
     do_check_neq(PlacesUtils.bookmarks.getBookmarkURI(mostVisitedID).spec,
                  record.bmkUri);
 
     _("Verify that different annos don't dupe.");
     let other = new BookmarkQuery("bookmarks", "abcdefabcdef");
     other.queryId = "LeastVisited";
     other.parentName = "Bookmarks Toolbar";
     other.bmkUri = "place:foo";
     other.title = "";
-    do_check_eq(undefined, engine._findDupe(other));
+    do_check_eq(undefined, (await engine._findDupe(other)));
 
     _("Handle records without a queryId entry.");
     record.bmkUri = uri;
     delete record.queryId;
-    do_check_eq(mostVisitedGUID, engine._mapDupe(record));
+    do_check_eq(mostVisitedGUID, (await engine._mapDupe(record)));
 
-    engine._syncFinish();
+    await engine._syncFinish();
 
   } finally {
     // Clean up.
-    store.wipe();
+    await store.wipe();
     await promiseStopServer(server);
     Svc.Prefs.resetBranch("");
     Service.recordManager.clearCache();
   }
 });
 
 function run_test() {
   initTestLogging("Trace");
--- a/services/sync/tests/unit/test_bookmark_store.js
+++ b/services/sync/tests/unit/test_bookmark_store.js
@@ -3,475 +3,472 @@
 
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 
 const PARENT_ANNO = "sync/parent";
 
-Service.engineManager.register(BookmarksEngine);
+let engine;
+let store;
+let tracker;
 
-var engine = Service.engineManager.get("bookmarks");
-var store = engine._store;
-var tracker = engine._tracker;
+const fxuri = Utils.makeURI("http://getfirefox.com/");
+const tburi = Utils.makeURI("http://getthunderbird.com/");
 
-// Don't write some persistence files asynchronously.
-tracker.persistChangedIDs = false;
+add_task(async function setup() {
+  await Service.engineManager.register(BookmarksEngine);
 
-var fxuri = Utils.makeURI("http://getfirefox.com/");
-var tburi = Utils.makeURI("http://getthunderbird.com/");
+  engine = Service.engineManager.get("bookmarks");
+  store = engine._store;
+  tracker = engine._tracker;
+
+  // Don't write some persistence files asynchronously.
+  tracker.persistChangedIDs = false;
+});
 
 add_task(async function test_ignore_specials() {
   _("Ensure that we can't delete bookmark roots.");
 
   // Belt...
   let record = new BookmarkFolder("bookmarks", "toolbar", "folder");
   record.deleted = true;
-  do_check_neq(null, store.idForGUID("toolbar"));
+  do_check_neq(null, (await store.idForGUID("toolbar")));
 
-  store.applyIncoming(record);
+  await store.applyIncoming(record);
   await store.deletePending();
 
   // Ensure that the toolbar exists.
-  do_check_neq(null, store.idForGUID("toolbar"));
+  do_check_neq(null, (await store.idForGUID("toolbar")));
 
   // This will fail painfully in getItemType if the deletion worked.
-  engine._buildGUIDMap();
+  await engine._buildGUIDMap();
 
   // Braces...
-  store.remove(record);
+  await store.remove(record);
   await store.deletePending();
-  do_check_neq(null, store.idForGUID("toolbar"));
-  engine._buildGUIDMap();
+  do_check_neq(null, (await store.idForGUID("toolbar")));
+  await engine._buildGUIDMap();
 
-  store.wipe();
+  await store.wipe();
 });
 
-add_test(function test_bookmark_create() {
+add_task(async function test_bookmark_create() {
   try {
     _("Ensure the record isn't present yet.");
     let ids = PlacesUtils.bookmarks.getBookmarkIdsForURI(fxuri, {});
     do_check_eq(ids.length, 0);
 
     _("Let's create a new record.");
     let fxrecord = new Bookmark("bookmarks", "get-firefox1");
     fxrecord.bmkUri        = fxuri.spec;
     fxrecord.description   = "Firefox is awesome.";
     fxrecord.title         = "Get Firefox!";
     fxrecord.tags          = ["firefox", "awesome", "browser"];
     fxrecord.keyword       = "awesome";
     fxrecord.loadInSidebar = false;
     fxrecord.parentName    = "Bookmarks Toolbar";
     fxrecord.parentid      = "toolbar";
-    store.applyIncoming(fxrecord);
+    await store.applyIncoming(fxrecord);
 
     _("Verify it has been created correctly.");
-    let id = store.idForGUID(fxrecord.id);
-    do_check_eq(store.GUIDForId(id), fxrecord.id);
+    let id = await store.idForGUID(fxrecord.id);
+    do_check_eq((await store.GUIDForId(id)), fxrecord.id);
     do_check_eq(PlacesUtils.bookmarks.getItemType(id),
                 PlacesUtils.bookmarks.TYPE_BOOKMARK);
     do_check_true(PlacesUtils.bookmarks.getBookmarkURI(id).equals(fxuri));
     do_check_eq(PlacesUtils.bookmarks.getItemTitle(id), fxrecord.title);
     do_check_eq(PlacesUtils.annotations.getItemAnnotation(id, "bookmarkProperties/description"),
                 fxrecord.description);
     do_check_eq(PlacesUtils.bookmarks.getFolderIdForItem(id),
                 PlacesUtils.bookmarks.toolbarFolder);
     do_check_eq(PlacesUtils.bookmarks.getKeywordForBookmark(id), fxrecord.keyword);
 
     _("Have the store create a new record object. Verify that it has the same data.");
-    let newrecord = store.createRecord(fxrecord.id);
+    let newrecord = await store.createRecord(fxrecord.id);
     do_check_true(newrecord instanceof Bookmark);
     for (let property of ["type", "bmkUri", "description", "title",
                           "keyword", "parentName", "parentid"]) {
       do_check_eq(newrecord[property], fxrecord[property]);
     }
     do_check_true(Utils.deepEquals(newrecord.tags.sort(),
                                    fxrecord.tags.sort()));
 
     _("The calculated sort index is based on frecency data.");
     do_check_true(newrecord.sortindex >= 150);
 
     _("Create a record with some values missing.");
     let tbrecord = new Bookmark("bookmarks", "thunderbird1");
     tbrecord.bmkUri        = tburi.spec;
     tbrecord.parentName    = "Bookmarks Toolbar";
     tbrecord.parentid      = "toolbar";
-    store.applyIncoming(tbrecord);
+    await store.applyIncoming(tbrecord);
 
     _("Verify it has been created correctly.");
-    id = store.idForGUID(tbrecord.id);
-    do_check_eq(store.GUIDForId(id), tbrecord.id);
+    id = await store.idForGUID(tbrecord.id);
+    do_check_eq((await store.GUIDForId(id)), tbrecord.id);
     do_check_eq(PlacesUtils.bookmarks.getItemType(id),
                 PlacesUtils.bookmarks.TYPE_BOOKMARK);
     do_check_true(PlacesUtils.bookmarks.getBookmarkURI(id).equals(tburi));
     do_check_eq(PlacesUtils.bookmarks.getItemTitle(id), "");
     let error;
     try {
       PlacesUtils.annotations.getItemAnnotation(id, "bookmarkProperties/description");
     } catch (ex) {
       error = ex;
     }
     do_check_eq(error.result, Cr.NS_ERROR_NOT_AVAILABLE);
     do_check_eq(PlacesUtils.bookmarks.getFolderIdForItem(id),
                 PlacesUtils.bookmarks.toolbarFolder);
     do_check_eq(PlacesUtils.bookmarks.getKeywordForBookmark(id), null);
   } finally {
     _("Clean up.");
-    store.wipe();
-    run_next_test();
+    await store.wipe();
   }
 });
 
-add_test(function test_bookmark_update() {
+add_task(async function test_bookmark_update() {
   try {
     _("Create a bookmark whose values we'll change.");
     let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.toolbarFolder, fxuri,
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Firefox!");
     PlacesUtils.annotations.setItemAnnotation(
       bmk1_id, "bookmarkProperties/description", "Firefox is awesome.", 0,
       PlacesUtils.annotations.EXPIRE_NEVER);
     PlacesUtils.bookmarks.setKeywordForBookmark(bmk1_id, "firefox");
-    let bmk1_guid = store.GUIDForId(bmk1_id);
+    let bmk1_guid = await store.GUIDForId(bmk1_id);
 
     _("Update the record with some null values.");
-    let record = store.createRecord(bmk1_guid);
+    let record = await store.createRecord(bmk1_guid);
     record.title = null;
     record.description = null;
     record.keyword = null;
     record.tags = null;
-    store.applyIncoming(record);
+    await store.applyIncoming(record);
 
     _("Verify that the values have been cleared.");
     do_check_throws(function() {
       PlacesUtils.annotations.getItemAnnotation(
         bmk1_id, "bookmarkProperties/description");
     }, Cr.NS_ERROR_NOT_AVAILABLE);
     do_check_eq(PlacesUtils.bookmarks.getItemTitle(bmk1_id), "");
     do_check_eq(PlacesUtils.bookmarks.getKeywordForBookmark(bmk1_id), null);
   } finally {
     _("Clean up.");
-    store.wipe();
-    run_next_test();
+    await store.wipe();
   }
 });
 
-add_test(function test_bookmark_createRecord() {
+add_task(async function test_bookmark_createRecord() {
   try {
     _("Create a bookmark without a description or title.");
     let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.toolbarFolder, fxuri,
       PlacesUtils.bookmarks.DEFAULT_INDEX, null);
-    let bmk1_guid = store.GUIDForId(bmk1_id);
+    let bmk1_guid = await store.GUIDForId(bmk1_id);
 
     _("Verify that the record is created accordingly.");
-    let record = store.createRecord(bmk1_guid);
+    let record = await store.createRecord(bmk1_guid);
     do_check_eq(record.title, "");
     do_check_eq(record.description, null);
     do_check_eq(record.keyword, null);
 
   } finally {
     _("Clean up.");
-    store.wipe();
-    run_next_test();
+    await store.wipe();
   }
 });
 
-add_test(function test_folder_create() {
+add_task(async function test_folder_create() {
   try {
     _("Create a folder.");
     let folder = new BookmarkFolder("bookmarks", "testfolder-1");
     folder.parentName = "Bookmarks Toolbar";
     folder.parentid   = "toolbar";
     folder.title      = "Test Folder";
-    store.applyIncoming(folder);
+    await store.applyIncoming(folder);
 
     _("Verify it has been created correctly.");
-    let id = store.idForGUID(folder.id);
+    let id = await store.idForGUID(folder.id);
     do_check_eq(PlacesUtils.bookmarks.getItemType(id),
                 PlacesUtils.bookmarks.TYPE_FOLDER);
     do_check_eq(PlacesUtils.bookmarks.getItemTitle(id), folder.title);
     do_check_eq(PlacesUtils.bookmarks.getFolderIdForItem(id),
                 PlacesUtils.bookmarks.toolbarFolder);
 
     _("Have the store create a new record object. Verify that it has the same data.");
-    let newrecord = store.createRecord(folder.id);
+    let newrecord = await store.createRecord(folder.id);
     do_check_true(newrecord instanceof BookmarkFolder);
     for (let property of ["title", "parentName", "parentid"])
       do_check_eq(newrecord[property], folder[property]);
 
     _("Folders have high sort index to ensure they're synced first.");
     do_check_eq(newrecord.sortindex, 1000000);
   } finally {
     _("Clean up.");
-    store.wipe();
-    run_next_test();
+    await store.wipe();
   }
 });
 
-add_test(function test_folder_createRecord() {
+add_task(async function test_folder_createRecord() {
   try {
     _("Create a folder.");
     let folder1_id = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.toolbarFolder, "Folder1", 0);
-    let folder1_guid = store.GUIDForId(folder1_id);
+    let folder1_guid = await store.GUIDForId(folder1_id);
 
     _("Create two bookmarks in that folder without assigning them GUIDs.");
     let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
       folder1_id, fxuri, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
     let bmk2_id = PlacesUtils.bookmarks.insertBookmark(
       folder1_id, tburi, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Thunderbird!");
 
     _("Create a record for the folder and verify basic properties.");
-    let record = store.createRecord(folder1_guid);
+    let record = await store.createRecord(folder1_guid);
     do_check_true(record instanceof BookmarkFolder);
     do_check_eq(record.title, "Folder1");
     do_check_eq(record.parentid, "toolbar");
     do_check_eq(record.parentName, "Bookmarks Toolbar");
 
     _("Verify the folder's children. Ensures that the bookmarks were given GUIDs.");
-    let bmk1_guid = store.GUIDForId(bmk1_id);
-    let bmk2_guid = store.GUIDForId(bmk2_id);
+    let bmk1_guid = await store.GUIDForId(bmk1_id);
+    let bmk2_guid = await store.GUIDForId(bmk2_id);
     do_check_eq(record.children.length, 2);
     do_check_eq(record.children[0], bmk1_guid);
     do_check_eq(record.children[1], bmk2_guid);
 
   } finally {
     _("Clean up.");
-    store.wipe();
-    run_next_test();
+    await store.wipe();
   }
 });
 
 add_task(async function test_deleted() {
   try {
     _("Create a bookmark that will be deleted.");
     let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.toolbarFolder, fxuri,
       PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
-    let bmk1_guid = store.GUIDForId(bmk1_id);
+    let bmk1_guid = await store.GUIDForId(bmk1_id);
 
     _("Delete the bookmark through the store.");
     let record = new PlacesItem("bookmarks", bmk1_guid);
     record.deleted = true;
-    store.applyIncoming(record);
+    await store.applyIncoming(record);
     await store.deletePending();
     _("Ensure it has been deleted.");
     let error;
     try {
       PlacesUtils.bookmarks.getBookmarkURI(bmk1_id);
     } catch (ex) {
       error = ex;
     }
     do_check_eq(error.result, Cr.NS_ERROR_ILLEGAL_VALUE);
 
-    let newrec = store.createRecord(bmk1_guid);
+    let newrec = await store.createRecord(bmk1_guid);
     do_check_eq(newrec.deleted, true);
 
   } finally {
     _("Clean up.");
-    store.wipe();
+    await store.wipe();
   }
 });
 
-add_test(function test_move_folder() {
+add_task(async function test_move_folder() {
   try {
     _("Create two folders and a bookmark in one of them.");
     let folder1_id = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.toolbarFolder, "Folder1", 0);
-    let folder1_guid = store.GUIDForId(folder1_id);
+    let folder1_guid = await store.GUIDForId(folder1_id);
     let folder2_id = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.toolbarFolder, "Folder2", 0);
-    let folder2_guid = store.GUIDForId(folder2_id);
+    let folder2_guid = await store.GUIDForId(folder2_id);
     let bmk_id = PlacesUtils.bookmarks.insertBookmark(
       folder1_id, fxuri, PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
-    let bmk_guid = store.GUIDForId(bmk_id);
+    let bmk_guid = await store.GUIDForId(bmk_id);
 
     _("Get a record, reparent it and apply it to the store.");
-    let record = store.createRecord(bmk_guid);
+    let record = await store.createRecord(bmk_guid);
     do_check_eq(record.parentid, folder1_guid);
     record.parentid = folder2_guid;
-    store.applyIncoming(record);
+    await store.applyIncoming(record);
 
     _("Verify the new parent.");
     let new_folder_id = PlacesUtils.bookmarks.getFolderIdForItem(bmk_id);
-    do_check_eq(store.GUIDForId(new_folder_id), folder2_guid);
+    do_check_eq((await store.GUIDForId(new_folder_id)), folder2_guid);
   } finally {
     _("Clean up.");
-    store.wipe();
-    run_next_test();
+    await store.wipe();
   }
 });
 
 add_task(async function test_move_order() {
   // Make sure the tracker is turned on.
   Svc.Obs.notify("weave:engine:start-tracking");
   try {
     _("Create two bookmarks");
     let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.toolbarFolder, fxuri,
       PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
-    let bmk1_guid = store.GUIDForId(bmk1_id);
+    let bmk1_guid = await store.GUIDForId(bmk1_id);
     let bmk2_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.toolbarFolder, tburi,
       PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Thunderbird!");
-    let bmk2_guid = store.GUIDForId(bmk2_id);
+    let bmk2_guid = await store.GUIDForId(bmk2_id);
 
     _("Verify order.");
     do_check_eq(PlacesUtils.bookmarks.getItemIndex(bmk1_id), 0);
     do_check_eq(PlacesUtils.bookmarks.getItemIndex(bmk2_id), 1);
-    let toolbar = store.createRecord("toolbar");
+    let toolbar = await store.createRecord("toolbar");
     do_check_eq(toolbar.children.length, 2);
     do_check_eq(toolbar.children[0], bmk1_guid);
     do_check_eq(toolbar.children[1], bmk2_guid);
 
     _("Move bookmarks around.");
     store._childrenToOrder = {};
     toolbar.children = [bmk2_guid, bmk1_guid];
-    store.applyIncoming(toolbar);
+    await store.applyIncoming(toolbar);
     // Bookmarks engine does this at the end of _processIncoming
     tracker.ignoreAll = true;
     await store._orderChildren();
     tracker.ignoreAll = false;
     delete store._childrenToOrder;
 
     _("Verify new order.");
     do_check_eq(PlacesUtils.bookmarks.getItemIndex(bmk2_id), 0);
     do_check_eq(PlacesUtils.bookmarks.getItemIndex(bmk1_id), 1);
 
   } finally {
     Svc.Obs.notify("weave:engine:stop-tracking");
     _("Clean up.");
-    store.wipe();
+    await store.wipe();
   }
 });
 
-add_test(function test_orphan() {
+add_task(async function test_orphan() {
   try {
 
     _("Add a new bookmark locally.");
     let bmk1_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.toolbarFolder, fxuri,
       PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
-    let bmk1_guid = store.GUIDForId(bmk1_id);
+    let bmk1_guid = await store.GUIDForId(bmk1_id);
     do_check_eq(PlacesUtils.bookmarks.getFolderIdForItem(bmk1_id),
                 PlacesUtils.bookmarks.toolbarFolder);
     let error;
     try {
       PlacesUtils.annotations.getItemAnnotation(bmk1_id, PARENT_ANNO);
     } catch (ex) {
       error = ex;
     }
     do_check_eq(error.result, Cr.NS_ERROR_NOT_AVAILABLE);
 
     _("Apply a server record that is the same but refers to non-existent folder.");
-    let record = store.createRecord(bmk1_guid);
+    let record = await store.createRecord(bmk1_guid);
     record.parentid = "non-existent";
-    store.applyIncoming(record);
+    await store.applyIncoming(record);
 
     _("Verify that bookmark has been flagged as orphan, has not moved.");
     do_check_eq(PlacesUtils.bookmarks.getFolderIdForItem(bmk1_id),
                 PlacesUtils.bookmarks.toolbarFolder);
     do_check_eq(PlacesUtils.annotations.getItemAnnotation(bmk1_id, PARENT_ANNO),
                 "non-existent");
 
   } finally {
     _("Clean up.");
-    store.wipe();
-    run_next_test();
+    await store.wipe();
   }
 });
 
-add_test(function test_reparentOrphans() {
+add_task(async function test_reparentOrphans() {
   try {
     let folder1_id = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.toolbarFolder, "Folder1", 0);
-    let folder1_guid = store.GUIDForId(folder1_id);
+    let folder1_guid = await store.GUIDForId(folder1_id);
 
     _("Create a bogus orphan record and write the record back to the store to trigger _reparentOrphans.");
     PlacesUtils.annotations.setItemAnnotation(
       folder1_id, PARENT_ANNO, folder1_guid, 0,
       PlacesUtils.annotations.EXPIRE_NEVER);
-    let record = store.createRecord(folder1_guid);
+    let record = await store.createRecord(folder1_guid);
     record.title = "New title for Folder 1";
     store._childrenToOrder = {};
-    store.applyIncoming(record);
+    await store.applyIncoming(record);
 
     _("Verify that is has been marked as an orphan even though it couldn't be moved into itself.");
     do_check_eq(PlacesUtils.annotations.getItemAnnotation(folder1_id, PARENT_ANNO),
                 folder1_guid);
 
   } finally {
     _("Clean up.");
-    store.wipe();
-    run_next_test();
+    await store.wipe();
   }
 });
 
 // Tests Bug 806460, in which query records arrive with empty folder
 // names and missing bookmark URIs.
-add_test(function test_empty_query_doesnt_die() {
+add_task(async function test_empty_query_doesnt_die() {
   let record = new BookmarkQuery("bookmarks", "8xoDGqKrXf1P");
   record.folderName    = "";
   record.queryId       = "";
   record.parentName    = "Toolbar";
   record.parentid      = "toolbar";
 
   // These should not throw.
-  store.applyIncoming(record);
+  await store.applyIncoming(record);
 
   delete record.folderName;
-  store.applyIncoming(record);
+  await store.applyIncoming(record);
 
-  run_next_test();
 });
 
 function assertDeleted(id) {
   let error;
   try {
     PlacesUtils.bookmarks.getItemType(id);
   } catch (e) {
     error = e;
   }
   equal(error.result, Cr.NS_ERROR_ILLEGAL_VALUE)
 }
 
 add_task(async function test_delete_buffering() {
-  store.wipe();
+  await store.wipe();
   await PlacesTestUtils.markBookmarksAsSynced();
 
   try {
     _("Create a folder with two bookmarks.");
     let folder = new BookmarkFolder("bookmarks", "testfolder-1");
     folder.parentName = "Bookmarks Toolbar";
     folder.parentid = "toolbar";
     folder.title = "Test Folder";
-    store.applyIncoming(folder);
+    await store.applyIncoming(folder);
 
 
     let fxRecord = new Bookmark("bookmarks", "get-firefox1");
     fxRecord.bmkUri        = fxuri.spec;
     fxRecord.title         = "Get Firefox!";
     fxRecord.parentName    = "Test Folder";
     fxRecord.parentid      = "testfolder-1";
 
     let tbRecord = new Bookmark("bookmarks", "get-tndrbrd1");
     tbRecord.bmkUri        = tburi.spec;
     tbRecord.title         = "Get Thunderbird!";
     tbRecord.parentName    = "Test Folder";
     tbRecord.parentid      = "testfolder-1";
 
-    store.applyIncoming(fxRecord);
-    store.applyIncoming(tbRecord);
+    await store.applyIncoming(fxRecord);
+    await store.applyIncoming(tbRecord);
 
-    let folderId = store.idForGUID(folder.id);
-    let fxRecordId = store.idForGUID(fxRecord.id);
-    let tbRecordId = store.idForGUID(tbRecord.id);
+    let folderId = await store.idForGUID(folder.id);
+    let fxRecordId = await store.idForGUID(fxRecord.id);
+    let tbRecordId = await store.idForGUID(tbRecord.id);
 
     _("Check everything was created correctly.");
 
     equal(PlacesUtils.bookmarks.getItemType(fxRecordId),
           PlacesUtils.bookmarks.TYPE_BOOKMARK);
     equal(PlacesUtils.bookmarks.getItemType(tbRecordId),
           PlacesUtils.bookmarks.TYPE_BOOKMARK);
     equal(PlacesUtils.bookmarks.getItemType(folderId),
@@ -485,18 +482,18 @@ add_task(async function test_delete_buff
     _("Delete the folder and one bookmark.");
 
     let deleteFolder = new PlacesItem("bookmarks", "testfolder-1");
     deleteFolder.deleted = true;
 
     let deleteFxRecord = new PlacesItem("bookmarks", "get-firefox1");
     deleteFxRecord.deleted = true;
 
-    store.applyIncoming(deleteFolder);
-    store.applyIncoming(deleteFxRecord);
+    await store.applyIncoming(deleteFolder);
+    await store.applyIncoming(deleteFxRecord);
 
     _("Check that we haven't deleted them yet, but that the deletions are queued");
     // these will throw if we've deleted them
     equal(PlacesUtils.bookmarks.getItemType(fxRecordId),
            PlacesUtils.bookmarks.TYPE_BOOKMARK);
 
     equal(PlacesUtils.bookmarks.getItemType(folderId),
            PlacesUtils.bookmarks.TYPE_FOLDER);
@@ -518,17 +515,17 @@ add_task(async function test_delete_buff
     ok(!store._itemsToDelete.has(folder.id));
     ok(!store._itemsToDelete.has(fxRecord.id));
 
     equal(PlacesUtils.bookmarks.getFolderIdForItem(tbRecordId),
           PlacesUtils.bookmarks.toolbarFolder);
 
   } finally {
     _("Clean up.");
-    store.wipe();
+    await store.wipe();
   }
 });
 
 
 function run_test() {
   initTestLogging("Trace");
   run_next_test();
 }
--- a/services/sync/tests/unit/test_bookmark_tracker.js
+++ b/services/sync/tests/unit/test_bookmark_tracker.js
@@ -11,43 +11,47 @@ Cu.import("resource://services-sync/cons
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://gre/modules/osfile.jsm");
 Cu.import("resource://testing-common/PlacesTestUtils.jsm");
 Cu.import("resource:///modules/PlacesUIUtils.jsm");
 
-Service.engineManager.register(BookmarksEngine);
-var engine = Service.engineManager.get("bookmarks");
-var store  = engine._store;
-var tracker = engine._tracker;
-
-store.wipe();
-tracker.persistChangedIDs = false;
+let engine;
+let store;
+let tracker;
 
 const DAY_IN_MS = 24 * 60 * 60 * 1000;
 
+add_task(async function setup() {
+  await Service.engineManager.register(BookmarksEngine);
+  engine = Service.engineManager.get("bookmarks");
+  store  = engine._store;
+  tracker = engine._tracker;
+  tracker.persistChangedIDs = false;
+});
+
 // Test helpers.
 async function verifyTrackerEmpty() {
   await PlacesTestUtils.promiseAsyncUpdates();
   let changes = await tracker.promiseChangedIDs();
   deepEqual(changes, {});
   equal(tracker.score, 0);
 }
 
 async function resetTracker() {
   await PlacesTestUtils.markBookmarksAsSynced();
   tracker.resetScore();
 }
 
 async function cleanup() {
   engine.lastSync = 0;
   engine._needWeakReupload.clear()
-  store.wipe();
+  await store.wipe();
   await resetTracker();
   await stopTracking();
 }
 
 // startTracking is a signal that the test wants to notice things that happen
 // after this is called (ie, things already tracked should be discarded.)
 async function startTracking() {
   Svc.Obs.notify("weave:engine:start-tracking");
@@ -441,40 +445,40 @@ add_task(async function test_onItemAdded
 
   try {
     await startTracking();
 
     _("Insert a folder using the sync API");
     let syncFolderID = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.bookmarksMenuFolder, "Sync Folder",
       PlacesUtils.bookmarks.DEFAULT_INDEX);
-    let syncFolderGUID = engine._store.GUIDForId(syncFolderID);
+    let syncFolderGUID = await engine._store.GUIDForId(syncFolderID);
     await verifyTrackedItems(["menu", syncFolderGUID]);
     do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
 
     await resetTracker();
     await startTracking();
 
     _("Insert a bookmark using the sync API");
     let syncBmkID = PlacesUtils.bookmarks.insertBookmark(syncFolderID,
       Utils.makeURI("https://example.org/sync"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Sync Bookmark");
-    let syncBmkGUID = engine._store.GUIDForId(syncBmkID);
+    let syncBmkGUID = await engine._store.GUIDForId(syncBmkID);
     await verifyTrackedItems([syncFolderGUID, syncBmkGUID]);
     do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
 
     await resetTracker();
     await startTracking();
 
     _("Insert a separator using the sync API");
     let syncSepID = PlacesUtils.bookmarks.insertSeparator(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       PlacesUtils.bookmarks.getItemIndex(syncFolderID));
-    let syncSepGUID = engine._store.GUIDForId(syncSepID);
+    let syncSepGUID = await engine._store.GUIDForId(syncSepID);
     await verifyTrackedItems(["menu", syncSepGUID]);
     do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
   } finally {
     _("Clean up.");
     await cleanup();
   }
 });
 
@@ -565,17 +569,17 @@ add_task(async function test_onItemChang
     await stopTracking();
 
     _("Insert a bookmark");
     let fx_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Firefox!");
-    let fx_guid = engine._store.GUIDForId(fx_id);
+    let fx_guid = await engine._store.GUIDForId(fx_id);
     _(`Firefox GUID: ${fx_guid}`);
 
     await startTracking();
 
     _("Reset the bookmark's added date");
     // Convert to microseconds for PRTime.
     let dateAdded = (Date.now() - DAY_IN_MS) * 1000;
     PlacesUtils.bookmarks.setItemDateAdded(fx_id, dateAdded);
@@ -601,17 +605,17 @@ add_task(async function test_onItemChang
     await stopTracking();
 
     _("Insert a bookmark");
     let fx_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Firefox!");
-    let fx_guid = engine._store.GUIDForId(fx_id);
+    let fx_guid = await engine._store.GUIDForId(fx_id);
     _(`Firefox GUID: ${fx_guid}`);
 
     _("Set a tracked annotation to make sure we only notify once");
     PlacesUtils.annotations.setItemAnnotation(
       fx_id, PlacesSyncUtils.bookmarks.DESCRIPTION_ANNO, "A test description", 0,
       PlacesUtils.annotations.EXPIRE_NEVER);
 
     await startTracking();
@@ -632,26 +636,26 @@ add_task(async function test_onItemTagge
 
   try {
     await stopTracking();
 
     _("Create a folder");
     let folder = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.bookmarksMenuFolder, "Parent",
       PlacesUtils.bookmarks.DEFAULT_INDEX);
-    let folderGUID = engine._store.GUIDForId(folder);
+    let folderGUID = await engine._store.GUIDForId(folder);
     _("Folder ID: " + folder);
     _("Folder GUID: " + folderGUID);
 
     _("Track changes to tags");
     let uri = Utils.makeURI("http://getfirefox.com");
     let b = PlacesUtils.bookmarks.insertBookmark(
       folder, uri,
       PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
-    let bGUID = engine._store.GUIDForId(b);
+    let bGUID = await engine._store.GUIDForId(b);
     _("New item is " + b);
     _("GUID: " + bGUID);
 
     await startTracking();
 
     _("Tag the item");
     PlacesUtils.tagging.tagURI(uri, ["foo"]);
 
@@ -670,22 +674,22 @@ add_task(async function test_onItemUntag
   try {
     await stopTracking();
 
     _("Insert tagged bookmarks");
     let uri = Utils.makeURI("http://getfirefox.com");
     let fx1ID = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.bookmarksMenuFolder, uri,
       PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
-    let fx1GUID = engine._store.GUIDForId(fx1ID);
+    let fx1GUID = await engine._store.GUIDForId(fx1ID);
     // Different parent and title; same URL.
     let fx2ID = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.toolbarFolder, uri,
       PlacesUtils.bookmarks.DEFAULT_INDEX, "Download Firefox");
-    let fx2GUID = engine._store.GUIDForId(fx2ID);
+    let fx2GUID = await engine._store.GUIDForId(fx2ID);
     PlacesUtils.tagging.tagURI(uri, ["foo"]);
 
     await startTracking();
 
     _("Remove the tag");
     PlacesUtils.tagging.untagURI(uri, ["foo"]);
 
     await verifyTrackedItems([fx1GUID, fx2GUID]);
@@ -805,17 +809,17 @@ add_task(async function test_onItemKeywo
     let folder = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.bookmarksMenuFolder, "Parent",
       PlacesUtils.bookmarks.DEFAULT_INDEX);
     _("Track changes to keywords");
     let uri = Utils.makeURI("http://getfirefox.com");
     let b = PlacesUtils.bookmarks.insertBookmark(
       folder, uri,
       PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
-    let bGUID = engine._store.GUIDForId(b);
+    let bGUID = await engine._store.GUIDForId(b);
     _("New item is " + b);
     _("GUID: " + bGUID);
 
     await startTracking();
 
     _("Give the item a keyword");
     PlacesUtils.bookmarks.setKeywordForBookmark(b, "the_keyword");
 
@@ -910,17 +914,17 @@ add_task(async function test_onItemPostD
     await stopTracking();
 
     _("Insert a bookmark");
     let fx_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Firefox!");
-    let fx_guid = engine._store.GUIDForId(fx_id);
+    let fx_guid = await engine._store.GUIDForId(fx_id);
     _(`Firefox GUID: ${fx_guid}`);
 
     await startTracking();
 
     // PlacesUtils.setPostDataForBookmark is deprecated, but still used by
     // PlacesTransactions.NewBookmark.
     _("Post data for the bookmark should be ignored");
     await PlacesUtils.setPostDataForBookmark(fx_id, "postData");
@@ -939,17 +943,17 @@ add_task(async function test_onItemAnnoC
     await stopTracking();
     let folder = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.bookmarksMenuFolder, "Parent",
       PlacesUtils.bookmarks.DEFAULT_INDEX);
     _("Track changes to annos.");
     let b = PlacesUtils.bookmarks.insertBookmark(
       folder, Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
-    let bGUID = engine._store.GUIDForId(b);
+    let bGUID = await engine._store.GUIDForId(b);
     _("New item is " + b);
     _("GUID: " + bGUID);
 
     await startTracking();
     PlacesUtils.annotations.setItemAnnotation(
       b, PlacesSyncUtils.bookmarks.DESCRIPTION_ANNO, "A test description", 0,
       PlacesUtils.annotations.EXPIRE_NEVER);
     // bookmark should be tracked, folder should not.
@@ -973,34 +977,34 @@ add_task(async function test_onItemAdded
   try {
     await startTracking();
 
     _("Create a new root");
     let rootID = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.placesRoot,
       "New root",
       PlacesUtils.bookmarks.DEFAULT_INDEX);
-    let rootGUID = engine._store.GUIDForId(rootID);
+    let rootGUID = await engine._store.GUIDForId(rootID);
     _(`New root GUID: ${rootGUID}`);
 
     _("Insert a bookmark underneath the new root");
     let untrackedBmkID = PlacesUtils.bookmarks.insertBookmark(
       rootID,
       Utils.makeURI("http://getthunderbird.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Thunderbird!");
-    let untrackedBmkGUID = engine._store.GUIDForId(untrackedBmkID);
+    let untrackedBmkGUID = await engine._store.GUIDForId(untrackedBmkID);
     _(`New untracked bookmark GUID: ${untrackedBmkGUID}`);
 
     _("Insert a bookmark underneath the Places root");
     let rootBmkID = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.placesRoot,
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
-    let rootBmkGUID = engine._store.GUIDForId(rootBmkID);
+    let rootBmkGUID = await engine._store.GUIDForId(rootBmkID);
     _(`New Places root bookmark GUID: ${rootBmkGUID}`);
 
     _("New root and bookmark should be ignored");
     await verifyTrackedItems([]);
     do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3);
   } finally {
     _("Clean up.");
     await cleanup();
@@ -1013,17 +1017,17 @@ add_task(async function test_onItemDelet
   try {
     await stopTracking();
 
     _("Insert a bookmark underneath the Places root");
     let rootBmkID = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.placesRoot,
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX, "Get Firefox!");
-    let rootBmkGUID = engine._store.GUIDForId(rootBmkID);
+    let rootBmkGUID = await engine._store.GUIDForId(rootBmkID);
     _(`New Places root bookmark GUID: ${rootBmkGUID}`);
 
     await startTracking();
 
     PlacesUtils.bookmarks.removeItem(rootBmkID);
 
     await verifyTrackedItems([]);
     // We'll still increment the counter for the removed item.
@@ -1166,24 +1170,24 @@ add_task(async function test_onItemMoved
   _("Items moved via the synchronous API should be tracked");
 
   try {
     let fx_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Firefox!");
-    let fx_guid = engine._store.GUIDForId(fx_id);
+    let fx_guid = await engine._store.GUIDForId(fx_id);
     _("Firefox GUID: " + fx_guid);
     let tb_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       Utils.makeURI("http://getthunderbird.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Thunderbird!");
-    let tb_guid = engine._store.GUIDForId(tb_id);
+    let tb_guid = await engine._store.GUIDForId(tb_id);
     _("Thunderbird GUID: " + tb_guid);
 
     await startTracking();
 
     // Moving within the folder will just track the folder.
     PlacesUtils.bookmarks.moveItem(
       tb_id, PlacesUtils.bookmarks.bookmarksMenuFolder, 0);
     await verifyTrackedItems(["menu"]);
@@ -1301,42 +1305,42 @@ add_task(async function test_onItemMoved
 
   try {
     await stopTracking();
 
     let folder_id = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       "Test folder",
       PlacesUtils.bookmarks.DEFAULT_INDEX);
-    let folder_guid = engine._store.GUIDForId(folder_id);
+    let folder_guid = await engine._store.GUIDForId(folder_id);
     _(`Folder GUID: ${folder_guid}`);
 
     let tb_id = PlacesUtils.bookmarks.insertBookmark(
       folder_id,
       Utils.makeURI("http://getthunderbird.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Thunderbird");
-    let tb_guid = engine._store.GUIDForId(tb_id);
+    let tb_guid = await engine._store.GUIDForId(tb_id);
     _(`Thunderbird GUID: ${tb_guid}`);
 
     let fx_id = PlacesUtils.bookmarks.insertBookmark(
       folder_id,
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Firefox");
-    let fx_guid = engine._store.GUIDForId(fx_id);
+    let fx_guid = await engine._store.GUIDForId(fx_id);
     _(`Firefox GUID: ${fx_guid}`);
 
     let moz_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       Utils.makeURI("https://mozilla.org"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Mozilla"
     );
-    let moz_guid = engine._store.GUIDForId(moz_id);
+    let moz_guid = await engine._store.GUIDForId(moz_id);
     _(`Mozilla GUID: ${moz_guid}`);
 
     await startTracking();
 
     // PlacesSortFolderByNameTransaction exercises
     // PlacesUtils.bookmarks.setItemIndex.
     let txn = new PlacesSortFolderByNameTransaction(folder_id);
 
@@ -1364,31 +1368,31 @@ add_task(async function test_onItemDelet
   try {
     await stopTracking();
 
     _("Create a folder with two children");
     let folder_id = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       "Test folder",
       PlacesUtils.bookmarks.DEFAULT_INDEX);
-    let folder_guid = engine._store.GUIDForId(folder_id);
+    let folder_guid = await engine._store.GUIDForId(folder_id);
     _(`Folder GUID: ${folder_guid}`);
     let fx_id = PlacesUtils.bookmarks.insertBookmark(
       folder_id,
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Firefox!");
-    let fx_guid = engine._store.GUIDForId(fx_id);
+    let fx_guid = await engine._store.GUIDForId(fx_id);
     _(`Firefox GUID: ${fx_guid}`);
     let tb_id = PlacesUtils.bookmarks.insertBookmark(
       folder_id,
       Utils.makeURI("http://getthunderbird.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Thunderbird!");
-    let tb_guid = engine._store.GUIDForId(tb_id);
+    let tb_guid = await engine._store.GUIDForId(tb_id);
     _(`Thunderbird GUID: ${tb_guid}`);
 
     await startTracking();
 
     let txn = PlacesUtils.bookmarks.getRemoveFolderTransaction(folder_id);
     // We haven't executed the transaction yet.
     await verifyTrackerEmpty();
 
@@ -1422,24 +1426,24 @@ add_task(async function test_treeMoved()
   _("Moving an entire tree of bookmarks should track the parents");
 
   try {
     // Create a couple of parent folders.
     let folder1_id = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       "First test folder",
       PlacesUtils.bookmarks.DEFAULT_INDEX);
-    let folder1_guid = engine._store.GUIDForId(folder1_id);
+    let folder1_guid = await engine._store.GUIDForId(folder1_id);
 
     // A second folder in the first.
     let folder2_id = PlacesUtils.bookmarks.createFolder(
       folder1_id,
       "Second test folder",
       PlacesUtils.bookmarks.DEFAULT_INDEX);
-    let folder2_guid = engine._store.GUIDForId(folder2_id);
+    let folder2_guid = await engine._store.GUIDForId(folder2_id);
 
     // Create a couple of bookmarks in the second folder.
     PlacesUtils.bookmarks.insertBookmark(
       folder2_id,
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Firefox!");
     PlacesUtils.bookmarks.insertBookmark(
@@ -1471,17 +1475,17 @@ add_task(async function test_onItemDelet
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Firefox!");
     let tb_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       Utils.makeURI("http://getthunderbird.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Thunderbird!");
-    let tb_guid = engine._store.GUIDForId(tb_id);
+    let tb_guid = await engine._store.GUIDForId(tb_id);
 
     await startTracking();
 
     // Delete the last item - the item and parent should be tracked.
     PlacesUtils.bookmarks.removeItem(tb_id);
 
     await verifyTrackedItems(["menu", tb_guid]);
     do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE);
@@ -1611,34 +1615,34 @@ add_task(async function test_onItemDelet
   _("Removing a folder's children should track the folder and its children");
 
   try {
     let fx_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.mobileFolderId,
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Firefox!");
-    let fx_guid = engine._store.GUIDForId(fx_id);
+    let fx_guid = await engine._store.GUIDForId(fx_id);
     _(`Firefox GUID: ${fx_guid}`);
 
     let tb_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.mobileFolderId,
       Utils.makeURI("http://getthunderbird.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Thunderbird!");
-    let tb_guid = engine._store.GUIDForId(tb_id);
+    let tb_guid = await engine._store.GUIDForId(tb_id);
     _(`Thunderbird GUID: ${tb_guid}`);
 
     let moz_id = PlacesUtils.bookmarks.insertBookmark(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       Utils.makeURI("https://mozilla.org"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Mozilla"
     );
-    let moz_guid = engine._store.GUIDForId(moz_id);
+    let moz_guid = await engine._store.GUIDForId(moz_id);
     _(`Mozilla GUID: ${moz_guid}`);
 
     await startTracking();
 
     _(`Mobile root ID: ${PlacesUtils.mobileFolderId}`);
     PlacesUtils.bookmarks.removeFolderChildren(PlacesUtils.mobileFolderId);
 
     await verifyTrackedItems(["mobile", fx_guid, tb_guid]);
@@ -1653,38 +1657,38 @@ add_task(async function test_onItemDelet
   _("Deleting a tree of bookmarks should track all items");
 
   try {
     // Create a couple of parent folders.
     let folder1_id = PlacesUtils.bookmarks.createFolder(
       PlacesUtils.bookmarks.bookmarksMenuFolder,
       "First test folder",
       PlacesUtils.bookmarks.DEFAULT_INDEX);
-    let folder1_guid = engine._store.GUIDForId(folder1_id);
+    let folder1_guid = await engine._store.GUIDForId(folder1_id);
 
     // A second folder in the first.
     let folder2_id = PlacesUtils.bookmarks.createFolder(
       folder1_id,
       "Second test folder",
       PlacesUtils.bookmarks.DEFAULT_INDEX);
-    let folder2_guid = engine._store.GUIDForId(folder2_id);
+    let folder2_guid = await engine._store.GUIDForId(folder2_id);
 
     // Create a couple of bookmarks in the second folder.
     let fx_id = PlacesUtils.bookmarks.insertBookmark(
       folder2_id,
       Utils.makeURI("http://getfirefox.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Firefox!");
-    let fx_guid = engine._store.GUIDForId(fx_id);
+    let fx_guid = await engine._store.GUIDForId(fx_id);
     let tb_id = PlacesUtils.bookmarks.insertBookmark(
       folder2_id,
       Utils.makeURI("http://getthunderbird.com"),
       PlacesUtils.bookmarks.DEFAULT_INDEX,
       "Get Thunderbird!");
-    let tb_guid = engine._store.GUIDForId(tb_id);
+    let tb_guid = await engine._store.GUIDForId(tb_id);
 
     await startTracking();
 
     // Delete folder2 - everything we created should be tracked.
     PlacesUtils.bookmarks.removeItem(folder2_id);
 
     await verifyTrackedItems([fx_guid, tb_guid, folder1_guid, folder2_guid]);
     do_check_eq(tracker.score, SCORE_INCREMENT_XLARGE * 3);
--- a/services/sync/tests/unit/test_clients_engine.js
+++ b/services/sync/tests/unit/test_clients_engine.js
@@ -7,17 +7,17 @@ Cu.import("resource://services-sync/engi
 Cu.import("resource://services-sync/record.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
 const MORE_THAN_CLIENTS_TTL_REFRESH = 691200; // 8 days
 const LESS_THAN_CLIENTS_TTL_REFRESH = 86400;  // 1 day
 
-var engine = Service.clientsEngine;
+let engine;
 
 /**
  * Unpack the record with this ID, and verify that it has the same version that
  * we should be putting into records.
  */
 function check_record_version(user, id) {
     let payload = JSON.parse(user.collection("clients").wbo(id).payload);
 
@@ -43,20 +43,24 @@ function compareCommands(actual, expecte
   let tweakedActual = JSON.parse(JSON.stringify(actual));
   tweakedActual.map(elt => delete elt.flowID);
   deepEqual(tweakedActual, expected, description);
   // each item must have a unique flowID.
   let allIDs = new Set(actual.map(elt => elt.flowID).filter(fid => !!fid));
   equal(allIDs.size, actual.length, "all items have unique IDs");
 }
 
-function cleanup() {
+add_task(async function setup() {
+  engine = Service.clientsEngine;
+});
+
+async function cleanup() {
   Svc.Prefs.resetBranch("");
   engine._tracker.clearChangedIDs();
-  engine._resetClient();
+  await engine._resetClient();
   // We don't finalize storage at cleanup, since we use the same clients engine
   // instance across all tests.
 }
 
 add_task(async function test_bad_hmac() {
   _("Ensure that Clients engine deletes corrupt records.");
   let deletedCollections = [];
   let deletedItems       = [];
@@ -90,120 +94,120 @@ add_task(async function test_bad_hmac() 
     generateNewKeys(Service.collectionKeys);
     let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
     serverKeys.encrypt(Service.identity.syncKeyBundle);
     ok((await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success);
   }
 
   try {
     await configureIdentity({username: "foo"}, server);
-    Service.login();
+    await Service.login();
 
     generateNewKeys(Service.collectionKeys);
 
     _("First sync, client record is uploaded");
     equal(engine.lastRecordUpload, 0);
     check_clients_count(0);
-    engine._sync();
+    await engine._sync();
     check_clients_count(1);
     ok(engine.lastRecordUpload > 0);
 
     // Our uploaded record has a version.
     check_record_version(user, engine.localID);
 
     // Initial setup can wipe the server, so clean up.
     deletedCollections = [];
     deletedItems       = [];
 
     _("Change our keys and our client ID, reupload keys.");
     let oldLocalID  = engine.localID;     // Preserve to test for deletion!
     engine.localID = Utils.makeGUID();
-    engine.resetClient();
+    await engine.resetClient();
     generateNewKeys(Service.collectionKeys);
     let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
     serverKeys.encrypt(Service.identity.syncKeyBundle);
     ok((await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success);
 
     _("Sync.");
-    engine._sync();
+    await engine._sync();
 
     _("Old record " + oldLocalID + " was deleted, new one uploaded.");
     check_clients_count(1);
     check_client_deleted(oldLocalID);
 
     _("Now change our keys but don't upload them. " +
       "That means we get an HMAC error but redownload keys.");
     Service.lastHMACEvent = 0;
     engine.localID = Utils.makeGUID();
-    engine.resetClient();
+    await engine.resetClient();
     generateNewKeys(Service.collectionKeys);
     deletedCollections = [];
     deletedItems       = [];
     check_clients_count(1);
-    engine._sync();
+    await engine._sync();
 
     _("Old record was not deleted, new one uploaded.");
     equal(deletedCollections.length, 0);
     equal(deletedItems.length, 0);
     check_clients_count(2);
 
     _("Now try the scenario where our keys are wrong *and* there's a bad record.");
     // Clean up and start fresh.
     user.collection("clients")._wbos = {};
     Service.lastHMACEvent = 0;
     engine.localID = Utils.makeGUID();
-    engine.resetClient();
+    await engine.resetClient();
     deletedCollections = [];
     deletedItems       = [];
     check_clients_count(0);
 
     await uploadNewKeys();
 
     // Sync once to upload a record.
-    engine._sync();
+    await engine._sync();
     check_clients_count(1);
 
     // Generate and upload new keys, so the old client record is wrong.
     await uploadNewKeys();
 
     // Create a new client record and new keys. Now our keys are wrong, as well
     // as the object on the server. We'll download the new keys and also delete
     // the bad client record.
     oldLocalID  = engine.localID;         // Preserve to test for deletion!
     engine.localID = Utils.makeGUID();
-    engine.resetClient();
+    await engine.resetClient();
     generateNewKeys(Service.collectionKeys);
     let oldKey = Service.collectionKeys.keyForCollection();
 
     equal(deletedCollections.length, 0);
     equal(deletedItems.length, 0);
-    engine._sync();
+    await engine._sync();
     equal(deletedItems.length, 1);
     check_client_deleted(oldLocalID);
     check_clients_count(1);
     let newKey = Service.collectionKeys.keyForCollection();
     ok(!oldKey.equals(newKey));
 
   } finally {
-    cleanup();
+    await cleanup();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_properties() {
   _("Test lastRecordUpload property");
   try {
     equal(Svc.Prefs.get("clients.lastRecordUpload"), undefined);
     equal(engine.lastRecordUpload, 0);
 
     let now = Date.now();
     engine.lastRecordUpload = now / 1000;
     equal(engine.lastRecordUpload, Math.floor(now / 1000));
   } finally {
-    cleanup();
+    await cleanup();
   }
 });
 
 add_task(async function test_full_sync() {
   _("Ensure that Clients engine fetches all records for each sync.");
 
   let now = Date.now() / 1000;
   let server = serverForFoo(engine);
@@ -232,38 +236,40 @@ add_task(async function test_full_sync()
     protocols: ["1.5"],
   }), now - 10));
 
   try {
     let store = engine._store;
 
     _("First sync. 2 records downloaded; our record uploaded.");
     strictEqual(engine.lastRecordUpload, 0);
-    engine._sync();
+    await engine._sync();
     ok(engine.lastRecordUpload > 0);
     deepEqual(user.collection("clients").keys().sort(),
               [activeID, deletedID, engine.localID].sort(),
               "Our record should be uploaded on first sync");
-    deepEqual(Object.keys(store.getAllIDs()).sort(),
+    let ids = await store.getAllIDs();
+    deepEqual(Object.keys(ids).sort(),
               [activeID, deletedID, engine.localID].sort(),
               "Other clients should be downloaded on first sync");
 
     _("Delete a record, then sync again");
     let collection = server.getCollection("foo", "clients");
     collection.remove(deletedID);
     // Simulate a timestamp update in info/collections.
     engine.lastModified = now;
-    engine._sync();
+    await engine._sync();
 
     _("Record should be updated");
-    deepEqual(Object.keys(store.getAllIDs()).sort(),
+    ids = await store.getAllIDs();
+    deepEqual(Object.keys(ids).sort(),
               [activeID, engine.localID].sort(),
               "Deleted client should be removed on next sync");
   } finally {
-    cleanup();
+    await cleanup();
 
     try {
       server.deleteCollections("foo");
     } finally {
       await promiseStopServer(server);
     }
   }
 });
@@ -281,41 +287,41 @@ add_task(async function test_sync() {
     return user.collection("clients").wbo(engine.localID);
   }
 
   try {
 
     _("First sync. Client record is uploaded.");
     equal(clientWBO(), undefined);
     equal(engine.lastRecordUpload, 0);
-    engine._sync();
+    await engine._sync();
     ok(!!clientWBO().payload);
     ok(engine.lastRecordUpload > 0);
 
     _("Let's time travel more than a week back, new record should've been uploaded.");
     engine.lastRecordUpload -= MORE_THAN_CLIENTS_TTL_REFRESH;
     let lastweek = engine.lastRecordUpload;
     clientWBO().payload = undefined;
-    engine._sync();
+    await engine._sync();
     ok(!!clientWBO().payload);
     ok(engine.lastRecordUpload > lastweek);
 
     _("Remove client record.");
     await engine.removeClientData();
     equal(clientWBO().payload, undefined);
 
     _("Time travel one day back, no record uploaded.");
     engine.lastRecordUpload -= LESS_THAN_CLIENTS_TTL_REFRESH;
     let yesterday = engine.lastRecordUpload;
-    engine._sync();
+    await engine._sync();
     equal(clientWBO().payload, undefined);
     equal(engine.lastRecordUpload, yesterday);
 
   } finally {
-    cleanup();
+    await cleanup();
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_client_name_change() {
   _("Ensure client name change incurs a client record update.");
 
   let tracker = engine._tracker;
@@ -339,17 +345,17 @@ add_task(async function test_client_name
   notEqual(initialName, engine.localName);
   equal(Object.keys(tracker.changedIDs).length, 1);
   ok(engine.localID in tracker.changedIDs);
   ok(tracker.score > initialScore);
   ok(tracker.score >= SCORE_INCREMENT_XLARGE);
 
   Svc.Obs.notify("weave:engine:stop-tracking");
 
-  cleanup();
+  await cleanup();
 });
 
 add_task(async function test_last_modified() {
   _("Ensure that remote records have a sane serverLastModified attribute.");
 
   let now = Date.now() / 1000;
   let server = serverForFoo(engine);
   let user   = server.user("foo");
@@ -366,73 +372,73 @@ add_task(async function test_last_modifi
     version: "48",
     protocols: ["1.5"],
   }), now - 10));
 
   try {
     let collection = user.collection("clients");
 
     _("Sync to download the record");
-    engine._sync();
+    await engine._sync();
 
     equal(engine._store._remoteClients[activeID].serverLastModified, now - 10,
           "last modified in the local record is correctly the server last-modified");
 
     _("Modify the record and re-upload it");
     // set a new name to make sure we really did upload.
     engine._store._remoteClients[activeID].name = "New name";
     engine._modified.set(activeID, 0);
-    engine._uploadOutgoing();
+    await engine._uploadOutgoing();
 
     _("Local record should have updated timestamp");
     ok(engine._store._remoteClients[activeID].serverLastModified >= now);
 
     _("Record on the server should have new name but not serverLastModified");
     let payload = JSON.parse(JSON.parse(collection.payload(activeID)).ciphertext);
     equal(payload.name, "New name");
     equal(payload.serverLastModified, undefined);
 
   } finally {
-    cleanup();
+    await cleanup();
     server.deleteCollections("foo");
     await promiseStopServer(server);
   }
 });
 
 add_task(async function test_send_command() {
   _("Verifies _sendCommandToClient puts commands in the outbound queue.");
 
   let store = engine._store;
   let tracker = engine._tracker;
   let remoteId = Utils.makeGUID();
   let rec = new ClientsRec("clients", remoteId);
 
-  store.create(rec);
-  store.createRecord(remoteId, "clients");
+  await store.create(rec);
+  await store.createRecord(remoteId, "clients");
 
   let action = "testCommand";
   let args = ["foo", "bar"];
   let extra = { flowID: "flowy" }
 
-  engine._sendCommandToClient(action, args, remoteId, extra);
+  await engine._sendCommandToClient(action, args, remoteId, extra);
 
   let newRecord = store._remoteClients[remoteId];
-  let clientCommands = engine._readCommands()[remoteId];
+  let clientCommands = (await engine._readCommands())[remoteId];
   notEqual(newRecord, undefined);
   equal(clientCommands.length, 1);
 
   let command = clientCommands[0];
   equal(command.command, action);
   equal(command.args.length, 2);
   deepEqual(command.args, args);
   ok(command.flowID);
 
   notEqual(tracker.changedIDs[remoteId], undefined);
 
-  cleanup();
+  await cleanup();
 });
 
 add_task(async function test_command_validation() {
   _("Verifies that command validation works properly.");
 
   let store = engine._store;
 
   let testCommands = [
@@ -448,25 +454,25 @@ add_task(async function test_command_val
     ["logout",      ["foo"],  false],
     ["__UNKNOWN__", [],       false]
   ];
 
   for (let [action, args, expectedResult] of testCommands) {
     let remoteId = Utils.makeGUID();
     let rec = new ClientsRec("clients", remoteId);
 
-    store.create(rec);
-    store.createRecord(remoteId, "clients");
+    await store.create(rec);
+    await store.createRecord(remoteId, "clients");
 
-    engine.sendCommand(action, args, remoteId);
+    await engine.sendCommand(action, args, remoteId);
 
     let newRecord = store._remoteClients[remoteId];
     notEqual(newRecord, undefined);
 
-    let clientCommands = engine._readCommands()[remoteId];
+    let clientCommands = (await engine._readCommands())[remoteId];
 
     if (expectedResult) {
       _("Ensuring command is sent: " + action);
       equal(clientCommands.length, 1);
 
       let command = clientCommands[0];
       equal(command.command, action);
       deepEqual(command.args, args);
@@ -478,68 +484,68 @@ add_task(async function test_command_val
       equal(clientCommands, undefined);
 
       if (store._tracker) {
         equal(engine._tracker[remoteId], undefined);
       }
     }
 
   }
-  cleanup();
+  await cleanup();
 });
 
 add_task(async function test_command_duplication() {
   _("Ensures duplicate commands are detected and not added");
 
   let store = engine._store;
   let remoteId = Utils.makeGUID();
   let rec = new ClientsRec("clients", remoteId);
-  store.create(rec);
-  store.createRecord(remoteId, "clients");
+  await store.create(rec);
+  await store.createRecord(remoteId, "clients");
 
   let action = "resetAll";
   let args = [];
 
-  engine.sendCommand(action, args, remoteId);
-  engine.sendCommand(action, args, remoteId);
+  await engine.sendCommand(action, args, remoteId);
+  await engine.sendCommand(action, args, remoteId);
 
-  let clientCommands = engine._readCommands()[remoteId];
+  let clientCommands = (await engine._readCommands())[remoteId];
   equal(clientCommands.length, 1);
 
   _("Check variant args length");
-  engine._saveCommands({});
+  await engine._saveCommands({});
 
   action = "resetEngine";
-  engine.sendCommand(action, [{ x: "foo" }], remoteId);
-  engine.sendCommand(action, [{ x: "bar" }], remoteId);
+  await engine.sendCommand(action, [{ x: "foo" }], remoteId);
+  await engine.sendCommand(action, [{ x: "bar" }], remoteId);
 
   _("Make sure we spot a real dupe argument.");
-  engine.sendCommand(action, [{ x: "bar" }], remoteId);
+  await engine.sendCommand(action, [{ x: "bar" }], remoteId);
 
-  clientCommands = engine._readCommands()[remoteId];
+  clientCommands = (await engine._readCommands())[remoteId];
   equal(clientCommands.length, 2);
 
-  cleanup();
+  await cleanup();
 });
 
 add_task(async function test_command_invalid_client() {
   _("Ensures invalid client IDs are caught");
 
   let id = Utils.makeGUID();
   let error;
 
   try {
-    engine.sendCommand("wipeAll", [], id);
+    await engine.sendCommand("wipeAll", [], id);
   } catch (ex) {
     error = ex;
   }
 
   equal(error.message.indexOf("Unknown remote client ID: "), 0);
 
-  cleanup();
+  await cleanup();
 });
 
 add_task(async function test_process_incoming_commands() {
   _("Ensures local commands are executed");
 
   engine.localCommands = [{ command: "logout", args: [] }];
 
   let ev = "weave:service:logout:finish";
@@ -550,21 +556,21 @@ add_task(async function test_process_inc
 
       resolve();
     };
 
     Svc.Obs.add(ev, handler);
   });
 
   // logout command causes processIncomingCommands to return explicit false.
-  ok(!engine.processIncomingCommands());
+  ok(!(await engine.processIncomingCommands()));
 
   await logoutPromise;
 
-  cleanup();
+  await cleanup();
 });
 
 add_task(async function test_filter_duplicate_names() {
   _("Ensure that we exclude clients with identical names that haven't synced in a week.");
 
   let now = Date.now() / 1000;
   let server = serverForFoo(engine);
   let user   = server.user("foo");
@@ -605,26 +611,27 @@ add_task(async function test_filter_dupl
     protocols: ["1.5"],
   }), now - 604820));
 
   try {
     let store = engine._store;
 
     _("First sync");
     strictEqual(engine.lastRecordUpload, 0);
-    engine._sync();
+    await engine._sync();
     ok(engine.lastRecordUpload > 0);
     deepEqual(user.collection("clients").keys().sort(),
               [recentID, dupeID, oldID, engine.localID].sort(),
               "Our record should be uploaded on first sync");
 
-    deepEqual(Object.keys(store.getAllIDs()).sort(),
+    let ids = await store.getAllIDs();
+    deepEqual(Object.keys(ids).sort(),
               [recentID, dupeID, oldID, engine.localID].sort(),
               "Duplicate ID should remain in getAllIDs");
-    ok(engine._store.itemExists(dupeID), "Dupe ID should be considered as existing for Sync methods.");
+    ok((await engine._store.itemExists(dupeID)), "Dupe ID should be considered as existing for Sync methods.");
     ok(!engine.remoteClientExists(dupeID), "Dupe ID should not be considered as existing for external methods.");
 
     // dupe desktop should not appear in .deviceTypes.
     equal(engine.deviceTypes.get("desktop"), 2);
     equal(engine.deviceTypes.get("mobile"), 1);
 
     // dupe desktop should not appear in stats
     deepEqual(engine.stats, {
@@ -639,26 +646,26 @@ add_task(async function test_filter_dupl
 
     // Check that a subsequent Sync doesn't report anything as being processed.
     let counts;
     Svc.Obs.add("weave:engine:sync:applied", function observe(subject, data) {
       Svc.Obs.remove("weave:engine:sync:applied", observe);
       counts = subject;
     });
 
-    engine._sync();
+    await engine._sync();
     equal(counts.applied, 0); // We didn't report applying any records.
     equal(counts.reconciled, 4); // We reported reconcilliation for all records
     equal(counts.succeeded, 0);
     equal(counts.failed, 0);
     equal(counts.newFailed, 0);
 
     _("Broadcast logout to all clients");
-    engine.sendCommand("logout", []);
-    engine._sync();
+    await engine.sendCommand("logout", []);
+    await engine._sync();
 
     let collection = server.getCollection("foo", "clients");
     let recentPayload = JSON.parse(JSON.parse(collection.payload(recentID)).ciphertext);
     compareCommands(recentPayload.commands, [{ command: "logout", args: [] }],
                     "Should send commands to the recent client");
 
     let oldPayload = JSON.parse(JSON.parse(collection.payload(oldID)).ciphertext);
     compareCommands(oldPayload.commands, [{ command: "logout", args: [] }],
@@ -674,19 +681,20 @@ add_task(async function test_filter_dupl
       name: engine.localName,
       type: "desktop",
       commands: [],
       version: "48",
       protocols: ["1.5"],
     }), now - 10));
 
     _("Second sync.");
-    engine._sync();
+    await engine._sync();
 
-    deepEqual(Object.keys(store.getAllIDs()).sort(),
+    ids = await store.getAllIDs();
+    deepEqual(Object.keys(ids).sort(),
               [recentID, oldID, dupeID, engine.localID].sort(),
               "Stale client synced, so it should no longer be marked as a dupe");
 
     ok(engine.remoteClientExists(dupeID), "Dupe ID should appear as it synced.");
 
     // Recently synced dupe desktop should appear in .deviceTypes.
     equal(engine.deviceTypes.get("desktop"), 3);
 
@@ -696,30 +704,30 @@ add_task(async function test_filter_dupl
       names: [engine.localName, "My Phone", engine.localName, "My old desktop"],
       numClients: 4,
     });
 
     ok(engine.remoteClientExists(dupeID), "recently synced dupe ID should now exist");
     equal(engine.remoteClients.length, 3, "recently synced dupe should now be in remoteClients");
 
   } finally {
-    cleanup();
+    await cleanup();
 
     try {
       server.deleteCollections("foo");
     } finally {
       await promiseStopServer(server);
     }
   }
 });
 
 add_task(async function test_command_sync() {
   _("Ensure that commands are synced across clients.");
 
-  engine._store.wipe();
+  await engine._store.wipe();
   generateNewKeys(Service.collectionKeys);
 
   let server   = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let user     = server.user("foo");
   let remoteId = Utils.makeGUID();
 
@@ -734,63 +742,63 @@ add_task(async function test_command_syn
     type: "desktop",
     commands: [],
     version: "48",
     protocols: ["1.5"],
   }), Date.now() / 1000));
 
   try {
     _("Syncing.");
-    engine._sync();
+    await engine._sync();
 
     _("Checking remote record was downloaded.");
     let clientRecord = engine._store._remoteClients[remoteId];
     notEqual(clientRecord, undefined);
     equal(clientRecord.commands.length, 0);
 
     _("Send a command to the remote client.");
-    engine.sendCommand("wipeAll", []);
-    let clientCommands = engine._readCommands()[remoteId];
+    await engine.sendCommand("wipeAll", []);
+    let clientCommands = (await engine._readCommands())[remoteId];
     equal(clientCommands.length, 1);
-    engine._sync();
+    await engine._sync();
 
     _("Checking record was uploaded.");
     notEqual(clientWBO(engine.localID).payload, undefined);
     ok(engine.lastRecordUpload > 0);
 
     notEqual(clientWBO(remoteId).payload, undefined);
 
     Svc.Prefs.set("client.GUID", remoteId);
     engine._resetClient();
     equal(engine.localID, remoteId);
     _("Performing sync on resetted client.");
-    engine._sync();
+    await engine._sync();
     notEqual(engine.localCommands, undefined);
     equal(engine.localCommands.length, 1);
 
     let command = engine.localCommands[0];
     equal(command.command, "wipeAll");
     equal(command.args.length, 0);
 
   } finally {
-    cleanup();
+    await cleanup();
 
     try {
       let collection = server.getCollection("foo", "clients");
       collection.remove(remoteId);
     } finally {
       await promiseStopServer(server);
     }
   }
 });
 
 add_task(async function test_clients_not_in_fxa_list() {
   _("Ensure that clients not in the FxA devices list are marked as stale.");
 
-  engine._store.wipe();
+  await engine._store.wipe();
   generateNewKeys(Service.collectionKeys);
 
   let server   = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let remoteId = Utils.makeGUID();
   let remoteId2 = Utils.makeGUID();
 
@@ -818,24 +826,24 @@ add_task(async function test_clients_not
   engine.fxAccounts = {
     notifyDevices() { return Promise.resolve(true); },
     getDeviceId() { return fxAccounts.getDeviceId(); },
     getDeviceList() { return Promise.resolve([{ id: remoteId }]); }
   };
 
   try {
     _("Syncing.");
-    engine._sync();
+    await engine._sync();
 
     ok(!engine._store._remoteClients[remoteId].stale);
     ok(engine._store._remoteClients[remoteId2].stale);
 
   } finally {
     engine.fxAccounts = fxAccounts;
-    cleanup();
+    await cleanup();
 
     try {
       let collection = server.getCollection("foo", "clients");
       collection.remove(remoteId);
     } finally {
       await promiseStopServer(server);
     }
   }
@@ -845,30 +853,30 @@ add_task(async function test_send_uri_to
   _("Ensure sendURIToClientForDisplay() sends command properly.");
 
   let tracker = engine._tracker;
   let store = engine._store;
 
   let remoteId = Utils.makeGUID();
   let rec = new ClientsRec("clients", remoteId);
   rec.name = "remote";
-  store.create(rec);
-  store.createRecord(remoteId, "clients");
+  await store.create(rec);
+  await store.createRecord(remoteId, "clients");
 
   tracker.clearChangedIDs();
   let initialScore = tracker.score;
 
   let uri = "http://www.mozilla.org/";
   let title = "Title of the Page";
-  engine.sendURIToClientForDisplay(uri, remoteId, title);
+  await engine.sendURIToClientForDisplay(uri, remoteId, title);
 
   let newRecord = store._remoteClients[remoteId];
 
   notEqual(newRecord, undefined);
-  let clientCommands = engine._readCommands()[remoteId];
+  let clientCommands = (await engine._readCommands())[remoteId];
   equal(clientCommands.length, 1);
 
   let command = clientCommands[0];
   equal(command.command, "displayURI");
   equal(command.args.length, 3);
   equal(command.args[0], uri);
   equal(command.args[1], engine.localID);
   equal(command.args[2], title);
@@ -876,24 +884,24 @@ add_task(async function test_send_uri_to
   ok(tracker.score > initialScore);
   ok(tracker.score - initialScore >= SCORE_INCREMENT_XLARGE);
 
   _("Ensure unknown client IDs result in exception.");
   let unknownId = Utils.makeGUID();
   let error;
 
   try {
-    engine.sendURIToClientForDisplay(uri, unknownId);
+    await engine.sendURIToClientForDisplay(uri, unknownId);
   } catch (ex) {
     error = ex;
   }
 
   equal(error.message.indexOf("Unknown remote client ID: "), 0);
 
-  cleanup();
+  await cleanup();
 });
 
 add_task(async function test_receive_display_uri() {
   _("Ensure processing of received 'displayURI' commands works.");
 
   // We don't set up WBOs and perform syncing because other tests verify
   // the command API works as advertised. This saves us a little work.
 
@@ -917,33 +925,33 @@ add_task(async function test_receive_dis
       Svc.Obs.remove(ev, handler);
 
       resolve({ subject, data });
     };
 
     Svc.Obs.add(ev, handler);
   });
 
-  ok(engine.processIncomingCommands());
+  ok((await engine.processIncomingCommands()));
 
   let { subject, data } = await promiseDisplayURI;
 
   equal(subject[0].uri, uri);
   equal(subject[0].clientId, remoteId);
   equal(subject[0].title, title);
   equal(data, null);
 
-  cleanup();
+  await cleanup();
 });
 
 add_task(async function test_optional_client_fields() {
   _("Ensure that we produce records with the fields added in Bug 1097222.");
 
   const SUPPORTED_PROTOCOL_VERSIONS = ["1.5"];
-  let local = engine._store.createRecord(engine.localID, "clients");
+  let local = await engine._store.createRecord(engine.localID, "clients");
   equal(local.name, engine.localName);
   equal(local.type, engine.localType);
   equal(local.version, Services.appinfo.version);
   deepEqual(local.protocols, SUPPORTED_PROTOCOL_VERSIONS);
 
   // Optional fields.
   // Make sure they're what they ought to be...
   equal(local.os, Services.appinfo.OS);
@@ -952,17 +960,17 @@ add_task(async function test_optional_cl
   // ... and also that they're non-empty.
   ok(!!local.os);
   ok(!!local.appPackage);
   ok(!!local.application);
 
   // We don't currently populate device or formfactor.
   // See Bug 1100722, Bug 1100723.
 
-  cleanup();
+  await cleanup();
 });
 
 add_task(async function test_merge_commands() {
   _("Verifies local commands for remote clients are merged with the server's");
 
   let now = Date.now() / 1000;
   let server = serverForFoo(engine);
 
@@ -995,37 +1003,37 @@ add_task(async function test_merge_comma
     }],
     version: "48",
     protocols: ["1.5"],
   }), now - 10));
 
   try {
     _("First sync. 2 records downloaded.");
     strictEqual(engine.lastRecordUpload, 0);
-    engine._sync();
+    await engine._sync();
 
     _("Broadcast logout to all clients");
-    engine.sendCommand("logout", []);
-    engine._sync();
+    await engine.sendCommand("logout", []);
+    await engine._sync();
 
     let collection = server.getCollection("foo", "clients");
     let desktopPayload = JSON.parse(JSON.parse(collection.payload(desktopID)).ciphertext);
     compareCommands(desktopPayload.commands, [{
       command: "displayURI",
       args: ["https://example.com", engine.localID, "Yak Herders Anonymous"],
     }, {
       command: "logout",
       args: [],
     }], "Should send the logout command to the desktop client");
 
     let mobilePayload = JSON.parse(JSON.parse(collection.payload(mobileID)).ciphertext);
     compareCommands(mobilePayload.commands, [{ command: "logout", args: [] }],
                     "Should not send a duplicate logout to the mobile client");
   } finally {
-    cleanup();
+    await cleanup();
 
     try {
       server.deleteCollections("foo");
     } finally {
       await promiseStopServer(server);
     }
   }
 });
@@ -1047,44 +1055,44 @@ add_task(async function test_duplicate_r
     commands: [],
     version: "48",
     protocols: ["1.5"],
   }), now - 10));
 
   try {
     _("First sync. 1 record downloaded.");
     strictEqual(engine.lastRecordUpload, 0);
-    engine._sync();
+    await engine._sync();
 
     _("Send tab to client");
-    engine.sendCommand("displayURI", ["https://example.com", engine.localID, "Yak Herders Anonymous"]);
-    engine._sync();
+    await engine.sendCommand("displayURI", ["https://example.com", engine.localID, "Yak Herders Anonymous"]);
+    await engine._sync();
 
     _("Simulate the desktop client consuming the command and syncing to the server");
     server.insertWBO("foo", "clients", new ServerWBO(desktopID, encryptPayload({
       id: desktopID,
       name: "Desktop client",
       type: "desktop",
       commands: [],
       version: "48",
       protocols: ["1.5"],
     }), now - 10));
 
     _("Send another tab to the desktop client");
-    engine.sendCommand("displayURI", ["https://foobar.com", engine.localID, "Foo bar!"], desktopID);
-    engine._sync();
+    await engine.sendCommand("displayURI", ["https://foobar.com", engine.localID, "Foo bar!"], desktopID);
+    await engine._sync();
 
     let collection = server.getCollection("foo", "clients");
     let desktopPayload = JSON.parse(JSON.parse(collection.payload(desktopID)).ciphertext);
     compareCommands(desktopPayload.commands, [{
       command: "displayURI",
       args: ["https://foobar.com", engine.localID, "Foo bar!"],
     }], "Should only send the second command to the desktop client");
   } finally {
-    cleanup();
+    await cleanup();
 
     try {
       server.deleteCollections("foo");
     } finally {
       await promiseStopServer(server);
     }
   }
 });
@@ -1119,24 +1127,24 @@ add_task(async function test_upload_afte
     commands: [],
     version: "48",
     protocols: ["1.5"],
   }), now - 10));
 
   try {
     _("First sync. 2 records downloaded.");
     strictEqual(engine.lastRecordUpload, 0);
-    engine._sync();
+    await engine._sync();
 
     _("Send tab to client");
-    engine.sendCommand("displayURI", ["https://example.com", engine.localID, "Yak Herders Anonymous"], deviceBID);
+    await engine.sendCommand("displayURI", ["https://example.com", engine.localID, "Yak Herders Anonymous"], deviceBID);
 
     const oldUploadOutgoing = SyncEngine.prototype._uploadOutgoing;
-    SyncEngine.prototype._uploadOutgoing = () => engine._onRecordsWritten([], [deviceBID]);
-    engine._sync();
+    SyncEngine.prototype._uploadOutgoing = async () => engine._onRecordsWritten([], [deviceBID]);
+    await engine._sync();
 
     let collection = server.getCollection("foo", "clients");
     let deviceBPayload = JSON.parse(JSON.parse(collection.payload(deviceBID)).ciphertext);
     compareCommands(deviceBPayload.commands, [{
       command: "displayURI", args: ["https://deviceclink.com", deviceCID, "Device C link"]
     }], "Should be the same because the upload failed");
 
     _("Simulate the client B consuming the command and syncing to the server");
@@ -1147,26 +1155,27 @@ add_task(async function test_upload_afte
       commands: [],
       version: "48",
       protocols: ["1.5"],
     }), now - 10));
 
     // Simulate reboot
     SyncEngine.prototype._uploadOutgoing = oldUploadOutgoing;
     engine = Service.clientsEngine = new ClientEngine(Service);
+    await engine.initialize();
 
-    engine._sync();
+    await engine._sync();
 
     deviceBPayload = JSON.parse(JSON.parse(collection.payload(deviceBID)).ciphertext);
     compareCommands(deviceBPayload.commands, [{
       command: "displayURI",
       args: ["https://example.com", engine.localID, "Yak Herders Anonymous"],
     }], "Should only had written our outgoing command");
   } finally {
-    cleanup();
+    await cleanup();
 
     try {
       server.deleteCollections("foo");
     } finally {
       await promiseStopServer(server);
     }
   }
 });
@@ -1217,22 +1226,22 @@ add_task(async function test_keep_cleare
   }), now - 10));
 
   try {
     _("First sync. Download remote and our record.");
     strictEqual(engine.lastRecordUpload, 0);
 
     let collection = server.getCollection("foo", "clients");
     const oldUploadOutgoing = SyncEngine.prototype._uploadOutgoing;
-    SyncEngine.prototype._uploadOutgoing = () => engine._onRecordsWritten([], [deviceBID]);
+    SyncEngine.prototype._uploadOutgoing = async () => engine._onRecordsWritten([], [deviceBID]);
     let commandsProcessed = 0;
     engine._handleDisplayURIs = (uris) => { commandsProcessed = uris.length };
 
-    engine._sync();
-    engine.processIncomingCommands(); // Not called by the engine.sync(), gotta call it ourselves
+    await engine._sync();
+    await engine.processIncomingCommands(); // Not called by the engine.sync(), gotta call it ourselves
     equal(commandsProcessed, 2, "We processed 2 commands");
 
     let localRemoteRecord = JSON.parse(JSON.parse(collection.payload(engine.localID)).ciphertext);
     compareCommands(localRemoteRecord.commands, [{
       command: "displayURI", args: ["https://deviceblink.com", deviceBID, "Device B link"]
     },
     {
       command: "displayURI", args: ["https://deviceclink.com", deviceCID, "Device C link"]
@@ -1260,30 +1269,32 @@ add_task(async function test_keep_cleare
       }],
       version: "48",
       protocols: ["1.5"],
     }), now - 10));
 
     // Simulate reboot
     SyncEngine.prototype._uploadOutgoing = oldUploadOutgoing;
     engine = Service.clientsEngine = new ClientEngine(Service);
+    await engine.initialize();
 
     commandsProcessed = 0;
     engine._handleDisplayURIs = (uris) => { commandsProcessed = uris.length };
-    engine._sync();
-    engine.processIncomingCommands();
+    await engine._sync();
+    await engine.processIncomingCommands();
     equal(commandsProcessed, 1, "We processed one command (the other were cleared)");
 
     localRemoteRecord = JSON.parse(JSON.parse(collection.payload(deviceBID)).ciphertext);
     deepEqual(localRemoteRecord.commands, [], "Should be empty");
   } finally {
-    cleanup();
+    await cleanup();
 
     // Reset service (remove mocks)
     engine = Service.clientsEngine = new ClientEngine(Service);
+    await engine.initialize();
     engine._resetClient();
 
     try {
       server.deleteCollections("foo");
     } finally {
       await promiseStopServer(server);
     }
   }
@@ -1315,34 +1326,34 @@ add_task(async function test_deleted_com
     type: "desktop",
     commands: [],
     version: "48",
     protocols: ["1.5"],
   }), now - 10));
 
   try {
     _("First sync. 2 records downloaded.");
-    engine._sync();
+    await engine._sync();
 
     _("Delete a record on the server.");
     let collection = server.getCollection("foo", "clients");
     collection.remove(deletedID);
 
     _("Broadcast a command to all clients");
-    engine.sendCommand("logout", []);
-    engine._sync();
+    await engine.sendCommand("logout", []);
+    await engine._sync();
 
     deepEqual(collection.keys().sort(), [activeID, engine.localID].sort(),
       "Should not reupload deleted clients");
 
     let activePayload = JSON.parse(JSON.parse(collection.payload(activeID)).ciphertext);
     compareCommands(activePayload.commands, [{ command: "logout", args: [] }],
       "Should send the command to the active client");
   } finally {
-    cleanup();
+    await cleanup();
 
     try {
       server.deleteCollections("foo");
     } finally {
       await promiseStopServer(server);
     }
   }
 });
@@ -1355,62 +1366,62 @@ add_task(async function test_send_uri_ac
 
   await SyncTestingInfrastructure(server);
   generateNewKeys(Service.collectionKeys);
 
   try {
     let fakeSenderID = Utils.makeGUID();
 
     _("Initial sync for empty clients collection");
-    engine._sync();
+    await engine._sync();
     let collection = server.getCollection("foo", "clients");
     let ourPayload = JSON.parse(JSON.parse(collection.payload(engine.localID)).ciphertext);
     ok(ourPayload, "Should upload our client record");
 
     _("Send a URL to the device on the server");
     ourPayload.commands = [{
       command: "displayURI",
       args: ["https://example.com", fakeSenderID, "Yak Herders Anonymous"],
       flowID: Utils.makeGUID(),
     }];
     server.insertWBO("foo", "clients", new ServerWBO(engine.localID, encryptPayload(ourPayload), now));
 
     _("Sync again");
-    engine._sync();
+    await engine._sync();
     compareCommands(engine.localCommands, [{
       command: "displayURI",
       args: ["https://example.com", fakeSenderID, "Yak Herders Anonymous"],
     }], "Should receive incoming URI");
-    ok(engine.processIncomingCommands(), "Should process incoming commands");
-    const clearedCommands = engine._readCommands()[engine.localID];
+    ok((await engine.processIncomingCommands()), "Should process incoming commands");
+    const clearedCommands = (await engine._readCommands())[engine.localID];
     compareCommands(clearedCommands, [{
       command: "displayURI",
       args: ["https://example.com", fakeSenderID, "Yak Herders Anonymous"],
     }], "Should mark the commands as cleared after processing");
 
     _("Check that the command was removed on the server");
-    engine._sync();
+    await engine._sync();
     ourPayload = JSON.parse(JSON.parse(collection.payload(engine.localID)).ciphertext);
     ok(ourPayload, "Should upload the synced client record");
     deepEqual(ourPayload.commands, [], "Should not reupload cleared commands");
   } finally {
-    cleanup();
+    await cleanup();
 
     try {
       server.deleteCollections("foo");
     } finally {
       await promiseStopServer(server);
     }
   }
 });
 
 add_task(async function test_command_sync() {
   _("Notify other clients when writing their record.");
 
-  engine._store.wipe();
+  await engine._store.wipe();
   generateNewKeys(Service.collectionKeys);
 
   let server    = serverForFoo(engine);
   await SyncTestingInfrastructure(server);
 
   let collection = server.getCollection("foo", "clients");
   let remoteId   = Utils.makeGUID();
   let remoteId2  = Utils.makeGUID();
@@ -1432,33 +1443,33 @@ add_task(async function test_command_syn
     type: "mobile",
     commands: [],
     version: "48",
     protocols: ["1.5"]
   }), Date.now() / 1000));
 
   try {
     equal(collection.count(), 2, "2 remote records written");
-    engine._sync();
+    await engine._sync();
     equal(collection.count(), 3, "3 remote records written (+1 for the synced local record)");
 
-    engine.sendCommand("wipeAll", []);
+    await engine.sendCommand("wipeAll", []);
     engine._tracker.addChangedID(engine.localID);
     const getClientFxaDeviceId = sinon.stub(engine, "getClientFxaDeviceId", (id) => "fxa-" + id);
     const engineMock = sinon.mock(engine);
     let _notifyCollectionChanged = engineMock.expects("_notifyCollectionChanged")
                                              .withArgs(["fxa-" + remoteId, "fxa-" + remoteId2]);
     _("Syncing.");
-    engine._sync();
+    await engine._sync();
     _notifyCollectionChanged.verify();
 
     engineMock.restore();
     getClientFxaDeviceId.restore();
   } finally {
-    cleanup();
+    await cleanup();
     engine._tracker.clearChangedIDs();
 
     try {
       server.deleteCollections("foo");
     } finally {
       await promiseStopServer(server);
     }
   }
@@ -1495,57 +1506,57 @@ add_task(async function ensureSameFlowID
       id: remoteId2,
       name: "Remote client 2",
       type: "mobile",
       commands: [],
       version: "48",
       protocols: ["1.5"]
     }), Date.now() / 1000));
 
-    engine._sync();
-    engine.sendCommand("wipeAll", []);
-    engine._sync();
+    await engine._sync();
+    await engine.sendCommand("wipeAll", []);
+    await engine._sync();
     equal(events.length, 2);
     // we don't know what the flowID is, but do know it should be the same.
     equal(events[0].extra.flowID, events[1].extra.flowID);
     // Wipe remote clients to ensure deduping doesn't prevent us from adding the command.
     for (let client of Object.values(engine._store._remoteClients)) {
       client.commands = [];
     }
     // check it's correctly used when we specify a flow ID
     events.length = 0;
     let flowID = Utils.makeGUID();
-    engine.sendCommand("wipeAll", [], null, { flowID });
-    engine._sync();
+    await engine.sendCommand("wipeAll", [], null, { flowID });
+    await engine._sync();
     equal(events.length, 2);
     equal(events[0].extra.flowID, flowID);
     equal(events[1].extra.flowID, flowID);
 
     // Wipe remote clients to ensure deduping doesn't prevent us from adding the command.
     for (let client of Object.values(engine._store._remoteClients)) {
       client.commands = [];
     }
 
     // and that it works when something else is in "extra"
     events.length = 0;
-    engine.sendCommand("wipeAll", [], null, { reason: "testing" });
-    engine._sync();
+    await engine.sendCommand("wipeAll", [], null, { reason: "testing" });
+    await engine._sync();
     equal(events.length, 2);
     equal(events[0].extra.flowID, events[1].extra.flowID);
     equal(events[0].extra.reason, "testing");
     equal(events[1].extra.reason, "testing");
     // Wipe remote clients to ensure deduping doesn't prevent us from adding the command.
     for (let client of Object.values(engine._store._remoteClients)) {
       client.commands = [];
     }
 
     // and when both are specified.
     events.length = 0;
-    engine.sendCommand("wipeAll", [], null, { reason: "testing", flowID });
-    engine._sync();
+    await engine.sendCommand("wipeAll", [], null, { reason: "testing", flowID });
+    await engine._sync();
     equal(events.length, 2);
     equal(events[0].extra.flowID, flowID);
     equal(events[1].extra.flowID, flowID);
     equal(events[0].extra.reason, "testing");
     equal(events[1].extra.reason, "testing");
     // Wipe remote clients to ensure deduping doesn't prevent us from adding the command.
     for (let client of Object.values(engine._store._remoteClients)) {
       client.commands = [];
@@ -1587,27 +1598,27 @@ add_task(async function test_duplicate_c
       id: remoteId2,
       name: "Remote client 2",
       type: "mobile",
       commands: [],
       version: "48",
       protocols: ["1.5"]
     }), Date.now() / 1000));
 
-    engine._sync();
+    await engine._sync();
     // Make sure deduping works before syncing
-    engine.sendURIToClientForDisplay("https://example.com", remoteId, "Example");
-    engine.sendURIToClientForDisplay("https://example.com", remoteId, "Example");
+    await engine.sendURIToClientForDisplay("https://example.com", remoteId, "Example");
+    await engine.sendURIToClientForDisplay("https://example.com", remoteId, "Example");
     equal(events.length, 1);
-    engine._sync();
+    await engine._sync();
     // And after syncing.
-    engine.sendURIToClientForDisplay("https://example.com", remoteId, "Example");
+    await engine.sendURIToClientForDisplay("https://example.com", remoteId, "Example");
     equal(events.length, 1);
     // Ensure we aren't deduping commands to different clients
-    engine.sendURIToClientForDisplay("https://example.com", remoteId2, "Example");
+    await engine.sendURIToClientForDisplay("https://example.com", remoteId2, "Example");
     equal(events.length, 2);
   } finally {
     Service.recordTelemetryEvent = origRecordTelemetryEvent;
     cleanup();
     await promiseStopServer(server);
   }
 });
 
@@ -1629,21 +1640,21 @@ add_task(async function test_other_clien
       calls++;
       return Promise.resolve(true);
     }
   };
 
   try {
     engine.lastRecordUpload = 0;
     _("First sync, should notify other clients");
-    engine._sync();
+    await engine._sync();
     equal(calls, 1);
 
     _("Second sync, should not notify other clients");
-    engine._sync();
+    await engine._sync();
     equal(calls, 1);
   } finally {
     engine.fxAccounts = fxAccounts;
     cleanup();
     await promiseStopServer(server);
   }
 });
 
@@ -1684,21 +1695,21 @@ add_task(async function device_disconnec
 
 add_task(async function process_incoming_refreshes_known_stale_clients() {
   const stubProcessIncoming = sinon.stub(SyncEngine.prototype, "_processIncoming");
   const stubRefresh = sinon.stub(engine, "_refreshKnownStaleClients", () => {
     engine._knownStaleFxADeviceIds = ["one", "two"];
   });
 
   engine._knownStaleFxADeviceIds = null;
-  engine._processIncoming();
+  await engine._processIncoming();
   ok(stubRefresh.calledOnce, "Should refresh the known stale clients");
   stubRefresh.reset();
 
-  engine._processIncoming();
+  await engine._processIncoming();
   ok(stubRefresh.notCalled, "Should not refresh the known stale clients since it's already populated");
 
   stubProcessIncoming.restore();
   stubRefresh.restore();
 });
 
 function run_test() {
   initTestLogging("Trace");
--- a/services/sync/tests/unit/test_clients_escape.js
+++ b/services/sync/tests/unit/test_clients_escape.js
@@ -16,17 +16,17 @@ add_task(async function test_clients_esc
   let engine = Service.clientsEngine;
 
   try {
     _("Test that serializing client records results in uploadable ascii");
     engine.localID = "ascii";
     engine.localName = "wéävê";
 
     _("Make sure we have the expected record");
-    let record = engine._createRecord("ascii");
+    let record = await engine._createRecord("ascii");
     do_check_eq(record.id, "ascii");
     do_check_eq(record.name, "wéävê");
 
     _("Encrypting record...");
     record.encrypt(keyBundle);
     _("Encrypted.");
 
     let serialized = JSON.stringify(record);
@@ -43,15 +43,15 @@ add_task(async function test_clients_esc
     do_check_eq(checkCount, serialized.length);
 
     _("Making sure the record still looks like it did before");
     record.decrypt(keyBundle);
     do_check_eq(record.id, "ascii");
     do_check_eq(record.name, "wéävê");
 
     _("Sanity check that creating the record also gives the same");
-    record = engine._createRecord("ascii");
+    record = await engine._createRecord("ascii");
     do_check_eq(record.id, "ascii");
     do_check_eq(record.name, "wéävê");
   } finally {
     Svc.Prefs.resetBranch("");
   }
 });
--- a/services/sync/tests/unit/test_collections_recovery.js
+++ b/services/sync/tests/unit/test_collections_recovery.js
@@ -1,16 +1,18 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 // Verify that we wipe the server if we have to regenerate keys.
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
+initTestLogging("Trace");
+
 add_task(async function test_missing_crypto_collection() {
   enableValidationPrefs();
 
   let johnHelper = track_collections_helper();
   let johnU      = johnHelper.with_updated_collection;
   let johnColls  = johnHelper.collections;
 
   let empty = false;
@@ -41,29 +43,29 @@ add_task(async function test_missing_cry
       johnU(coll, new ServerCollection({}, true).handler());
   }
   let server = httpd_setup(handlers);
   await configureIdentity({username: "johndoe"}, server);
 
   try {
     let fresh = 0;
     let orig  = Service._freshStart;
-    Service._freshStart = function() {
+    Service._freshStart = async function() {
       _("Called _freshStart.");
-      orig.call(Service);
+      await orig.call(Service);
       fresh++;
     };
 
     _("Startup, no meta/global: freshStart called once.");
     await sync_and_validate_telem();
     do_check_eq(fresh, 1);
     fresh = 0;
 
     _("Regular sync: no need to freshStart.");
-    Service.sync();
+    await Service.sync();
     do_check_eq(fresh, 0);
 
     _("Simulate a bad info/collections.");
     delete johnColls.crypto;
     await sync_and_validate_telem();
     do_check_eq(fresh, 1);
     fresh = 0;
 
@@ -71,13 +73,8 @@ add_task(async function test_missing_cry
     await sync_and_validate_telem();
     do_check_eq(fresh, 0);
 
   } finally {
     Svc.Prefs.resetBranch("");
     await promiseStopServer(server);
   }
 });
-
-function run_test() {
-  initTestLogging("Trace");
-  run_next_test();
-}
--- a/services/sync/tests/unit/test_corrupt_keys.js
+++ b/services/sync/tests/unit/test_corrupt_keys.js
@@ -13,17 +13,17 @@ Cu.import("resource://services-sync/stat
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
 add_task(async function test_locally_changed_keys() {
   enableValidationPrefs();
 
   let hmacErrorCount = 0;
   function counting(f) {
-    return function() {
+    return async function() {
       hmacErrorCount++;
       return f.call(this);
     };
   }
 
   Service.handleHMACEvent = counting(Service.handleHMACEvent);
 
   let server  = new SyncServer();
@@ -37,17 +37,17 @@ add_task(async function test_locally_cha
 
   try {
     Svc.Prefs.set("registerEngines", "Tab");
 
     await configureIdentity({ username: "johndoe" }, server);
     // We aren't doing a .login yet, so fudge the cluster URL.
     Service.clusterURL = Service.identity._token.endpoint;
 
-    Service.engineManager.register(HistoryEngine);
+    await Service.engineManager.register(HistoryEngine);
     Service.engineManager.unregister("addons");
 
     function corrupt_local_keys() {
       Service.collectionKeys._default.keyPair = [Weave.Crypto.generateRandomKey(),
                                                  Weave.Crypto.generateRandomKey()];
     }
 
     _("Setting meta.");
@@ -62,17 +62,17 @@ add_task(async function test_locally_cha
 
     // Upload keys.
     generateNewKeys(Service.collectionKeys);
     let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
     serverKeys.encrypt(Service.identity.syncKeyBundle);
     do_check_true((await serverKeys.upload(Service.resource(Service.cryptoKeysURL))).success);
 
     // Check that login works.
-    do_check_true(Service.login());
+    do_check_true((await Service.login()));
     do_check_true(Service.isLoggedIn);
 
     // Sync should upload records.
     await sync_and_validate_telem();
 
     // Tabs exist.
     _("Tabs modified: " + johndoe.modified("tabs"));
     do_check_true(johndoe.modified("tabs") > 0);
--- a/services/sync/tests/unit/test_declined.js
+++ b/services/sync/tests/unit/test_declined.js
@@ -2,20 +2,16 @@
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 Cu.import("resource://services-sync/stages/declined.js");
 Cu.import("resource://services-sync/stages/enginesync.js");
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-common/observers.js");
 
-function run_test() {
-  run_next_test();
-}
-
 function PetrolEngine() {}
 PetrolEngine.prototype.name = "petrol";
 
 function DieselEngine() {}
 DieselEngine.prototype.name = "diesel";
 
 function DummyEngine() {}
 DummyEngine.prototype.name = "dummy";
@@ -41,34 +37,34 @@ function getEngineManager() {
  *
  * Push it into the EngineSynchronizer to set enabled; verify that those are
  * correct.
  *
  * Then push it into DeclinedEngines to set declined; verify that none are
  * declined, and a notification is sent for our locally disabled-but-not-
  * declined engines.
  */
-add_test(function testOldMeta() {
+add_task(async function testOldMeta() {
   let meta = {
     payload: {
       engines: {
         "petrol": 1,
         "diesel": 2,
         "nonlocal": 3,             // Enabled but not supported.
       },
     },
   };
 
   _("Record: " + JSON.stringify(meta));
 
   let manager = getEngineManager();
 
   // Update enabled from meta/global.
   let engineSync = new EngineSynchronizer(Service);
-  engineSync._updateEnabledFromMeta(meta, 3, manager);
+  await engineSync._updateEnabledFromMeta(meta, 3, manager);
 
   Assert.ok(manager._engines["petrol"].enabled, "'petrol' locally enabled.");
   Assert.ok(manager._engines["diesel"].enabled, "'diesel' locally enabled.");
   Assert.ok(!("nonlocal" in manager._engines), "We don't know anything about the 'nonlocal' engine.");
   Assert.ok(!manager._engines["actual"].enabled, "'actual' not locally enabled.");
   Assert.ok(!manager.isDeclined("actual"), "'actual' not declined, though.");
 
   let declinedEngines = new DeclinedEngines(Service);
@@ -91,17 +87,17 @@ add_test(function testOldMeta() {
 
 /**
  * 'Fetch' a meta/global that declines an engine we don't
  * recognize. Ensure that we track that declined engine along
  * with any we locally declined, and that the meta/global
  * record is marked as changed and includes all declined
  * engines.
  */
-add_test(function testDeclinedMeta() {
+add_task(async function testDeclinedMeta() {
   let meta = {
     payload: {
       engines: {
         "petrol": 1,
         "diesel": 2,
         "nonlocal": 3,             // Enabled but not supported.
       },
       declined: ["nonexistent"],   // Declined and not supported.
@@ -137,17 +133,15 @@ add_test(function testDeclinedMeta() {
     Assert.ok(0 <= declined.indexOf("nonexistent"), "'nonexistent' was declined on the server.");
 
     Assert.ok(0 <= declined.indexOf("localdecline"), "'localdecline' was declined locally.");
 
     // The meta/global is modified, too.
     Assert.ok(0 <= meta.payload.declined.indexOf("nonexistent"), "meta/global's declined contains 'nonexistent'.");
     Assert.ok(0 <= meta.payload.declined.indexOf("localdecline"), "meta/global's declined contains 'localdecline'.");
     Assert.strictEqual(true, meta.changed, "meta/global was changed.");
-
-    run_next_test();
   }
 
   Observers.add("weave:engines:notdeclined", onNotDeclined);
 
   declinedEngines.updateDeclined(meta, manager);
 });
 
--- a/services/sync/tests/unit/test_doctor.js
+++ b/services/sync/tests/unit/test_doctor.js
@@ -72,17 +72,17 @@ add_task(async function test_repairs_sta
   }
   let engine = {
     name: "test-engine",
     getValidator() {
       return validator;
     }
   }
   let requestor = {
-    startRepairs(validationInfo, flowID) {
+    async startRepairs(validationInfo, flowID) {
       ok(flowID, "got a flow ID");
       equal(validationInfo, problems);
       repairStarted = true;
       return true;
     },
     tryServerOnlyRepairs() {
       return false;
     }
@@ -106,17 +106,17 @@ add_task(async function test_repairs_sta
   await doctor.consult([engine]);
   await promiseValidationDone;
   ok(repairStarted);
 });
 
 add_task(async function test_repairs_advanced_daily() {
   let repairCalls = 0;
   let requestor = {
-    continueRepairs() {
+    async continueRepairs() {
       repairCalls++;
     },
     tryServerOnlyRepairs() {
       return false;
     }
   }
   // start now at just after REPAIR_ADVANCE_PERIOD so we do a a first one.
   let now = REPAIR_ADVANCE_PERIOD + 1;
@@ -160,17 +160,17 @@ add_task(async function test_repairs_ski
   }
   let engine = {
     name: "test-engine",
     getValidator() {
       return validator;
     }
   }
   let requestor = {
-    startRepairs(validationInfo, flowID) {
+    async startRepairs(validationInfo, flowID) {
       assert.ok(false, "Never should start repairs");
     },
     tryServerOnlyRepairs() {
       return false;
     }
   }
   let doctor = mockDoctor({
     _getEnginesToValidate(recentlySyncedEngines) {
--- a/services/sync/tests/unit/test_engine.js
+++ b/services/sync/tests/unit/test_engine.js
@@ -9,17 +9,17 @@ Cu.import("resource://services-sync/util
 
 function SteamStore(engine) {
   Store.call(this, "Steam", engine);
   this.wasWiped = false;
 }
 SteamStore.prototype = {
   __proto__: Store.prototype,
 
-  wipe() {
+  async wipe() {
     this.wasWiped = true;
   }
 };
 
 function SteamTracker(name, engine) {
   Tracker.call(this, name || "Steam", engine);
 }
 SteamTracker.prototype = {
@@ -32,21 +32,21 @@ function SteamEngine(name, service) {
   this.wasReset = false;
   this.wasSynced = false;
 }
 SteamEngine.prototype = {
   __proto__: Engine.prototype,
   _storeObj: SteamStore,
   _trackerObj: SteamTracker,
 
-  _resetClient() {
+  async _resetClient() {
     this.wasReset = true;
   },
 
-  _sync() {
+  async _sync() {
     this.wasSynced = true;
   }
 };
 
 var engineObserver = {
   topics: [],
 
   observe(subject, topic, data) {
@@ -100,17 +100,17 @@ add_task(async function test_score() {
   do_check_eq(engine.score, 5);
 });
 
 add_task(async function test_resetClient() {
   _("Engine.resetClient calls _resetClient");
   let engine = new SteamEngine("Steam", Service);
   do_check_false(engine.wasReset);
 
-  engine.resetClient();
+  await engine.resetClient();
   do_check_true(engine.wasReset);
   do_check_eq(engineObserver.topics[0], "weave:engine:reset-client:start");
   do_check_eq(engineObserver.topics[1], "weave:engine:reset-client:finish");
 
   await cleanup(engine);
 });
 
 add_task(async function test_invalidChangedIDs() {
@@ -135,17 +135,17 @@ add_task(async function test_invalidChan
 add_task(async function test_wipeClient() {
   _("Engine.wipeClient calls resetClient, wipes store, clears changed IDs");
   let engine = new SteamEngine("Steam", Service);
   do_check_false(engine.wasReset);
   do_check_false(engine._store.wasWiped);
   do_check_true(engine._tracker.addChangedID("a-changed-id"));
   do_check_true("a-changed-id" in engine._tracker.changedIDs);
 
-  engine.wipeClient();
+  await engine.wipeClient();
   do_check_true(engine.wasReset);
   do_check_true(engine._store.wasWiped);
   do_check_eq(JSON.stringify(engine._tracker.changedIDs), "{}");
   do_check_eq(engineObserver.topics[0], "weave:engine:wipe-client:start");
   do_check_eq(engineObserver.topics[1], "weave:engine:reset-client:start");
   do_check_eq(engineObserver.topics[2], "weave:engine:reset-client:finish");
   do_check_eq(engineObserver.topics[3], "weave:engine:wipe-client:finish");
 
@@ -168,24 +168,24 @@ add_task(async function test_enabled() {
 });
 
 add_task(async function test_sync() {
   let engine = new SteamEngine("Steam", Service);
   try {
     _("Engine.sync doesn't call _sync if it's not enabled");
     do_check_false(engine.enabled);
     do_check_false(engine.wasSynced);
-    engine.sync();
+    await engine.sync();
 
     do_check_false(engine.wasSynced);
 
     _("Engine.sync calls _sync if it's enabled");
     engine.enabled = true;
 
-    engine.sync();
+    await engine.sync();
     do_check_true(engine.wasSynced);
     do_check_eq(engineObserver.topics[0], "weave:engine:sync:start");
     do_check_eq(engineObserver.topics[1], "weave:engine:sync:finish");
   } finally {
     await cleanup(engine);
   }
 });
 
--- a/services/sync/tests/unit/test_engine_abort.js
+++ b/services/sync/tests/unit/test_engine_abort.js
@@ -25,39 +25,39 @@ add_task(async function test_processInco
   generateNewKeys(Service.collectionKeys);
 
   _("Create some server data.");
   let meta_global = Service.recordManager.set(engine.metaURL,
                                               new WBORecord(engine.metaURL));
   meta_global.payload.engines = {rotary: {version: engine.version,
                                           syncID: engine.syncID}};
   _("Fake applyIncoming to abort.");
-  engine._store.applyIncoming = function(record) {
+  engine._store.applyIncoming = async function(record) {
     let ex = {code: Engine.prototype.eEngineAbortApplyIncoming,
               cause: "Nooo"};
     _("Throwing: " + JSON.stringify(ex));
     throw ex;
   };
 
   _("Trying _processIncoming. It will throw after aborting.");
   let err;
   try {
-    engine._syncStartup();
-    engine._processIncoming();
+    await engine._syncStartup();
+    await engine._processIncoming();
   } catch (ex) {
     err = ex;
   }
 
   do_check_eq(err, "Nooo");
   err = undefined;
 
   _("Trying engine.sync(). It will abort without error.");
   try {
     // This will quietly fail.
-    engine.sync();
+    await engine.sync();
   } catch (ex) {
     err = ex;
   }
 
   do_check_eq(err, undefined);
 
   await promiseStopServer(server);
   Svc.Prefs.resetBranch("");
--- a/services/sync/tests/unit/test_engine_changes_during_sync.js
+++ b/services/sync/tests/unit/test_engine_changes_during_sync.js
@@ -1,11 +1,10 @@
 Cu.import("resource://gre/modules/FormHistory.jsm");
 Cu.import("resource://gre/modules/Log.jsm");
-Cu.import("resource://services-common/async.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/engines/bookmarks.js");
 Cu.import("resource://services-sync/engines/history.js");
 Cu.import("resource://services-sync/engines/forms.js");
 Cu.import("resource://services-sync/engines/passwords.js");
 Cu.import("resource://services-sync/engines/prefs.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 
@@ -25,17 +24,17 @@ Log.repository.getLogger("Sqlite").level
 async function assertChildGuids(folderGuid, expectedChildGuids, message) {
   let tree = await PlacesUtils.promiseBookmarksTree(folderGuid);
   let childGuids = tree.children.map(child => child.guid);
   deepEqual(childGuids, expectedChildGuids, message);
 }
 
 async function cleanup(engine, server) {
   Svc.Obs.notify("weave:engine:stop-tracking");
-  engine._store.wipe();
+  await engine._store.wipe();
   Svc.Prefs.resetBranch("");
   Service.recordManager.clearCache();
   await promiseStopServer(server);
 }
 
 add_task(async function test_history_change_during_sync() {
   _("Ensure that we don't bump the score when applying history records.");
 
@@ -44,24 +43,24 @@ add_task(async function test_history_cha
   let engine = Service.engineManager.get("history");
   let server = serverForEnginesWithKeys({"foo": "password"}, [engine]);
   await SyncTestingInfrastructure(server);
   let collection = server.user("foo").collection("history");
 
   // Override `applyIncomingBatch` to insert a record while we're applying
   // changes. The tracker should ignore this change.
   let { applyIncomingBatch } = engine._store;
-  engine._store.applyIncomingBatch = function(records) {
+  engine._store.applyIncomingBatch = async function(records) {
     _("Inserting local history visit");
     engine._store.applyIncomingBatch = applyIncomingBatch;
     let failed;
     try {
-      Async.promiseSpinningly(addVisit("during_sync"));
+      await addVisit("during_sync");
     } finally {
-      failed = applyIncomingBatch.call(this, records);
+      failed = await applyIncomingBatch.call(this, records);
     }
     return failed;
   };
 
   Svc.Obs.notify("weave:engine:start-tracking");
 
   try {
     let remoteRec = new HistoryRec("history", "UrOOuzE5QM-e");
@@ -100,26 +99,26 @@ add_task(async function test_passwords_c
   enableValidationPrefs();
 
   let engine = Service.engineManager.get("passwords");
   let server = serverForEnginesWithKeys({"foo": "password"}, [engine]);
   await SyncTestingInfrastructure(server);
   let collection = server.user("foo").collection("passwords");
 
   let { applyIncomingBatch } = engine._store;
-  engine._store.applyIncomingBatch = function(records) {
+  engine._store.applyIncomingBatch = async function(records) {
     _("Inserting local password");
     engine._store.applyIncomingBatch = applyIncomingBatch;
     let failed;
     try {
       let login = new LoginInfo("https://example.com", "", null, "username",
         "password", "", "");
       Services.logins.addLogin(login);
     } finally {
-      failed = applyIncomingBatch.call(this, records);
+      failed = await applyIncomingBatch.call(this, records);
     }
     return failed;
   };
 
   Svc.Obs.notify("weave:engine:start-tracking");
 
   try {
     let remoteRec = new LoginRec("passwords", "{765e3d6e-071d-d640-a83d-81a7eb62d3ed}");
@@ -161,25 +160,25 @@ add_task(async function test_prefs_chang
   enableValidationPrefs();
 
   let engine = Service.engineManager.get("prefs");
   let server = serverForEnginesWithKeys({"foo": "password"}, [engine]);
   await SyncTestingInfrastructure(server);
   let collection = server.user("foo").collection("prefs");
 
   let { applyIncomingBatch } = engine._store;
-  engine._store.applyIncomingBatch = function(records) {
+  engine._store.applyIncomingBatch = async function(records) {
     _("Updating local pref value");
     engine._store.applyIncomingBatch = applyIncomingBatch;
     let failed;
     try {
       // Change the value of a synced pref.
       Services.prefs.setCharPref(TEST_PREF, "hello");
     } finally {
-      failed = applyIncomingBatch.call(this, records);
+      failed = await applyIncomingBatch.call(this, records);
     }
     return failed;
   };
 
   Svc.Obs.notify("weave:engine:start-tracking");
 
   try {
     // All synced prefs are stored in a single record, so we'll only ever
@@ -222,32 +221,32 @@ add_task(async function test_forms_chang
   enableValidationPrefs();
 
   let engine = Service.engineManager.get("forms");
   let server = serverForEnginesWithKeys({"foo": "password"}, [engine]);
   await SyncTestingInfrastructure(server);
   let collection = server.user("foo").collection("forms");
 
   let { applyIncomingBatch } = engine._store;
-  engine._store.applyIncomingBatch = function(records) {
+  engine._store.applyIncomingBatch = async function(records) {
     _("Inserting local form history entry");
     engine._store.applyIncomingBatch = applyIncomingBatch;
     let failed;
     try {
-      Async.promiseSpinningly(new Promise(resolve => {
+      await new Promise(resolve => {
         FormHistory.update([{
           op: "add",
           fieldname: "favoriteDrink",
           value: "cocoa",
         }], {
           handleCompletion: resolve,
         });
-      }));
+      });
     } finally {
-      failed = applyIncomingBatch.call(this, records);
+      failed = await applyIncomingBatch.call(this, records);
     }
     return failed;
   };
 
   Svc.Obs.notify("weave:engine:start-tracking");
 
   try {
     // Add an existing remote form history entry. We shouldn't bump the score when
@@ -300,28 +299,28 @@ add_task(async function test_bookmark_ch
   let engine = Service.engineManager.get("bookmarks");
   let server = serverForEnginesWithKeys({"foo": "password"}, [engine]);
   await SyncTestingInfrastructure(server);
   let collection = server.user("foo").collection("bookmarks");
 
   let bmk3; // New child of Folder 1, created locally during sync.
 
   let { applyIncomingBatch } = engine._store;
-  engine._store.applyIncomingBatch = function(records) {
+  engine._store.applyIncomingBatch = async function(records) {
     _("Inserting bookmark into local store");
     engine._store.applyIncomingBatch = applyIncomingBatch;
     let failed;
     try {
-      bmk3 = Async.promiseSpinningly(PlacesUtils.bookmarks.insert({
+      bmk3 = await PlacesUtils.bookmarks.insert({
         parentGuid: folder1.guid,
         url: "https://mozilla.org/",
         title: "Mozilla",
-      }));
+      });
     } finally {
-      failed = applyIncomingBatch.call(this, records);
+      failed = await applyIncomingBatch.call(this, records);
     }
     return failed;
   };
 
   // New bookmarks that should be uploaded during the first sync.
   let folder1 = await PlacesUtils.bookmarks.insert({
     type: PlacesUtils.bookmarks.TYPE_FOLDER,
     parentGuid: PlacesUtils.bookmarks.toolbarGuid,
@@ -394,17 +393,17 @@ add_task(async function test_bookmark_ch
       collection.insert(bmk4_guid, encryptPayload(remoteTaggedBmk.cleartext));
     }
 
     await assertChildGuids(folder1.guid, [tbBmk.guid],
       "Folder should have 1 child before first sync");
 
     let pingsPromise = wait_for_pings(2);
 
-    let changes = engine.pullNewChanges();
+    let changes = await engine.pullNewChanges();
     deepEqual(Object.keys(changes).sort(), [
       folder1.guid,
       tbBmk.guid,
       "menu",
       "mobile",
       "toolbar",
       "unfiled",
     ].sort(), "Should track bookmark and folder created before first sync");
@@ -436,17 +435,17 @@ add_task(async function test_bookmark_ch
       "Folder 1 should have 3 children after first sync");
     await assertChildGuids(folder2_guid, [bmk4_guid, tagQuery_guid],
       "Folder 2 should have 2 children after first sync");
     let taggedURIs = PlacesUtils.tagging.getURIsForTag("taggy");
     equal(taggedURIs.length, 1, "Should have 1 tagged URI");
     equal(taggedURIs[0].spec, "https://example.org/",
       "Synced tagged bookmark should appear in tagged URI list");
 
-    changes = engine.pullNewChanges();
+    changes = await engine.pullNewChanges();
     deepEqual(changes, {},
       "Should have already uploaded changes in follow-up sync");
 
     // First ping won't include validation data, since we've changed bookmarks
     // and `canValidate` will indicate it can't proceed.
     let engineData = pings.map(p =>
       p.syncs[0].engines.find(e => e.name == "bookmarks")
     );
--- a/services/sync/tests/unit/test_enginemanager.js
+++ b/services/sync/tests/unit/test_enginemanager.js
@@ -1,117 +1,111 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/service.js");
 
-function run_test() {
-  run_next_test();
-}
-
 function PetrolEngine() {}
 PetrolEngine.prototype.name = "petrol";
 PetrolEngine.prototype.finalize = async function() {};
 
 function DieselEngine() {}
 DieselEngine.prototype.name = "diesel";
 DieselEngine.prototype.finalize = async function() {};
 
 function DummyEngine() {}
 DummyEngine.prototype.name = "dummy";
 DummyEngine.prototype.finalize = async function() {};
 
 function ActualEngine() {}
 ActualEngine.prototype = {__proto__: Engine.prototype,
                           name: "actual"};
 
-add_test(function test_basics() {
+add_task(async function test_basics() {
   _("We start out with a clean slate");
 
   let manager = new EngineManager(Service);
 
-  let engines = manager.getAll();
+  let engines = await manager.getAll();
   do_check_eq(engines.length, 0);
-  do_check_eq(manager.get("dummy"), undefined);
+  do_check_eq((await manager.get("dummy")), undefined);
 
   _("Register an engine");
-  manager.register(DummyEngine);
-  let dummy = manager.get("dummy");
+  await manager.register(DummyEngine);
+  let dummy = await manager.get("dummy");
   do_check_true(dummy instanceof DummyEngine);
 
-  engines = manager.getAll();
+  engines = await manager.getAll();
   do_check_eq(engines.length, 1);
   do_check_eq(engines[0], dummy);
 
   _("Register an already registered engine is ignored");
-  manager.register(DummyEngine);
-  do_check_eq(manager.get("dummy"), dummy);
+  await manager.register(DummyEngine);
+  do_check_eq((await manager.get("dummy")), dummy);
 
   _("Register multiple engines in one go");
-  manager.register([PetrolEngine, DieselEngine]);
-  let petrol = manager.get("petrol");
-  let diesel = manager.get("diesel");
+  await manager.register([PetrolEngine, DieselEngine]);
+  let petrol = await manager.get("petrol");
+  let diesel = await manager.get("diesel");
   do_check_true(petrol instanceof PetrolEngine);
   do_check_true(diesel instanceof DieselEngine);
 
-  engines = manager.getAll();
+  engines = await manager.getAll();
   do_check_eq(engines.length, 3);
   do_check_neq(engines.indexOf(petrol), -1);
   do_check_neq(engines.indexOf(diesel), -1);
 
   _("Retrieve multiple engines in one go");
-  engines = manager.get(["dummy", "diesel"]);
+  engines = await manager.get(["dummy", "diesel"]);
   do_check_eq(engines.length, 2);
   do_check_neq(engines.indexOf(dummy), -1);
   do_check_neq(engines.indexOf(diesel), -1);
 
   _("getEnabled() only returns enabled engines");
-  engines = manager.getEnabled();
+  engines = await manager.getEnabled();
   do_check_eq(engines.length, 0);
 
   petrol.enabled = true;
-  engines = manager.getEnabled();
+  engines = await manager.getEnabled();
   do_check_eq(engines.length, 1);
   do_check_eq(engines[0], petrol);
 
   dummy.enabled = true;
   diesel.enabled = true;
-  engines = manager.getEnabled();
+  engines = await manager.getEnabled();
   do_check_eq(engines.length, 3);
 
   _("getEnabled() returns enabled engines in sorted order");
   petrol.syncPriority = 1;
   dummy.syncPriority = 2;
   diesel.syncPriority = 3;
 
-  engines = manager.getEnabled();
+  engines = await manager.getEnabled();
 
   do_check_array_eq(engines, [petrol, dummy, diesel]);
 
   _("Changing the priorities should change the order in getEnabled()");
 
   dummy.syncPriority = 4;
 
-  engines = manager.getEnabled();
+  engines = await manager.getEnabled();
 
   do_check_array_eq(engines, [petrol, diesel, dummy]);
 
   _("Unregister an engine by name");
   manager.unregister("dummy");
-  do_check_eq(manager.get("dummy"), undefined);
-  engines = manager.getAll();
+  do_check_eq((await manager.get("dummy")), undefined);
+  engines = await manager.getAll();
   do_check_eq(engines.length, 2);
   do_check_eq(engines.indexOf(dummy), -1);
 
   _("Unregister an engine by value");
   // manager.unregister() checks for instanceof Engine, so let's make one:
-  manager.register(ActualEngine);
-  let actual = manager.get("actual");
+  await manager.register(ActualEngine);
+  let actual = await manager.get("actual");
   do_check_true(actual instanceof ActualEngine);
   do_check_true(actual instanceof Engine);
 
   manager.unregister(actual);
-  do_check_eq(manager.get("actual"), undefined);
-
-  run_next_test();
+  do_check_eq((await manager.get("actual")), undefined);
 });
 
--- a/services/sync/tests/unit/test_errorhandler_1.js
+++ b/services/sync/tests/unit/test_errorhandler_1.js
@@ -28,96 +28,93 @@ var fakeServerUrl = "http://localhost:" 
 const logsdir = FileUtils.getDir("ProfD", ["weave", "logs"], true);
 
 const PROLONGED_ERROR_DURATION =
   (Svc.Prefs.get("errorhandler.networkFailureReportTimeout") * 2) * 1000;
 
 const NON_PROLONGED_ERROR_DURATION =
   (Svc.Prefs.get("errorhandler.networkFailureReportTimeout") / 2) * 1000;
 
-Service.engineManager.clear();
-
 function setLastSync(lastSyncValue) {
   Svc.Prefs.set("lastSync", (new Date(Date.now() - lastSyncValue)).toString());
 }
 
-var engineManager = Service.engineManager;
-engineManager.register(EHTestsCommon.CatapultEngine);
-
 // This relies on Service/ErrorHandler being a singleton. Fixing this will take
 // a lot of work.
-var errorHandler = Service.errorHandler;
+let errorHandler = Service.errorHandler;
+let engine;
 
-function run_test() {
+add_task(async function setup() {
   initTestLogging("Trace");
 
   Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace;
 
-  run_next_test();
-}
+  Service.engineManager.clear();
+  await Service.engineManager.register(EHTestsCommon.CatapultEngine);
+  engine = Service.engineManager.get("catapult");
+});
 
-
-function clean() {
-  Service.startOver();
+async function clean() {
+  await Service.startOver();
   Status.resetSync();
   Status.resetBackoff();
   errorHandler.didReportProlongedError = false;
 }
 
 add_task(async function test_401_logout() {
   enableValidationPrefs();
 
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // By calling sync, we ensure we're logged in.
   await sync_and_validate_telem();
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
   do_check_true(Service.isLoggedIn);
 
-  let deferred = PromiseUtils.defer();
-  Svc.Obs.add("weave:service:sync:error", onSyncError);
-  function onSyncError() {
-    _("Got weave:service:sync:error in first sync.");
-    Svc.Obs.remove("weave:service:sync:error", onSyncError);
-
-    // Wait for the automatic next sync.
-    function onLoginError() {
-      _("Got weave:service:login:error in second sync.");
-      Svc.Obs.remove("weave:service:login:error", onLoginError);
+  let promiseErrors = new Promise(res => {
+    Svc.Obs.add("weave:service:sync:error", onSyncError);
+    function onSyncError() {
+      _("Got weave:service:sync:error in first sync.");
+      Svc.Obs.remove("weave:service:sync:error", onSyncError);
 
-      do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
-      do_check_false(Service.isLoggedIn);
-
-      // Clean up.
-      Utils.nextTick(function() {
-        Service.startOver();
-        server.stop(deferred.resolve);
-      });
+      // Wait for the automatic next sync.
+      Svc.Obs.add("weave:service:login:error", onLoginError);
+      function onLoginError() {
+        _("Got weave:service:login:error in second sync.");
+        Svc.Obs.remove("weave:service:login:error", onLoginError);
+        res();
+      }
     }
-    Svc.Obs.add("weave:service:login:error", onLoginError);
-  }
+  });
 
   // Make sync fail due to login rejected.
   await configureIdentity({username: "janedoe"}, server);
   Service._updateCachedURLs();
 
   _("Starting first sync.");
   let ping = await sync_and_validate_telem(true);
   deepEqual(ping.failureReason, { name: "httperror", code: 401 });
   _("First sync done.");
-  await deferred.promise;
+
+  await promiseErrors;
+  do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
+  do_check_false(Service.isLoggedIn);
+
+  // Clean up.
+  await Service.startOver();
+  await promiseStopServer(server);
 });
 
 add_task(async function test_credentials_changed_logout() {
   enableValidationPrefs();
 
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // By calling sync, we ensure we're logged in.
   await sync_and_validate_telem();
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
   do_check_true(Service.isLoggedIn);
 
   await EHTestsCommon.generateCredentialsChangedFailure();
@@ -128,17 +125,17 @@ add_task(async function test_credentials
     name: "unexpectederror",
     error: "Error: Aborting sync, remote setup failed"
   });
 
   do_check_eq(Status.sync, CREDENTIALS_CHANGED);
   do_check_false(Service.isLoggedIn);
 
   // Clean up.
-  Service.startOver();
+  await Service.startOver();
   await promiseStopServer(server);
 });
 
 add_task(function test_no_lastSync_pref() {
   // Test reported error.
   Status.resetSync();
   errorHandler.dontIgnoreErrors = true;
   Status.sync = CREDENTIALS_CHANGED;
@@ -321,34 +318,34 @@ add_task(function test_shouldReportError
   errorHandler.dontIgnoreErrors = true;
   Status.login = SERVER_MAINTENANCE;
   do_check_true(errorHandler.shouldReportError());
   do_check_false(errorHandler.didReportProlongedError);
 });
 
 add_task(async function test_shouldReportError_master_password() {
   _("Test error ignored due to locked master password");
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // Monkey patch Service.verifyLogin to imitate
   // master password being locked.
   Service._verifyLogin = Service.verifyLogin;
-  Service.verifyLogin = function() {
+  Service.verifyLogin = async function() {
     Status.login = MASTER_PASSWORD_LOCKED;
     return false;
   };
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   do_check_false(errorHandler.shouldReportError());
 
   // Clean up.
   Service.verifyLogin = Service._verifyLogin;
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 // Test that even if we don't have a cluster URL, a login failure due to
 // authentication errors is always reported.
 add_task(function test_shouldReportLoginFailureWithNoCluster() {
   // Ensure no clusterURL - any error not specific to login should not be reported.
   Service.clusterURL = "";
@@ -364,41 +361,41 @@ add_task(function test_shouldReportLogin
   do_check_false(errorHandler.shouldReportError());
 });
 
 add_task(async function test_login_syncAndReportErrors_non_network_error() {
   enableValidationPrefs();
 
   // Test non-network errors are reported
   // when calling syncAndReportErrors
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
   Service.identity.resetSyncKeyBundle();
 
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
   errorHandler.syncAndReportErrors();
   await promiseObserved;
   do_check_eq(Status.login, LOGIN_FAILED_NO_PASSPHRASE);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_sync_syncAndReportErrors_non_network_error() {
   enableValidationPrefs();
 
   // Test non-network errors are reported
   // when calling syncAndReportErrors
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // By calling sync, we ensure we're logged in.
-  Service.sync();
+  await Service.sync();
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
   do_check_true(Service.isLoggedIn);
 
   await EHTestsCommon.generateCredentialsChangedFailure();
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
@@ -407,51 +404,51 @@ add_task(async function test_sync_syncAn
   deepEqual(ping.failureReason, {
     name: "unexpectederror",
     error: "Error: Aborting sync, remote setup failed"
   });
   await promiseObserved;
 
   do_check_eq(Status.sync, CREDENTIALS_CHANGED);
   // If we clean this tick, telemetry won't get the right error
-  await promiseNextTick();
-  clean();
+  await Async.promiseYield();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_login_syncAndReportErrors_prolonged_non_network_error() {
   enableValidationPrefs();
 
   // Test prolonged, non-network errors are
   // reported when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
   Service.identity.resetSyncKeyBundle();
 
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   setLastSync(PROLONGED_ERROR_DURATION);
   errorHandler.syncAndReportErrors();
   await promiseObserved;
   do_check_eq(Status.login, LOGIN_FAILED_NO_PASSPHRASE);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_sync_syncAndReportErrors_prolonged_non_network_error() {
   enableValidationPrefs();
 
   // Test prolonged, non-network errors are
   // reported when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // By calling sync, we ensure we're logged in.
-  Service.sync();
+  await Service.sync();
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
   do_check_true(Service.isLoggedIn);
 
   await EHTestsCommon.generateCredentialsChangedFailure();
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   setLastSync(PROLONGED_ERROR_DURATION);
@@ -460,18 +457,18 @@ add_task(async function test_sync_syncAn
   deepEqual(ping.failureReason, {
     name: "unexpectederror",
     error: "Error: Aborting sync, remote setup failed"
   });
   await promiseObserved;
 
   do_check_eq(Status.sync, CREDENTIALS_CHANGED);
   // If we clean this tick, telemetry won't get the right error
-  await promiseNextTick();
-  clean();
+  await Async.promiseYield();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_login_syncAndReportErrors_network_error() {
   enableValidationPrefs();
 
   // Test network errors are reported when calling syncAndReportErrors.
   await configureIdentity({username: "broken.wipe"});
@@ -480,37 +477,35 @@ add_task(async function test_login_syncA
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
   errorHandler.syncAndReportErrors();
   await promiseObserved;
 
   do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
 
-  clean();
+  await clean();
 });
 
 
-add_test(function test_sync_syncAndReportErrors_network_error() {
+add_task(async function test_sync_syncAndReportErrors_network_error() {
   enableValidationPrefs();
 
   // Test network errors are reported when calling syncAndReportErrors.
   Services.io.offline = true;
 
-  Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
-    Svc.Obs.remove("weave:ui:sync:error", onSyncError);
-    do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
-
-    Services.io.offline = false;
-    clean();
-    run_next_test();
-  });
+  let promiseUISyncError = promiseOneObserver("weave:ui:sync:error");
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
   errorHandler.syncAndReportErrors();
+  await promiseUISyncError;
+  do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
+
+  Services.io.offline = false;
+  await clean();
 });
 
 add_task(async function test_login_syncAndReportErrors_prolonged_network_error() {
   enableValidationPrefs();
 
   // Test prolonged, network errors are reported
   // when calling syncAndReportErrors.
   await configureIdentity({username: "johndoe"});
@@ -519,68 +514,66 @@ add_task(async function test_login_syncA
 
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   setLastSync(PROLONGED_ERROR_DURATION);
   errorHandler.syncAndReportErrors();
   await promiseObserved;
   do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
 
-  clean();
+  await clean();
 });
 
-add_test(function test_sync_syncAndReportErrors_prolonged_network_error() {
+add_task(async function test_sync_syncAndReportErrors_prolonged_network_error() {
   enableValidationPrefs();
 
   // Test prolonged, network errors are reported
   // when calling syncAndReportErrors.
   Services.io.offline = true;
 
-  Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
-    Svc.Obs.remove("weave:ui:sync:error", onSyncError);
-    do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
-
-    Services.io.offline = false;
-    clean();
-    run_next_test();
-  });
+  let promiseUISyncError = promiseOneObserver("weave:ui:sync:error");
 
   setLastSync(PROLONGED_ERROR_DURATION);
   errorHandler.syncAndReportErrors();
+  await promiseUISyncError;
+  do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
+
+  Services.io.offline = false;
+  await clean();
 });
 
 add_task(async function test_login_prolonged_non_network_error() {
   enableValidationPrefs();
 
   // Test prolonged, non-network errors are reported
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
   Service.identity.resetSyncKeyBundle();
 
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   setLastSync(PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
   do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
   do_check_true(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_sync_prolonged_non_network_error() {
   enableValidationPrefs();
 
   // Test prolonged, non-network errors are reported
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // By calling sync, we ensure we're logged in.
-  Service.sync();
+  await Service.sync();
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
   do_check_true(Service.isLoggedIn);
 
   await EHTestsCommon.generateCredentialsChangedFailure();
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   setLastSync(PROLONGED_ERROR_DURATION);
@@ -589,152 +582,147 @@ add_task(async function test_sync_prolon
   equal(ping.status.sync, PROLONGED_SYNC_FAILURE);
   deepEqual(ping.failureReason, {
     name: "unexpectederror",
     error: "Error: Aborting sync, remote setup failed"
   });
   await promiseObserved;
   do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
   do_check_true(errorHandler.didReportProlongedError);
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_login_prolonged_network_error() {
   enableValidationPrefs();
 
   // Test prolonged, network errors are reported
   await configureIdentity({username: "johndoe"});
   Service.clusterURL = fakeServerUrl;
 
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   setLastSync(PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
   do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
   do_check_true(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
 });
 
-add_test(function test_sync_prolonged_network_error() {
+add_task(async function test_sync_prolonged_network_error() {
   enableValidationPrefs();
 
   // Test prolonged, network errors are reported
   Services.io.offline = true;
 
-  Svc.Obs.add("weave:ui:sync:error", function onSyncError() {
-    Svc.Obs.remove("weave:ui:sync:error", onSyncError);
-    do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
-    do_check_true(errorHandler.didReportProlongedError);
-
-    Services.io.offline = false;
-    clean();
-    run_next_test();
-  });
+  let promiseUISyncError = promiseOneObserver("weave:ui:sync:error");
 
   setLastSync(PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
+  await promiseUISyncError;
+  do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
+  do_check_true(errorHandler.didReportProlongedError);
+
+  Services.io.offline = false;
+  await clean();
 });
 
 add_task(async function test_login_non_network_error() {
   enableValidationPrefs();
 
   // Test non-network errors are reported
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
   Service.identity.resetSyncKeyBundle();
 
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
   do_check_eq(Status.login, LOGIN_FAILED_NO_PASSPHRASE);
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_sync_non_network_error() {
   enableValidationPrefs();
 
   // Test non-network errors are reported
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   // By calling sync, we ensure we're logged in.
-  Service.sync();
+  await Service.sync();
   do_check_eq(Status.sync, SYNC_SUCCEEDED);
   do_check_true(Service.isLoggedIn);
 
   await EHTestsCommon.generateCredentialsChangedFailure();
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
   do_check_eq(Status.sync, CREDENTIALS_CHANGED);
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_login_network_error() {
   enableValidationPrefs();
 
   await configureIdentity({username: "johndoe"});
   Service.clusterURL = fakeServerUrl;
 
   let promiseObserved = promiseOneObserver("weave:ui:clear-error");
   // Test network errors are not reported.
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
   do_check_eq(Status.login, LOGIN_FAILED_NETWORK_ERROR);
   do_check_false(errorHandler.didReportProlongedError);
 
   Services.io.offline = false;
-  clean();
+  await clean();
 });
 
-add_test(function test_sync_network_error() {
+add_task(async function test_sync_network_error() {
   enableValidationPrefs();
 
   // Test network errors are not reported.
   Services.io.offline = true;
 
-  Svc.Obs.add("weave:ui:sync:finish", function onUIUpdate() {
-    Svc.Obs.remove("weave:ui:sync:finish", onUIUpdate);
-    do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
-    do_check_false(errorHandler.didReportProlongedError);
-
-    Services.io.offline = false;
-    clean();
-    run_next_test();
-  });
+  let promiseSyncFinished = promiseOneObserver("weave:ui:sync:finish");
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
+  await promiseSyncFinished;
+  do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
+  do_check_false(errorHandler.didReportProlongedError);
+
+  Services.io.offline = false;
+  await clean();
 });
 
 add_task(async function test_sync_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test server maintenance errors are not reported.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   const BACKOFF = 42;
-  let engine = engineManager.get("catapult");
   engine.enabled = true;
   engine.exception = {status: 503,
                       headers: {"retry-after": BACKOFF}};
 
   function onSyncError() {
     do_throw("Shouldn't get here!");
   }
   Svc.Obs.add("weave:ui:sync:error", onSyncError);
@@ -748,25 +736,25 @@ add_task(async function test_sync_server
   equal(ping.status.sync, SERVER_MAINTENANCE);
   deepEqual(ping.engines.find(e => e.failureReason).failureReason, { name: "httperror", code: 503 })
 
   await promiseObserved;
   do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   do_check_eq(Status.sync, SERVER_MAINTENANCE);
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_info_collections_login_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test info/collections server maintenance errors are not reported.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.info"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
@@ -778,35 +766,35 @@ add_task(async function test_info_collec
   Svc.Obs.add("weave:ui:login:error", onUIUpdate);
 
   do_check_false(Status.enforceBackoff);
   do_check_eq(Status.service, STATUS_OK);
 
   let promiseObserved = promiseOneObserver("weave:ui:clear-error")
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   do_check_false(errorHandler.didReportProlongedError);
 
   Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_meta_global_login_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test meta/global server maintenance errors are not reported.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.meta"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
@@ -818,21 +806,21 @@ add_task(async function test_meta_global
   Svc.Obs.add("weave:ui:login:error", onUIUpdate);
 
   do_check_false(Status.enforceBackoff);
   do_check_eq(Status.service, STATUS_OK);
 
   let promiseObserved = promiseOneObserver("weave:ui:clear-error");
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   do_check_false(errorHandler.didReportProlongedError);
 
   Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
--- a/services/sync/tests/unit/test_errorhandler_2.js
+++ b/services/sync/tests/unit/test_errorhandler_2.js
@@ -27,53 +27,50 @@ var fakeServerUrl = "http://localhost:" 
 const logsdir = FileUtils.getDir("ProfD", ["weave", "logs"], true);
 
 const PROLONGED_ERROR_DURATION =
   (Svc.Prefs.get("errorhandler.networkFailureReportTimeout") * 2) * 1000;
 
 const NON_PROLONGED_ERROR_DURATION =
   (Svc.Prefs.get("errorhandler.networkFailureReportTimeout") / 2) * 1000;
 
-Service.engineManager.clear();
-
 function setLastSync(lastSyncValue) {
   Svc.Prefs.set("lastSync", (new Date(Date.now() - lastSyncValue)).toString());
 }
 
-var engineManager = Service.engineManager;
-engineManager.register(EHTestsCommon.CatapultEngine);
-
 // This relies on Service/ErrorHandler being a singleton. Fixing this will take
 // a lot of work.
 var errorHandler = Service.errorHandler;
+let engine;
 
-function run_test() {
+add_task(async function setup() {
   initTestLogging("Trace");
 
   Log.repository.getLogger("Sync.Service").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.ErrorHandler").level = Log.Level.Trace;
 
-  run_next_test();
-}
+  Service.engineManager.clear();
+  await Service.engineManager.register(EHTestsCommon.CatapultEngine);
+  engine = Service.engineManager.get("catapult");
+});
 
-
-function clean() {
-  Service.startOver();
+async function clean() {
+  await Service.startOver();
   Status.resetSync();
   Status.resetBackoff();
   errorHandler.didReportProlongedError = false;
 }
 
 add_task(async function test_crypto_keys_login_server_maintenance_error() {
   enableValidationPrefs();
 
   Status.resetSync();
   // Test crypto/keys server maintenance errors are not reported.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.keys"}, server);
 
   // Force re-download of keys
   Service.collectionKeys.clear();
 
   let backoffInterval;
@@ -88,39 +85,38 @@ add_task(async function test_crypto_keys
   Svc.Obs.add("weave:ui:login:error", onUIUpdate);
 
   do_check_false(Status.enforceBackoff);
   do_check_eq(Status.service, STATUS_OK);
 
   let promiseObserved = promiseOneObserver("weave:ui:clear-error");
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   do_check_false(errorHandler.didReportProlongedError);
 
   Svc.Obs.remove("weave:ui:login:error", onUIUpdate);
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_sync_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test prolonged server maintenance errors are reported.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   const BACKOFF = 42;
-  let engine = engineManager.get("catapult");
   engine.enabled = true;
   engine.exception = {status: 503,
                       headers: {"retry-after": BACKOFF}};
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   do_check_eq(Status.service, STATUS_OK);
 
@@ -131,92 +127,92 @@ add_task(async function test_sync_prolon
             { name: "httperror", code: 503 });
   await promiseObserved;
 
   do_check_eq(Status.service, SYNC_FAILED);
   do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
   do_check_true(errorHandler.didReportProlongedError);
 
   await promiseStopServer(server);
-  clean();
+  await clean();
 });
 
 add_task(async function test_info_collections_login_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test info/collections prolonged server maintenance errors are reported.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.info"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
   });
 
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   do_check_false(Status.enforceBackoff);
   do_check_eq(Status.service, STATUS_OK);
 
   setLastSync(PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, SYNC_FAILED);
   do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
   do_check_true(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_meta_global_login_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test meta/global prolonged server maintenance errors are reported.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.meta"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
   });
 
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   do_check_false(Status.enforceBackoff);
   do_check_eq(Status.service, STATUS_OK);
 
   setLastSync(PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, SYNC_FAILED);
   do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
   do_check_true(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_download_crypto_keys_login_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test crypto/keys prolonged server maintenance errors are reported.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.keys"}, server);
   // Force re-download of keys
   Service.collectionKeys.clear();
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
@@ -225,109 +221,108 @@ add_task(async function test_download_cr
   });
 
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   do_check_false(Status.enforceBackoff);
   do_check_eq(Status.service, STATUS_OK);
 
   setLastSync(PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, SYNC_FAILED);
   do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
   do_check_true(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_upload_crypto_keys_login_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test crypto/keys prolonged server maintenance errors are reported.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
 
   // Start off with an empty account, do not upload a key.
   await configureIdentity({username: "broken.keys"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
   });
 
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   do_check_false(Status.enforceBackoff);
   do_check_eq(Status.service, STATUS_OK);
 
   setLastSync(PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, SYNC_FAILED);
   do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
   do_check_true(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_wipeServer_login_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test that we report prolonged server maintenance errors that occur whilst
   // wiping the server.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
 
   // Start off with an empty account, do not upload a key.
   await configureIdentity({username: "broken.wipe"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
   });
 
   let promiseObserved = promiseOneObserver("weave:ui:login:error");
 
   do_check_false(Status.enforceBackoff);
   do_check_eq(Status.service, STATUS_OK);
 
   setLastSync(PROLONGED_ERROR_DURATION);
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, SYNC_FAILED);
   do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
   do_check_true(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_wipeRemote_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test that we report prolonged server maintenance errors that occur whilst
   // wiping all remote devices.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
 
   server.registerPathHandler("/1.1/broken.wipe/storage/catapult", EHTestsCommon.service_unavailable);
   await configureIdentity({username: "broken.wipe"}, server);
   EHTestsCommon.generateAndUploadKeys();
 
-  let engine = engineManager.get("catapult");
   engine.exception = null;
   engine.enabled = true;
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
   });
@@ -345,55 +340,54 @@ add_task(async function test_wipeRemote_
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, SYNC_FAILED);
   do_check_eq(Status.sync, PROLONGED_SYNC_FAILURE);
   do_check_eq(Svc.Prefs.get("firstSync"), "wipeRemote");
   do_check_true(errorHandler.didReportProlongedError);
   await promiseStopServer(server);
-  clean();
+  await clean();
 });
 
 add_task(async function test_sync_syncAndReportErrors_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test server maintenance errors are reported
   // when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   const BACKOFF = 42;
-  let engine = engineManager.get("catapult");
   engine.enabled = true;
   engine.exception = {status: 503,
                       headers: {"retry-after": BACKOFF}};
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   do_check_eq(Status.service, STATUS_OK);
 
   setLastSync(NON_PROLONGED_ERROR_DURATION);
   errorHandler.syncAndReportErrors();
   await promiseObserved;
 
   do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   do_check_eq(Status.sync, SERVER_MAINTENANCE);
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_info_collections_login_syncAndReportErrors_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test info/collections server maintenance errors are reported
   // when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.info"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
@@ -409,26 +403,26 @@ add_task(async function test_info_collec
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_meta_global_login_syncAndReportErrors_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test meta/global server maintenance errors are reported
   // when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.meta"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
@@ -444,26 +438,26 @@ add_task(async function test_meta_global
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_download_crypto_keys_login_syncAndReportErrors_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test crypto/keys server maintenance errors are reported
   // when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.keys"}, server);
   // Force re-download of keys
   Service.collectionKeys.clear();
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
@@ -481,26 +475,26 @@ add_task(async function test_download_cr
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_upload_crypto_keys_login_syncAndReportErrors_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test crypto/keys server maintenance errors are reported
   // when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
 
   // Start off with an empty account, do not upload a key.
   await configureIdentity({username: "broken.keys"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
@@ -516,26 +510,26 @@ add_task(async function test_upload_cryp
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_wipeServer_login_syncAndReportErrors_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test crypto/keys server maintenance errors are reported
   // when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
 
   // Start off with an empty account, do not upload a key.
   await configureIdentity({username: "broken.wipe"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
@@ -551,31 +545,30 @@ add_task(async function test_wipeServer_
   await promiseObserved;
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_wipeRemote_syncAndReportErrors_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test that we report prolonged server maintenance errors that occur whilst
   // wiping all remote devices.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
 
   await configureIdentity({username: "broken.wipe"}, server);
   EHTestsCommon.generateAndUploadKeys();
 
-  let engine = engineManager.get("catapult");
   engine.exception = null;
   engine.enabled = true;
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
   });
@@ -592,30 +585,29 @@ add_task(async function test_wipeRemote_
 
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, SYNC_FAILED);
   do_check_eq(Status.sync, SERVER_MAINTENANCE);
   do_check_eq(Svc.Prefs.get("firstSync"), "wipeRemote");
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_sync_syncAndReportErrors_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test prolonged server maintenance errors are
   // reported when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   const BACKOFF = 42;
-  let engine = engineManager.get("catapult");
   engine.enabled = true;
   engine.exception = {status: 503,
                       headers: {"retry-after": BACKOFF}};
 
   let promiseObserved = promiseOneObserver("weave:ui:sync:error");
 
   do_check_eq(Status.service, STATUS_OK);
 
@@ -624,26 +616,26 @@ add_task(async function test_sync_syncAn
   await promiseObserved;
 
   do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   do_check_eq(Status.sync, SERVER_MAINTENANCE);
   // syncAndReportErrors means dontIgnoreErrors, which means
   // didReportProlongedError not touched.
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_info_collections_login_syncAndReportErrors_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test info/collections server maintenance errors are reported
   // when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.info"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
@@ -661,26 +653,26 @@ add_task(async function test_info_collec
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   // syncAndReportErrors means dontIgnoreErrors, which means
   // didReportProlongedError not touched.
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_meta_global_login_syncAndReportErrors_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test meta/global server maintenance errors are reported
   // when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.meta"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
@@ -698,26 +690,26 @@ add_task(async function test_meta_global
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   // syncAndReportErrors means dontIgnoreErrors, which means
   // didReportProlongedError not touched.
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_download_crypto_keys_login_syncAndReportErrors_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test crypto/keys server maintenance errors are reported
   // when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
   await EHTestsCommon.setUp(server);
 
   await configureIdentity({username: "broken.keys"}, server);
   // Force re-download of keys
   Service.collectionKeys.clear();
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
@@ -737,26 +729,26 @@ add_task(async function test_download_cr
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   // syncAndReportErrors means dontIgnoreErrors, which means
   // didReportProlongedError not touched.
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_upload_crypto_keys_login_syncAndReportErrors_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test crypto/keys server maintenance errors are reported
   // when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
 
   // Start off with an empty account, do not upload a key.
   await configureIdentity({username: "broken.keys"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
@@ -774,26 +766,26 @@ add_task(async function test_upload_cryp
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   // syncAndReportErrors means dontIgnoreErrors, which means
   // didReportProlongedError not touched.
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_wipeServer_login_syncAndReportErrors_prolonged_server_maintenance_error() {
   enableValidationPrefs();
 
   // Test crypto/keys server maintenance errors are reported
   // when calling syncAndReportErrors.
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
 
   // Start off with an empty account, do not upload a key.
   await configureIdentity({username: "broken.wipe"}, server);
 
   let backoffInterval;
   Svc.Obs.add("weave:service:backoff:interval", function observe(subject, data) {
     Svc.Obs.remove("weave:service:backoff:interval", observe);
     backoffInterval = subject;
@@ -811,163 +803,152 @@ add_task(async function test_wipeServer_
   do_check_true(Status.enforceBackoff);
   do_check_eq(backoffInterval, 42);
   do_check_eq(Status.service, LOGIN_FAILED);
   do_check_eq(Status.login, SERVER_MAINTENANCE);
   // syncAndReportErrors means dontIgnoreErrors, which means
   // didReportProlongedError not touched.
   do_check_false(errorHandler.didReportProlongedError);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
 
 add_task(async function test_sync_engine_generic_fail() {
   enableValidationPrefs();
 
-  let server = EHTestsCommon.sync_httpd_setup();
-
-let engine = engineManager.get("catapult");
+  let server = await EHTestsCommon.sync_httpd_setup();
   engine.enabled = true;
-  engine.sync = function sync() {
+  engine.sync = async function sync() {
     Svc.Obs.notify("weave:engine:sync:error", ENGINE_UNKNOWN_FAIL, "catapult");
   };
 
   let log = Log.repository.getLogger("Sync.ErrorHandler");
   Svc.Prefs.set("log.appender.file.logOnError", true);
 
   do_check_eq(Status.engines["catapult"], undefined);
 
-  let deferred = PromiseUtils.defer();
-  // Don't wait for reset-file-log until the sync is underway.
-  // This avoids us catching a delayed notification from an earlier test.
-  Svc.Obs.add("weave:engine:sync:finish", function onEngineFinish() {
-    Svc.Obs.remove("weave:engine:sync:finish", onEngineFinish);
-
-    log.info("Adding reset-file-log observer.");
-    Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() {
-      Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog);
+  let promiseObserved = new Promise(res => {
+    Svc.Obs.add("weave:engine:sync:finish", function onEngineFinish() {
+      Svc.Obs.remove("weave:engine:sync:finish", onEngineFinish);
 
-      // Put these checks here, not after sync(), so that we aren't racing the
-      // log handler... which resets everything just a few lines below!
-      _("Status.engines: " + JSON.stringify(Status.engines));
-      do_check_eq(Status.engines["catapult"], ENGINE_UNKNOWN_FAIL);
-      do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
-
-      // Test Error log was written on SYNC_FAILED_PARTIAL.
-      let entries = logsdir.directoryEntries;
-      do_check_true(entries.hasMoreElements());
-      let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
-      do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
-
-      clean();
-
-      let syncErrors = sumHistogram("WEAVE_ENGINE_SYNC_ERRORS", { key: "catapult" });
-      do_check_true(syncErrors, 1);
-
-      server.stop(() => {
-        clean();
-        deferred.resolve();
+      log.info("Adding reset-file-log observer.");
+      Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() {
+        Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog);
+        res();
       });
     });
   });
 
   do_check_true(await EHTestsCommon.setUp(server));
   let ping = await sync_and_validate_telem(true);
   deepEqual(ping.status.service, SYNC_FAILED_PARTIAL);
   deepEqual(ping.engines.find(e => e.status).status, ENGINE_UNKNOWN_FAIL);
 
-  await deferred.promise;
+  await promiseObserved;
+
+  _("Status.engines: " + JSON.stringify(Status.engines));
+  do_check_eq(Status.engines["catapult"], ENGINE_UNKNOWN_FAIL);
+  do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
+
+  // Test Error log was written on SYNC_FAILED_PARTIAL.
+  let entries = logsdir.directoryEntries;
+  do_check_true(entries.hasMoreElements());
+  let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
+  do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
+
+  await clean();
+
+  let syncErrors = sumHistogram("WEAVE_ENGINE_SYNC_ERRORS", { key: "catapult" });
+  do_check_true(syncErrors, 1);
+
+  await clean();
+  await promiseStopServer(server);
 });
 
-add_test(function test_logs_on_sync_error_despite_shouldReportError() {
+add_task(async function test_logs_on_sync_error_despite_shouldReportError() {
   enableValidationPrefs();
 
   _("Ensure that an error is still logged when weave:service:sync:error " +
     "is notified, despite shouldReportError returning false.");
 
   let log = Log.repository.getLogger("Sync.ErrorHandler");
   Svc.Prefs.set("log.appender.file.logOnError", true);
   log.info("TESTING");
 
   // Ensure that we report no error.
   Status.login = MASTER_PASSWORD_LOCKED;
   do_check_false(errorHandler.shouldReportError());
 
-  Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() {
-    Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog);
+  let promiseObserved = promiseOneObserver("weave:service:reset-file-log");
+  Svc.Obs.notify("weave:service:sync:error", {});
+  await promiseObserved;
 
-    // Test that error log was written.
-    let entries = logsdir.directoryEntries;
-    do_check_true(entries.hasMoreElements());
-    let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
-    do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
+  // Test that error log was written.
+  let entries = logsdir.directoryEntries;
+  do_check_true(entries.hasMoreElements());
+  let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
+  do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
 
-    clean();
-    run_next_test();
-  });
-  Svc.Obs.notify("weave:service:sync:error", {});
+  await clean();
 });
 
-add_test(function test_logs_on_login_error_despite_shouldReportError() {
+add_task(async function test_logs_on_login_error_despite_shouldReportError() {
   enableValidationPrefs();
 
   _("Ensure that an error is still logged when weave:service:login:error " +
     "is notified, despite shouldReportError returning false.");
 
   let log = Log.repository.getLogger("Sync.ErrorHandler");
   Svc.Prefs.set("log.appender.file.logOnError", true);
   log.info("TESTING");
 
   // Ensure that we report no error.
   Status.login = MASTER_PASSWORD_LOCKED;
   do_check_false(errorHandler.shouldReportError());
 
-  Svc.Obs.add("weave:service:reset-file-log", function onResetFileLog() {
-    Svc.Obs.remove("weave:service:reset-file-log", onResetFileLog);
+  let promiseObserved = promiseOneObserver("weave:service:reset-file-log");
+  Svc.Obs.notify("weave:service:login:error", {});
+  await promiseObserved;
 
-    // Test that error log was written.
-    let entries = logsdir.directoryEntries;
-    do_check_true(entries.hasMoreElements());
-    let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
-    do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
+  // Test that error log was written.
+  let entries = logsdir.directoryEntries;
+  do_check_true(entries.hasMoreElements());
+  let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
+  do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
 
-    clean();
-    run_next_test();
-  });
-  Svc.Obs.notify("weave:service:login:error", {});
+  await clean();
 });
 
 // This test should be the last one since it monkeypatches the engine object
 // and we should only have one engine object throughout the file (bug 629664).
 add_task(async function test_engine_applyFailed() {
   enableValidationPrefs();
 
-  let server = EHTestsCommon.sync_httpd_setup();
+  let server = await EHTestsCommon.sync_httpd_setup();
 
-  let engine = engineManager.get("catapult");
   engine.enabled = true;
   delete engine.exception;
-  engine.sync = function sync() {
+  engine.sync = async function sync() {
     Svc.Obs.notify("weave:engine:sync:applied", {newFailed: 1}, "catapult");
   };
 
   Svc.Prefs.set("log.appender.file.logOnError", true);
 
   let promiseObserved = promiseOneObserver("weave:service:reset-file-log");
 
   do_check_eq(Status.engines["catapult"], undefined);
   do_check_true(await EHTestsCommon.setUp(server));
-  Service.sync();
+  await Service.sync();
   await promiseObserved;
 
   do_check_eq(Status.engines["catapult"], ENGINE_APPLY_FAIL);
   do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
 
   // Test Error log was written on SYNC_FAILED_PARTIAL.
   let entries = logsdir.directoryEntries;
   do_check_true(entries.hasMoreElements());
   let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
   do_check_true(logfile.leafName.startsWith("error-sync-"), logfile.leafName);
 
-  clean();
+  await clean();
   await promiseStopServer(server);
 });
--- a/services/sync/tests/unit/test_errorhandler_eol.js
+++ b/services/sync/tests/unit/test_errorhandler_eol.js
@@ -67,17 +67,17 @@ function do_check_hard_eol(eh, start) {
 add_task(async function test_200_hard() {
   let eh = Service.errorHandler;
   let start = Date.now();
   let server = sync_httpd_setup(handler200("hard-eol"));
   await setUp(server);
 
   let promiseObserved = promiseOneObserver("weave:eol");
 
-  Service._fetchInfo();
+  await Service._fetchInfo();
   Service.scheduler.adjustSyncInterval();   // As if we failed or succeeded in syncing.
 
   let { subject } = await promiseObserved;
   do_check_eq("hard-eol", subject.code);
   do_check_hard_eol(eh, start);
   do_check_eq(Service.scheduler.eolInterval, Service.scheduler.syncInterval);
   eh.clearServerAlerts();
   await promiseStopServer(server);
@@ -87,17 +87,17 @@ add_task(async function test_513_hard() 
   let eh = Service.errorHandler;
   let start = Date.now();
   let server = sync_httpd_setup(handler513);
   await setUp(server);
 
   let promiseObserved = promiseOneObserver("weave:eol");
 
   try {
-    Service._fetchInfo();
+    await Service._fetchInfo();
     Service.scheduler.adjustSyncInterval();   // As if we failed or succeeded in syncing.
   } catch (ex) {
     // Because fetchInfo will fail on a 513.
   }
   let { subject } = await promiseObserved;
   do_check_eq("hard-eol", subject.code);
   do_check_hard_eol(eh, start);
   do_check_eq(Service.scheduler.eolInterval, Service.scheduler.syncInterval);
@@ -109,17 +109,17 @@ add_task(async function test_513_hard() 
 add_task(async function test_200_soft() {
   let eh = Service.errorHandler;
   let start = Date.now();
   let server = sync_httpd_setup(handler200("soft-eol"));
   await setUp(server);
 
   let promiseObserved = promiseOneObserver("weave:eol");
 
-  Service._fetchInfo();
+  await Service._fetchInfo();
   Service.scheduler.adjustSyncInterval();   // As if we failed or succeeded in syncing.
   let { subject } = await promiseObserved;
   do_check_eq("soft-eol", subject.code);
   do_check_soft_eol(eh, start);
   do_check_eq(Service.scheduler.singleDeviceInterval, Service.scheduler.syncInterval);
   eh.clearServerAlerts();
 
   await promiseStopServer(server);
--- a/services/sync/tests/unit/test_errorhandler_sync_checkServerError.js
+++ b/services/sync/tests/unit/test_errorhandler_sync_checkServerError.js
@@ -17,22 +17,22 @@ var engineManager = Service.engineManage
 engineManager.clear();
 
 function CatapultEngine() {
   SyncEngine.call(this, "Catapult", Service);
 }
 CatapultEngine.prototype = {
   __proto__: SyncEngine.prototype,
   exception: null, // tests fill this in
-  _sync: function _sync() {
+  async _sync() {
     throw this.exception;
   }
 };
 
-function sync_httpd_setup() {
+async function sync_httpd_setup() {
   let collectionsHelper = track_collections_helper();
   let upd = collectionsHelper.with_updated_collection;
 
   let catapultEngine = engineManager.get("catapult");
   let engines        = {catapult: {version: catapultEngine.version,
                                    syncID:  catapultEngine.syncID}};
 
   // Track these using the collections helper, which keeps modified times
@@ -60,51 +60,55 @@ async function setUp(server) {
 async function generateAndUploadKeys(server) {
   generateNewKeys(Service.collectionKeys);
   let serverKeys = Service.collectionKeys.asWBO("crypto", "keys");
   serverKeys.encrypt(Service.identity.syncKeyBundle);
   let res = Service.resource(server.baseURI + "/1.1/johndoe/storage/crypto/keys");
   return (await serverKeys.upload(res)).success;
 }
 
+add_task(async function run_test() {
+  validate_all_future_pings();
+  await engineManager.register(CatapultEngine);
+});
 
 add_task(async function test_backoff500() {
   enableValidationPrefs();
 
   _("Test: HTTP 500 sets backoff status.");
-  let server = sync_httpd_setup();
+  let server = await sync_httpd_setup();
   await setUp(server);
 
   let engine = engineManager.get("catapult");
   engine.enabled = true;
   engine.exception = {status: 500};
 
   try {
     do_check_false(Status.enforceBackoff);
 
     // Forcibly create and upload keys here -- otherwise we don't get to the 500!
     do_check_true(await generateAndUploadKeys(server));
 
-    Service.login();
-    Service.sync();
+    await Service.login();
+    await Service.sync();
     do_check_true(Status.enforceBackoff);
     do_check_eq(Status.sync, SYNC_SUCCEEDED);
     do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   } finally {
     Status.resetBackoff();
-    Service.startOver();
+    await Service.startOver();
   }
   await promiseStopServer(server);
 });
 
 add_task(async function test_backoff503() {
   enableValidationPrefs();
 
   _("Test: HTTP 503 with Retry-After header leads to backoff notification and sets backoff status.");
-  let server = sync_httpd_setup();
+  let server = await sync_httpd_setup();
   await setUp(server);
 
   const BACKOFF = 42;
   let engine = engineManager.get("catapult");
   engine.enabled = true;
   engine.exception = {status: 503,
                       headers: {"retry-after": BACKOFF}};
 
@@ -113,168 +117,162 @@ add_task(async function test_backoff503(
     backoffInterval = subject;
   });
 
   try {
     do_check_false(Status.enforceBackoff);
 
     do_check_true(await generateAndUploadKeys(server));
 
-    Service.login();
-    Service.sync();
+    await Service.login();
+    await Service.sync();
 
     do_check_true(Status.enforceBackoff);
     do_check_eq(backoffInterval, BACKOFF);
     do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
     do_check_eq(Status.sync, SERVER_MAINTENANCE);
   } finally {
     Status.resetBackoff();
     Status.resetSync();
-    Service.startOver();
+    await Service.startOver();
   }
   await promiseStopServer(server);
 });
 
 add_task(async function test_overQuota() {
   enableValidationPrefs();
 
   _("Test: HTTP 400 with body error code 14 means over quota.");
-  let server = sync_httpd_setup();
+  let server = await sync_httpd_setup();
   await setUp(server);
 
   let engine = engineManager.get("catapult");
   engine.enabled = true;
   engine.exception = {status: 400,
                       toString() {
                         return "14";
                       }};
 
   try {
     do_check_eq(Status.sync, SYNC_SUCCEEDED);
 
     do_check_true(await generateAndUploadKeys(server));
 
-    Service.login();
-    Service.sync();
+    await Service.login();
+    await Service.sync();
 
     do_check_eq(Status.sync, OVER_QUOTA);
     do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   } finally {
     Status.resetSync();
-    Service.startOver();
+    await Service.startOver();
   }
   await promiseStopServer(server);
 });
 
 add_task(async function test_service_networkError() {
   enableValidationPrefs();
 
   _("Test: Connection refused error from Service.sync() leads to the right status code.");
-  let server = sync_httpd_setup();
+  let server = await sync_httpd_setup();
   await setUp(server);
   await promiseStopServer(server);
   // Provoke connection refused.
   Service.clusterURL = "http://localhost:12345/";
 
   try {
     do_check_eq(Status.sync, SYNC_SUCCEEDED);
 
     Service._loggedIn = true;
-    Service.sync();
+    await Service.sync();
 
     do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
     do_check_eq(Status.service, SYNC_FAILED);
   } finally {
     Status.resetSync();
-    Service.startOver();
+    await Service.startOver();
   }
 });
 
 add_task(async function test_service_offline() {
   enableValidationPrefs();
 
   _("Test: Wanting to sync in offline mode leads to the right status code but does not increment the ignorable error count.");
-  let server = sync_httpd_setup();
+  let server = await sync_httpd_setup();
   await setUp(server);
 
   await promiseStopServer(server);
   Services.io.offline = true;
   Services.prefs.setBoolPref("network.dns.offline-localhost", false);
 
   try {
     do_check_eq(Status.sync, SYNC_SUCCEEDED);
 
     Service._loggedIn = true;
-    Service.sync();
+    await Service.sync();
 
     do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
     do_check_eq(Status.service, SYNC_FAILED);
   } finally {
     Status.resetSync();
-    Service.startOver();
+    await Service.startOver();
   }
   Services.io.offline = false;
   Services.prefs.clearUserPref("network.dns.offline-localhost");
 });
 
 add_task(async function test_engine_networkError() {
   enableValidationPrefs();
 
   _("Test: Network related exceptions from engine.sync() lead to the right status code.");
-  let server = sync_httpd_setup();
+  let server = await sync_httpd_setup();
   await setUp(server);
 
   let engine = engineManager.get("catapult");
   engine.enabled = true;
   engine.exception = Components.Exception("NS_ERROR_UNKNOWN_HOST",
                                           Cr.NS_ERROR_UNKNOWN_HOST);
 
   try {
     do_check_eq(Status.sync, SYNC_SUCCEEDED);
 
     do_check_true(await generateAndUploadKeys(server));
 
-    Service.login();
-    Service.sync();
+    await Service.login();
+    await Service.sync();
 
     do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
     do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   } finally {
     Status.resetSync();
-    Service.startOver();
+    await Service.startOver();
   }
   await promiseStopServer(server);
 });
 
 add_task(async function test_resource_timeout() {
   enableValidationPrefs();
 
-  let server = sync_httpd_setup();
+  let server = await sync_httpd_setup();
   await setUp(server);
 
   let engine = engineManager.get("catapult");
   engine.enabled = true;
   // Resource throws this when it encounters a timeout.
   engine.exception = Components.Exception("Aborting due to channel inactivity.",
                                           Cr.NS_ERROR_NET_TIMEOUT);
 
   try {
     do_check_eq(Status.sync, SYNC_SUCCEEDED);
 
     do_check_true(await generateAndUploadKeys(server));
 
-    Service.login();
-    Service.sync();
+    await Service.login();
+    await Service.sync();
 
     do_check_eq(Status.sync, LOGIN_FAILED_NETWORK_ERROR);
     do_check_eq(Status.service, SYNC_FAILED_PARTIAL);
   } finally {
     Status.resetSync();
-    Service.startOver();
+    await Service.startOver();
   }
   await promiseStopServer(server);
 });
-
-function run_test() {
-  validate_all_future_pings();
-  engineManager.register(CatapultEngine);
-  run_next_test();
-}
--- a/services/sync/tests/unit/test_extension_storage_engine.js
+++ b/services/sync/tests/unit/test_extension_storage_engine.js
@@ -6,36 +6,40 @@
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/engines/extension-storage.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 Cu.import("resource://gre/modules/ExtensionStorageSync.jsm");
 /* globals extensionStorageSync */
 
-Service.engineManager.register(ExtensionStorageEngine);
-const engine = Service.engineManager.get("extension-storage");
-do_get_profile();   // so we can use FxAccounts
-loadWebExtensionTestFunctions();
+let engine;
 
 function mock(options) {
   let calls = [];
   let ret = function() {
     calls.push(arguments);
     return options.returns;
   }
   Object.setPrototypeOf(ret, {
     __proto__: Function.prototype,
     get calls() {
       return calls;
     }
   });
   return ret;
 }
 
+add_task(async function setup() {
+  await Service.engineManager.register(ExtensionStorageEngine);
+  engine = Service.engineManager.get("extension-storage");
+  do_get_profile();   // so we can use FxAccounts
+  loadWebExtensionTestFunctions();
+});
+
 add_task(async function test_calling_sync_calls__sync() {
   let oldSync = ExtensionStorageEngine.prototype._sync;
   let syncMock = ExtensionStorageEngine.prototype._sync = mock({returns: true});
   try {
     // I wanted to call the main sync entry point for the entire
     // package, but that fails because it tries to sync ClientEngine
     // first, which fails.
     await engine.sync();
--- a/services/sync/tests/unit/test_extension_storage_tracker.js
+++ b/services/sync/tests/unit/test_extension_storage_tracker.js
@@ -6,20 +6,24 @@
 Cu.import("resource://services-sync/constants.js");
 Cu.import("resource://services-sync/engines.js");
 Cu.import("resource://services-sync/engines/extension-storage.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://gre/modules/ExtensionStorageSync.jsm");
 /* globals extensionStorageSync */
 
-Service.engineManager.register(ExtensionStorageEngine);
-const engine = Service.engineManager.get("extension-storage");
-do_get_profile();   // so we can use FxAccounts
-loadWebExtensionTestFunctions();
+let engine;
+
+add_task(async function setup() {
+  await Service.engineManager.register(ExtensionStorageEngine);
+  engine = Service.engineManager.get("extension-storage");
+  do_get_profile();   // so we can use FxAccounts
+  loadWebExtensionTestFunctions();
+});
 
 add_task(async function test_changing_extension_storage_changes_score() {
   const tracker = engine._tracker;
   const extension = {id: "my-extension-id"};
   Svc.Obs.notify("weave:engine:start-tracking");
   await withSyncContext(async function(context) {
     await extensionStorageSync.set(extension, {"a": "b"}, context);
   });
--- a/services/sync/tests/unit/test_form_validator.js
+++ b/services/sync/tests/unit/test_form_validator.js
@@ -49,45 +49,41 @@ function getDummyServerAndClient() {
         name: "foo3",
         fieldname: "foo3",
         value: "bar3",
       }
     ]
   };
 }
 
-add_test(function test_valid() {
+add_task(async function test_valid() {
   let { server, client } = getDummyServerAndClient();
   let validator = new FormValidator();
   let { problemData, clientRecords, records, deletedRecords } =
-      validator.compareClientWithServer(client, server);
+      await validator.compareClientWithServer(client, server);
   equal(clientRecords.length, 3);
   equal(records.length, 3)
   equal(deletedRecords.length, 0);
   deepEqual(problemData, validator.emptyProblemData());
-
-  run_next_test();
 });
 
 
-add_test(function test_formValidatorIgnoresMissingClients() {
+add_task(async function test_formValidatorIgnoresMissingClients() {
   // Since history form records are not deleted from the server, the
   // |FormValidator| shouldn't set the |missingClient| flag in |problemData|.
   let { server, client } = getDummyServerAndClient();
   client.pop();
 
   let validator = new FormValidator();
   let { problemData, clientRecords, records, deletedRecords } =
-      validator.compareClientWithServer(client, server);
+      await validator.compareClientWithServer(client, server);
 
   equal(clientRecords.length, 2);
   equal(records.length, 3);
   equal(deletedRecords.length, 0);
 
   let expected = validator.emptyProblemData();
   deepEqual(problemData, expected);
-
-  run_next_test();
 });
 
 function run_test() {
   run_next_test();
 }
--- a/services/sync/tests/unit/test_forms_store.js
+++ b/services/sync/tests/unit/test_forms_store.js
@@ -2,149 +2,150 @@
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 _("Make sure the form store follows the Store api and correctly accesses the backend form storage");
 Cu.import("resource://services-sync/engines/forms.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://gre/modules/Services.jsm");
 
-function run_test() {
+add_task(async function run_test() {
   let engine = new FormEngine(Service);
+  await engine.initialize();
   let store = engine._store;
 
-  function applyEnsureNoFailures(records) {
-    do_check_eq(store.applyIncomingBatch(records).length, 0);
+  async function applyEnsureNoFailures(records) {
+    do_check_eq((await store.applyIncomingBatch(records)).length, 0);
   }
 
   _("Remove any existing entries");
-  store.wipe();
-  if (store.getAllIDs().length) {
+  await store.wipe();
+  if ((await store.getAllIDs()).length) {
     do_throw("Shouldn't get any ids!");
   }
 
   _("Add a form entry");
-  applyEnsureNoFailures([{
+  await applyEnsureNoFailures([{
     id: Utils.makeGUID(),
     name: "name!!",
     value: "value??"
   }]);
 
   _("Should have 1 entry now");
   let id = "";
-  for (let _id in store.getAllIDs()) {
+  for (let _id in (await store.getAllIDs())) {
     if (id == "")
       id = _id;
     else
       do_throw("Should have only gotten one!");
   }
-  do_check_true(store.itemExists(id));
+  do_check_true((store.itemExists(id)));
 
   _("Should be able to find this entry as a dupe");
-  do_check_eq(engine._findDupe({name: "name!!", value: "value??"}), id);
+  do_check_eq((await engine._findDupe({name: "name!!", value: "value??"})), id);
 
-  let rec = store.createRecord(id);
+  let rec = await store.createRecord(id);
   _("Got record for id", id, rec);
   do_check_eq(rec.name, "name!!");
   do_check_eq(rec.value, "value??");
 
   _("Create a non-existent id for delete");
-  do_check_true(store.createRecord("deleted!!").deleted);
+  do_check_true((await store.createRecord("deleted!!")).deleted);
 
   _("Try updating.. doesn't do anything yet");
-  store.update({});
+  await store.update({});
 
   _("Remove all entries");
-  store.wipe();
-  if (store.getAllIDs().length) {
+  await store.wipe();
+  if ((await store.getAllIDs()).length) {
     do_throw("Shouldn't get any ids!");
   }
 
   _("Add another entry");
-  applyEnsureNoFailures([{
+  await applyEnsureNoFailures([{
     id: Utils.makeGUID(),
     name: "another",
     value: "entry"
   }]);
   id = "";
-  for (let _id in store.getAllIDs()) {
+  for (let _id in (await store.getAllIDs())) {
     if (id == "")
       id = _id;
     else
       do_throw("Should have only gotten one!");
   }
 
   _("Change the id of the new entry to something else");
-  store.changeItemID(id, "newid");
+  await store.changeItemID(id, "newid");
 
   _("Make sure it's there");
-  do_check_true(store.itemExists("newid"));
+  do_check_true((store.itemExists("newid")));
 
   _("Remove the entry");
-  store.remove({
+  await store.remove({
     id: "newid"
   });
-  if (store.getAllIDs().length) {
+  if ((await store.getAllIDs()).length) {
     do_throw("Shouldn't get any ids!");
   }
 
   _("Removing the entry again shouldn't matter");
-  store.remove({
+  await store.remove({
     id: "newid"
   });
-  if (store.getAllIDs().length) {
+  if ((await store.getAllIDs()).length) {
     do_throw("Shouldn't get any ids!");
   }
 
   _("Add another entry to delete using applyIncomingBatch");
   let toDelete = {
     id: Utils.makeGUID(),
     name: "todelete",
     value: "entry"
   };
-  applyEnsureNoFailures([toDelete]);
+  await applyEnsureNoFailures([toDelete]);
   id = "";
-  for (let _id in store.getAllIDs()) {
+  for (let _id in (await store.getAllIDs())) {
     if (id == "")
       id = _id;
     else
       do_throw("Should have only gotten one!");
   }
-  do_check_true(store.itemExists(id));
+  do_check_true((store.itemExists(id)));
   // mark entry as deleted
   toDelete.id = id;
   toDelete.deleted = true;
-  applyEnsureNoFailures([toDelete]);
-  if (store.getAllIDs().length) {
+  await applyEnsureNoFailures([toDelete]);
+  if ((await store.getAllIDs()).length) {
     do_throw("Shouldn't get any ids!");
   }
 
   _("Add an entry to wipe");
-  applyEnsureNoFailures([{
+  await applyEnsureNoFailures([{
     id: Utils.makeGUID(),
     name: "towipe",
     value: "entry"
   }]);
 
-  store.wipe();
+  await store.wipe();
 
-  if (store.getAllIDs().length) {
+  if ((await store.getAllIDs()).length) {
     do_throw("Shouldn't get any ids!");
   }
 
   _("Ensure we work if formfill is disabled.");
   Services.prefs.setBoolPref("browser.formfill.enable", false);
   try {
     // a search
-    if (store.getAllIDs().length) {
+    if ((await store.getAllIDs()).length) {
       do_throw("Shouldn't get any ids!");
     }
     // an update.
-    applyEnsureNoFailures([{
+    await applyEnsureNoFailures([{
       id: Utils.makeGUID(),
       name: "some",
       value: "entry"
     }]);
   } finally {
     Services.prefs.clearUserPref("browser.formfill.enable");
-    store.wipe();
+    await store.wipe();
   }
-}
+});
--- a/services/sync/tests/unit/test_forms_tracker.js
+++ b/services/sync/tests/unit/test_forms_tracker.js
@@ -1,72 +1,73 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 Cu.import("resource://gre/modules/Log.jsm");
 Cu.import("resource://services-sync/engines/forms.js");
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 
-function run_test() {
+add_task(async function run_test() {
   _("Verify we've got an empty tracker to work with.");
   let engine = new FormEngine(Service);
+  await engine.initialize();
   let tracker = engine._tracker;
   // Don't do asynchronous writes.
   tracker.persistChangedIDs = false;
 
   do_check_empty(tracker.changedIDs);
   Log.repository.rootLogger.addAppender(new Log.DumpAppender());
 
-  function addEntry(name, value) {
-    engine._store.create({name, value});
+  async function addEntry(name, value) {
+    await engine._store.create({name, value});
   }
-  function removeEntry(name, value) {
-    let guid = engine._findDupe({name, value});
-    engine._store.remove({id: guid});
+  async function removeEntry(name, value) {
+    let guid = await engine._findDupe({name, value});
+    await engine._store.remove({id: guid});
   }
 
   try {
     _("Create an entry. Won't show because we haven't started tracking yet");
-    addEntry("name", "John Doe");
+    await addEntry("name", "John Doe");
     do_check_empty(tracker.changedIDs);
 
     _("Tell the tracker to start tracking changes.");
     Svc.Obs.notify("weave:engine:start-tracking");
-    removeEntry("name", "John Doe");
-    addEntry("email", "john@doe.com");
+    await removeEntry("name", "John Doe");
+    await addEntry("email", "john@doe.com");
     do_check_attribute_count(tracker.changedIDs, 2);
 
     _("Notifying twice won't do any harm.");
     Svc.Obs.notify("weave:engine:start-tracking");
-    addEntry("address", "Memory Lane");
+    await addEntry("address", "Memory Lane");
     do_check_attribute_count(tracker.changedIDs, 3);
 
 
     _("Check that ignoreAll is respected");
     tracker.clearChangedIDs();
     tracker.score = 0;
     tracker.ignoreAll = true;
-    addEntry("username", "johndoe123");
-    addEntry("favoritecolor", "green");
-    removeEntry("name", "John Doe");
+    await addEntry("username", "johndoe123");
+    await addEntry("favoritecolor", "green");
+    await removeEntry("name", "John Doe");
     tracker.ignoreAll = false;
     do_check_empty(tracker.changedIDs);
     equal(tracker.score, 0);
 
     _("Let's stop tracking again.");
     tracker.clearChangedIDs();
     Svc.Obs.notify("weave:engine:stop-tracking");
-    removeEntry("address", "Memory Lane");
+    await removeEntry("address", "Memory Lane");
     do_check_empty(tracker.changedIDs);
 
     _("Notifying twice won't do any harm.");
     Svc.Obs.notify("weave:engine:stop-tracking");
-    removeEntry("email", "john@doe.com");
+    await removeEntry("email", "john@doe.com");
     do_check_empty(tracker.changedIDs);
 
 
 
   } finally {
     _("Clean up.");
-    engine._store.wipe();
+    await engine._store.wipe();
   }
-}
+});
--- a/services/sync/tests/unit/test_fxa_node_reassignment.js
+++ b/services/sync/tests/unit/test_fxa_node_reassignment.js
@@ -13,42 +13,40 @@ Cu.import("resource://services-sync/cons
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/status.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://testing-common/services/sync/rotaryengine.js");
 Cu.import("resource://services-sync/browserid_identity.js");
 Cu.import("resource://testing-common/services/sync/utils.js");
 Cu.import("resource://gre/modules/PromiseUtils.jsm");
 
-// Disables all built-in engines. Important for avoiding errors thrown by the
-// add-ons engine.
-Service.engineManager.clear();
-
-function run_test() {
+add_task(async function setup() {
   Log.repository.getLogger("Sync.AsyncResource").level = Log.Level.Trace;
   Log.repository.getLogger("Sync.ErrorHandler").level  = Log.Level.Trace;
   Log.repository.getLogger("Sync.Resource").level      = Log.Level.Trace;
   Log.repository.getLogger("Sync.RESTRequest").level   = Log.Level.Trace;
   Log.repository.getLogger("Sync.Service").level       = Log.Level.Trace;
   Log.repository.getLogger("Sync.SyncScheduler").level = Log.Level.Trace;
   initTestLogging();
 
+  // Disables all built-in engines. Important for avoiding errors thrown by the
+  // add-ons engine.
+  Service.engineManager.clear();
+
   // Setup the FxA identity manager and cluster manager.
   Status.__authManager = Service.identity = new BrowserIDManager();
   Service._clusterManager = Service.identity.createClusterManager(Service);
 
   // None of the failures in this file should result in a UI error.
   function onUIError() {
     do_throw("Errors should not be presented in the UI.");
   }
   Svc.Obs.add("weave:ui:login:error", onUIError);
   Svc.Obs.add("weave:ui:sync:error", onUIError);
-
-  run_next_test();
-}
+});
 
 
 // API-compatible with SyncServer handler. Bind `handler` to something to use
 // as a ServerCollection handler.
 function handleReassign(handler, req, resp) {
   resp.setStatusLine(req.httpVersion, 401, "Node reassignment");
   resp.setHeader("Content-Type", "application/json");
   let reassignBody = JSON.stringify({error: "401inator in place"});
@@ -117,17 +115,17 @@ function getReassigned() {
  * to ensure that a node request was made.
  * Runs `between` between the two. This can be used to undo deliberate failure
  * setup, detach observers, etc.
  */
 async function syncAndExpectNodeReassignment(server, firstNotification, between,
                                              secondNotification, url) {
   _("Starting syncAndExpectNodeReassignment\n");
   let deferred = PromiseUtils.defer();
-  function onwards() {
+  async function onwards() {
     let numTokenRequestsBefore;
     function onFirstSync() {
       _("First sync completed.");
       Svc.Obs.remove(firstNotification, onFirstSync);
       Svc.Obs.add(secondNotification, onSecondSync);
 
       do_check_eq(Service.clusterURL, "");
 
@@ -142,23 +140,24 @@ async function syncAndExpectNodeReassign
       Svc.Obs.remove(secondNotification, onSecondSync);
       Service.scheduler.clearSyncTriggers();
 
       // Make absolutely sure that any event listeners are done with their work
       // before we proceed.
       waitForZeroTimer(function() {
         _("Second sync nextTick.");
         do_check_eq(numTokenRequests, numTokenRequestsBefore + 1, "fetched a new token");
-        Service.startOver();
-        server.stop(deferred.resolve);
+        Service.startOver().then(() => {
+          server.stop(deferred.resolve);
+        });
       });
     }
 
     Svc.Obs.add(firstNotification, onFirstSync);
-    Service.sync();
+    await Service.sync();
   }
 
   // Make sure that we really do get a 401 (but we can only do that if we are
   // already logged in, as the login process is what sets up the URLs)
   if (Service.isLoggedIn) {
     _("Making request to " + url + " which should 401");
     let request = new RESTRequest(url);
     request.get(function() {
@@ -191,47 +190,47 @@ add_task(async function test_single_toke
   // we got from the token (and as above, we are also checking we don't grab
   // a new token). If the test actually attempts to connect to this URL
   // it will crash.
   Service.clusterURL = "http://example.com/";
 
   let server = await prepareServer(afterTokenFetch);
 
   do_check_false(Service.isLoggedIn, "not already logged in");
-  Service.sync();
+  await Service.sync();
   do_check_eq(Status.sync, SYNC_SUCCEEDED, "sync succeeded");
   do_check_eq(numTokenFetches, 0, "didn't fetch a new token");
   // A bit hacky, but given we know how prepareServer works we can deduce
   // that clusterURL we expect.
   let expectedClusterURL = server.baseURI + "1.1/johndoe/";
   do_check_eq(Service.clusterURL, expectedClusterURL);
-  Service.startOver();
+  await Service.startOver();
   await promiseStopServer(server);
 });
 
 add_task(async function test_momentary_401_engine() {
   enableValidationPrefs();
 
   _("Test a failure for engine URLs that's resolved by reassignment.");
   let server = await prepareServer();
   let john   = server.user("johndoe");
 
   _("Enabling the Rotary engine.");
-  let { engine, tracker } = registerRotaryEngine();
+  let { engine, tracker } = await registerRotaryEngine();
 
   // We need the server to be correctly set up prior to experimenting. Do this
   // through a sync.
   let global = {syncID: Service.syncID,
                 storageVersion: STORAGE_VERSION,
                 rotary: {version: engine.version,
                          syncID:  engine.syncID}}
   john.createCollection("meta").insert("global", global);
 
   _("First sync to prepare server contents.");
-  Service.sync();
+  await Service.sync();
 
   _("Setting up Rotary collection to 401.");
   let rotary = john.createCollection("rotary");
   let oldHandler = rotary.collectionHandler;
   rotary.collectionHandler = handleReassign.bind(this, undefined);
 
   // We want to verify that the clusterURL pref has been cleared after a 401
   // inside a sync. Flag the Rotary engine to need syncing.
@@ -265,17 +264,17 @@ add_task(async function test_momentary_4
 // This test ends up being a failing info fetch *after we're already logged in*.
 add_task(async function test_momentary_401_info_collections_loggedin() {
   enableValidationPrefs();
 
   _("Test a failure for info/collections after login that's resolved by reassignment.");
   let server = await prepareServer();
 
   _("First sync to prepare server contents.");
-  Service.sync();
+  await Service.sync();
 
   _("Arrange for info/collections to return a 401.");
   let oldHandler = server.toplevelHandlers.info;
   server.toplevelHandlers.info = handleReassign;
 
   function undo() {
     _("Undoing test changes.");
     server.toplevelHandlers.info = oldHandler;
@@ -312,35 +311,35 @@ add_task(async function test_momentary_4
 
   // Return a 401 for the next /info request - it will be reset immediately
   // after a new token is fetched.
   oldHandler = server.toplevelHandlers.info
   server.toplevelHandlers.info = handleReassign;
 
   do_check_false(Service.isLoggedIn, "not already logged in");
 
-  Service.sync();
+  await Service.sync();
   do_check_eq(Status.sync, SYNC_SUCCEEDED, "sync succeeded");
   // sync was successful - check we grabbed a new token.
   do_check_true(sawTokenFetch, "a new token was fetched by this test.")
   // and we are done.
-  Service.startOver();
+  await Service.startOver();
   await promiseStopServer(server);
 });
 
 // This test ends up being a failing meta/global fetch *after we're already logged in*.
 add_task(async function test_momentary_401_storage_loggedin() {
   enableValidationPrefs();
 
   _("Test a failure for any storage URL after login that's resolved by" +
     "reassignment.");
   let server = await prepareServer();
 
   _("First sync to prepare server contents.");
-  Service.sync();
+  await Service.sync();
 
   _("Arrange for meta/global to return a 401.");
   let oldHandler = server.toplevelHandlers.storage;
   server.toplevelHandlers.storage = handleReassign;
 
   function undo() {
     _("Undoing test changes.");
     server.toplevelHandlers.storage = oldHandler;
--- a/services/sync/tests/unit/test_history_store.js
+++ b/services/sync/tests/unit/test_history_store.js
@@ -28,153 +28,149 @@ function queryPlaces(uri, options) {
 function queryHistoryVisits(uri) {
   let options = PlacesUtils.history.getNewQueryOptions();
   options.queryType = Ci.nsINavHistoryQueryOptions.QUERY_TYPE_HISTORY;
   options.resultType = Ci.nsINavHistoryQueryOptions.RESULTS_AS_VISIT;
   options.sortingMode = Ci.nsINavHistoryQueryOptions.SORT_BY_DATE_ASCENDING;
   return queryPlaces(uri, options);
 }
 
-function onNextVisit(callback) {
-  PlacesUtils.history.addObserver({
-    onBeginUpdateBatch: function onBeginUpdateBatch() {},
-    onEndUpdateBatch: function onEndUpdateBatch() {},
-    onPageChanged: function onPageChanged() {},
-    onTitleChanged: function onTitleChanged() {
-    },
-    onVisit: function onVisit() {
-      PlacesUtils.history.removeObserver(this);
-      Utils.nextTick(callback);
-    },
-    onDeleteVisits: function onDeleteVisits() {},
-    onPageExpired: function onPageExpired() {},
-    onDeleteURI: function onDeleteURI() {},
-    onClearHistory: function onClearHistory() {},
-    QueryInterface: XPCOMUtils.generateQI([
-      Ci.nsINavHistoryObserver,
-      Ci.nsINavHistoryObserver_MOZILLA_1_9_1_ADDITIONS,
-      Ci.nsISupportsWeakReference
-    ])
-  }, true);
+function promiseOnVisitObserved() {
+  return new Promise(res => {
+    PlacesUtils.history.addObserver({
+      onBeginUpdateBatch: function onBeginUpdateBatch() {},
+      onEndUpdateBatch: function onEndUpdateBatch() {},
+      onPageChanged: function onPageChanged() {},
+      onTitleChanged: function onTitleChanged() {
+      },
+      onVisit: function onVisit() {
+        PlacesUtils.history.removeObserver(this);
+        res();
+      },
+      onDeleteVisits: function onDeleteVisits() {},
+      onPageExpired: function onPageExpired() {},
+      onDeleteURI: function onDeleteURI() {},
+      onClearHistory: function onClearHistory() {},
+      QueryInterface: XPCOMUtils.generateQI([
+        Ci.nsINavHistoryObserver,
+        Ci.nsINavHistoryObserver_MOZILLA_1_9_1_ADDITIONS,
+        Ci.nsISupportsWeakReference
+      ])
+    }, true);
+  });
 }
 
-// Ensure exceptions from inside callbacks leads to test failures while
-// we still clean up properly.
-function ensureThrows(func) {
-  return function() {
-    try {
-      func.apply(this, arguments);
-    } catch (ex) {
-      PlacesTestUtils.clearHistory();
-      do_throw(ex);
-    }
-  };
-}
-
-var store = new HistoryEngine(Service)._store;
-function applyEnsureNoFailures(records) {
-  do_check_eq(store.applyIncomingBatch(records).length, 0);
+var engine = new HistoryEngine(Service);
+Async.promiseSpinningly(engine.initialize());
+var store = engine._store;
+async function applyEnsureNoFailures(records) {
+  do_check_eq((await store.applyIncomingBatch(records)).length, 0);
 }
 
 var fxuri, fxguid, tburi, tbguid;
 
 function run_test() {
   initTestLogging("Trace");
   run_next_test();
 }
 
-add_test(function test_store() {
+add_task(async function test_store() {
   _("Verify that we've got an empty store to work with.");
-  do_check_empty(store.getAllIDs());
+  do_check_empty((await store.getAllIDs()));
 
   _("Let's create an entry in the database.");
   fxuri = Utils.makeURI("http://getfirefox.com/");
 
-  PlacesTestUtils.addVisits({ uri: fxuri, title: "Get Firefox!",
-                              visitDate: TIMESTAMP1 })
-                 .then(() => {
-    _("Verify that the entry exists.");
-    let ids = Object.keys(store.getAllIDs());
-    do_check_eq(ids.length, 1);
-    fxguid = ids[0];
-    do_check_true(store.itemExists(fxguid));
+  await PlacesTestUtils.addVisits({ uri: fxuri, title: "Get Firefox!",
+                                  visitDate: TIMESTAMP1 });
+  _("Verify that the entry exists.");
+  let ids = Object.keys((await store.getAllIDs()));
+  do_check_eq(ids.length, 1);
+  fxguid = ids[0];
+  do_check_true((await store.itemExists(fxguid)));
 
-    _("If we query a non-existent record, it's marked as deleted.");
-    let record = store.createRecord("non-existent");
-    do_check_true(record.deleted);
+  _("If we query a non-existent record, it's marked as deleted.");
+  let record = await store.createRecord("non-existent");
+  do_check_true(record.deleted);
+
+  _("Verify createRecord() returns a complete record.");
+  record = await store.createRecord(fxguid);
+  do_check_eq(record.histUri, fxuri.spec);
+  do_check_eq(record.title, "Get Firefox!");
+  do_check_eq(record.visits.length, 1);
+  do_check_eq(record.visits[0].date, TIMESTAMP1);
+  do_check_eq(record.visits[0].type, Ci.nsINavHistoryService.TRANSITION_LINK);
 
-    _("Verify createRecord() returns a complete record.");
-    record = store.createRecord(fxguid);
-    do_check_eq(record.histUri, fxuri.spec);
-    do_check_eq(record.title, "Get Firefox!");
-    do_check_eq(record.visits.length, 1);
-    do_check_eq(record.visits[0].date, TIMESTAMP1);
-    do_check_eq(record.visits[0].type, Ci.nsINavHistoryService.TRANSITION_LINK);
-
-    _("Let's modify the record and have the store update the database.");
-    let secondvisit = {date: TIMESTAMP2,
-                       type: Ci.nsINavHistoryService.TRANSITION_TYPED};
-    onNextVisit(ensureThrows(function() {
-      let queryres = queryHistoryVisits(fxuri);
-      do_check_eq(queryres.length, 2);
-      do_check_eq(queryres[0].time, TIMESTAMP1);
-      do_check_eq(queryres[0].title, "Hol Dir Firefox!");
-      do_check_eq(queryres[1].time, TIMESTAMP2);
-      do_check_eq(queryres[1].title, "Hol Dir Firefox!");
-      run_next_test();
-    }));
-    applyEnsureNoFailures([
-      {id: fxguid,
-       histUri: record.histUri,
-       title: "Hol Dir Firefox!",
-       visits: [record.visits[0], secondvisit]}
-    ]);
-  });
+  _("Let's modify the record and have the store update the database.");
+  let secondvisit = {date: TIMESTAMP2,
+                     type: Ci.nsINavHistoryService.TRANSITION_TYPED};
+  let onVisitObserved = promiseOnVisitObserved();
+  await applyEnsureNoFailures([
+    {id: fxguid,
+     histUri: record.histUri,
+     title: "Hol Dir Firefox!",
+     visits: [record.visits[0], secondvisit]}
+  ]);
+  await onVisitObserved;
+  try {
+    let queryres = queryHistoryVisits(fxuri);
+    do_check_eq(queryres.length, 2);
+    do_check_eq(queryres[0].time, TIMESTAMP1);
+    do_check_eq(queryres[0].title, "Hol Dir Firefox!");
+    do_check_eq(queryres[1].time, TIMESTAMP2);
+    do_check_eq(queryres[1].title, "Hol Dir Firefox!");
+  } catch (ex) {
+    PlacesTestUtils.clearHistory();
+    do_throw(ex);
+  }
 });
 
-add_test(function test_store_create() {
+add_task(async function test_store_create() {
   _("Create a brand new record through the store.");
   tbguid = Utils.makeGUID();
   tburi = Utils.makeURI("http://getthunderbird.com");
-  onNextVisit(ensureThrows(function() {
-    do_check_attribute_count(store.getAllIDs(), 2);
-    let queryres = queryHistoryVisits(tburi);
-    do_check_eq(queryres.length, 1);
-    do_check_eq(queryres[0].time, TIMESTAMP3);
-    do_check_eq(queryres[0].title, "The bird is the word!");
-    run_next_test();
-  }));
-  applyEnsureNoFailures([
+  let onVisitObserved = promiseOnVisitObserved();
+  await applyEnsureNoFailures([
     {id: tbguid,
      histUri: tburi.spec,
      title: "The bird is the word!",
      visits: [{date: TIMESTAMP3,
                type: Ci.nsINavHistoryService.TRANSITION_TYPED}]}
   ]);
+  await onVisitObserved;
+  try {
+    do_check_attribute_count(Async.promiseSpinningly(store.getAllIDs()), 2);
+    let queryres = queryHistoryVisits(tburi);
+    do_check_eq(queryres.length, 1);
+    do_check_eq(queryres[0].time, TIMESTAMP3);
+    do_check_eq(queryres[0].title, "The bird is the word!");
+  } catch (ex) {
+    PlacesTestUtils.clearHistory();
+    do_throw(ex);
+  }
 });
 
-add_test(function test_null_title() {
+add_task(async function test_null_title() {
   _("Make sure we handle a null title gracefully (it can happen in some cases, e.g. for resource:// URLs)");
   let resguid = Utils.makeGUID();
   let resuri = Utils.makeURI("unknown://title");
-  applyEnsureNoFailures([
+  await applyEnsureNoFailures([
     {id: resguid,
      histUri: resuri.spec,
      title: null,
      visits: [{date: TIMESTAMP3,
                type: Ci.nsINavHistoryService.TRANSITION_TYPED}]}
   ]);
-  do_check_attribute_count(store.getAllIDs(), 3);
+  do_check_attribute_count((await store.getAllIDs()), 3);
   let queryres = queryHistoryVisits(resuri);
   do_check_eq(queryres.length, 1);
   do_check_eq(queryres[0].time, TIMESTAMP3);
-  run_next_test();
 });
 
-add_test(function test_invalid_records() {
+add_task(async function test_invalid_records() {
   _("Make sure we handle invalid URLs in places databases gracefully.");
   let connection = PlacesUtils.history
                               .QueryInterface(Ci.nsPIPlacesDatabase)
                               .DBConnection;
   let stmt = connection.createAsyncStatement(
     "INSERT INTO moz_places "
   + "(url, url_hash, title, rev_host, visit_count, last_visit_date) "
   + "VALUES ('invalid-uri', hash('invalid-uri'), 'Invalid URI', '.', 1, " + TIMESTAMP3 + ")"
@@ -185,45 +181,45 @@ add_test(function test_invalid_records()
   stmt = connection.createAsyncStatement(
     "INSERT INTO moz_historyvisits "
   + "(place_id, visit_date, visit_type, session) "
   + "VALUES ((SELECT id FROM moz_places WHERE url_hash = hash('invalid-uri') AND url = 'invalid-uri'), "
   + TIMESTAMP3 + ", " + Ci.nsINavHistoryService.TRANSITION_TYPED + ", 1)"
   );
   Async.querySpinningly(stmt);
   stmt.finalize();
-  do_check_attribute_count(store.getAllIDs(), 4);
+  do_check_attribute_count((await store.getAllIDs()), 4);
 
   _("Make sure we report records with invalid URIs.");
   let invalid_uri_guid = Utils.makeGUID();
-  let failed = store.applyIncomingBatch([{
+  let failed = await store.applyIncomingBatch([{
     id: invalid_uri_guid,
     histUri: ":::::::::::::::",
     title: "Doesn't have a valid URI",
     visits: [{date: TIMESTAMP3,
               type: Ci.nsINavHistoryService.TRANSITION_EMBED}]}
   ]);
   do_check_eq(failed.length, 1);
   do_check_eq(failed[0], invalid_uri_guid);
 
   _("Make sure we handle records with invalid GUIDs gracefully (ignore).");
-  applyEnsureNoFailures([
+  await applyEnsureNoFailures([
     {id: "invalid",
      histUri: "http://invalid.guid/",
      title: "Doesn't have a valid GUID",
      visits: [{date: TIMESTAMP3,
                type: Ci.nsINavHistoryService.TRANSITION_EMBED}]}
   ]);
 
   _("Make sure we handle records with invalid visit codes or visit dates, gracefully ignoring those visits.");
   let no_date_visit_guid = Utils.makeGUID();
   let no_type_visit_guid = Utils.makeGUID();
   let invalid_type_visit_guid = Utils.makeGUID();
   let non_integer_visit_guid = Utils.makeGUID();
-  failed = store.applyIncomingBatch([
+  failed = await store.applyIncomingBatch([
     {id: no_date_visit_guid,
      histUri: "http://no.date.visit/",
      title: "Visit has no date",
      visits: [{type: Ci.nsINavHistoryService.TRANSITION_EMBED}]},
     {id: no_type_visit_guid,
      histUri: "http://no.type.visit/",
      title: "Visit has no type",
      visits: [{date: TIMESTAMP3}]},
@@ -236,49 +232,46 @@ add_test(function test_invalid_records()
      histUri: "http://non.integer.visit/",
      title: "Visit has non-integer date",
      visits: [{date: 1234.567,
                type: Ci.nsINavHistoryService.TRANSITION_EMBED}]}
   ]);
   do_check_eq(failed.length, 0);
 
   _("Make sure we handle records with javascript: URLs gracefully.");
-  applyEnsureNoFailures([
+  await applyEnsureNoFailures([
     {id: Utils.makeGUID(),
      histUri: "javascript:''",
      title: "javascript:''",
      visits: [{date: TIMESTAMP3,
                type: Ci.nsINavHistoryService.TRANSITION_EMBED}]}
   ]);
 
   _("Make sure we handle records without any visits gracefully.");
-  applyEnsureNoFailures([
+  await applyEnsureNoFailures([
     {id: Utils.makeGUID(),
      histUri: "http://getfirebug.com",
      title: "Get Firebug!",
      visits: []}
   ]);
-
-  run_next_test();
 });
 
-add_test(function test_remove() {
+add_task(async function test_remove() {
   _("Remove an existent record and a non-existent from the store.");
-  applyEnsureNoFailures([{id: fxguid, deleted: true},
+  await applyEnsureNoFailures([{id: fxguid, deleted: true},
                          {id: Utils.makeGUID(), deleted: true}]);
-  do_check_false(store.itemExists(fxguid));
+  do_check_false((await store.itemExists(fxguid)));
   let queryres = queryHistoryVisits(fxuri);
   do_check_eq(queryres.length, 0);
 
   _("Make sure wipe works.");
-  store.wipe();
-  do_check_empty(store.getAllIDs());
+  await store.wipe();
+  do_check_empty((await store.getAllIDs()));
   queryres = queryHistoryVisits(fxuri);
   do_check_eq(queryres.length, 0);
   queryres = queryHistoryVisits(tburi);
   do_check_eq(queryres.length, 0);
-  run_next_test();
 });