Merge services-central with mozilla-central
author Philipp von Weitershausen <philipp@weitershausen.de>
Wed, 21 Sep 2011 12:55:42 -0700
changeset 77264 50b36274e6896899e9b3e1de6b4e0ad75ed60a37
parent 77263 57f8b1efe71145d89b918ed1eb34ced1d5656e02 (diff)
parent 77251 4ea7e20806667913ab2fc941b4d0605c53c263c2 (current diff)
child 77296 4495e1f795c218d562205f3364ac1f84aecee220
child 77750 ff0814fd7bbcbd541bb67d261b411082f7cd17f2
push id 3
push user felipc@gmail.com
push date Fri, 30 Sep 2011 20:09:13 +0000
milestone 9.0a1
Merge services-central with mozilla-central
--- a/browser/base/content/syncSetup.js
+++ b/browser/base/content/syncSetup.js
@@ -595,16 +595,19 @@ var gSyncSetup = {
   },
 
   startEasySetup: function () {
     // Don't do anything if we have a client already (e.g. we went to
     // Sync Options and just came back).
     if (this._jpakeclient)
       return;
 
+    // When onAbort is called, Weave may already be gone
+    const JPAKE_ERROR_USERABORT = Weave.JPAKE_ERROR_USERABORT;
+
     let self = this;
     this._jpakeclient = new Weave.JPAKEClient({
       displayPIN: function displayPIN(pin) {
         document.getElementById("easySetupPIN1").value = pin.slice(0, 4);
         document.getElementById("easySetupPIN2").value = pin.slice(4, 8);
         document.getElementById("easySetupPIN3").value = pin.slice(8);
       },
 
@@ -614,18 +617,18 @@ var gSyncSetup = {
         Weave.Service.passphrase = credentials.synckey;
         Weave.Service.serverURL = credentials.serverURL;
         self.wizard.pageIndex = SETUP_SUCCESS_PAGE;
       },
 
       onAbort: function onAbort(error) {
         delete self._jpakeclient;
 
-        // No error means manual abort, e.g. wizard is aborted. Ignore.
-        if (!error)
+        // Ignore a manual abort, e.g. when the wizard is closed.
+        if (error == JPAKE_ERROR_USERABORT)
           return;
 
         // Automatically go to manual setup if we couldn't acquire a channel.
         if (error == Weave.JPAKE_ERROR_CHANNEL) {
           self.wizard.pageIndex = EXISTING_ACCOUNT_LOGIN_PAGE;
           return;
         }
 
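
The hunk above hoists Weave.JPAKE_ERROR_USERABORT into a closure-local constant because the Weave object may already be gone when onAbort fires during teardown. A minimal sketch of that pattern, for illustration only, with a hypothetical `module` object standing in for Weave:

    // Sketch: `module` and ERROR_USER_ABORT are hypothetical stand-ins for
    // Weave and Weave.JPAKE_ERROR_USERABORT.
    function startSetup(module) {
      // Capture the constant now; `module` may be unloaded when onAbort runs.
      const ERROR_USER_ABORT = module.ERROR_USER_ABORT;

      return new module.Client({
        onAbort: function onAbort(error) {
          if (error == ERROR_USER_ABORT)
            return;                       // user cancelled, nothing to report
          // handle real errors here
        }
      });
    }
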
--- a/services/sync/modules/engines.js
+++ b/services/sync/modules/engines.js
@@ -176,19 +176,34 @@ Tracker.prototype = {
     this._log.trace("Clearing changed ID list");
     this.changedIDs = {};
     this.saveChangedIDs();
   }
 };
 
 
 
-/*
- * Data Stores
- * These can wrap, serialize items and apply commands
+/**
+ * The Store serves as the interface between Sync and stored data.
+ *
+ * The name "store" is a bit of a misnomer because it doesn't actually "store"
+ * anything. Instead, it serves as a gateway to something that actually does
+ * the "storing."
+ *
+ * The store is responsible for record management inside an engine. It tells
+ * Sync which items are available, converts items to and from Sync's
+ * record format, and applies incoming records as changes to the underlying
+ * store.
+ *
+ * Store implementations require a number of functions to be implemented. These
+ * are all documented below.
+ *
+ * For stores that deal with many records or have expensive store access
+ * routines, it is highly recommended to implement a custom applyIncomingBatch
+ * and/or applyIncoming function on top of the basic APIs.
  */
 
 function Store(name) {
   name = name || "Unnamed";
   this.name = name.toLowerCase();
 
   this._log = Log4Moz.repository.getLogger("Sync.Store." + name);
   let level = Svc.Prefs.get("log.logger.engine." + this.name, "Debug");
@@ -201,16 +216,31 @@ function Store(name) {
 Store.prototype = {
 
   _sleep: function _sleep(delay) {
     let cb = Async.makeSyncCallback();
     this._timer.initWithCallback(cb, delay, Ci.nsITimer.TYPE_ONE_SHOT);
     Async.waitForSyncCallback(cb);
   },
 
+  /**
+   * Apply multiple incoming records against the store.
+   *
+   * This is called with a set of incoming records to process. The function
+   * should look at each record, reconcile it with the current local state, and
+   * make the local changes required to bring local state in line with the
+   * record.
+   *
+   * The default implementation simply iterates over all records and calls
+   * applyIncoming(). Store implementations may override this function
+   * if desired.
+   *
+   * @param  records Array of records to apply
+   * @return Array of record IDs which did not apply cleanly
+   */
   applyIncomingBatch: function applyIncomingBatch(records) {
     let failed = [];
     for each (let record in records) {
       try {
         this.applyIncoming(record);
       } catch (ex if (ex.code == Engine.prototype.eEngineAbortApplyIncoming)) {
         // This kind of exception should have a 'cause' attribute, which is an
         // originating exception.
@@ -220,55 +250,143 @@ Store.prototype = {
         this._log.warn("Failed to apply incoming record " + record.id);
         this._log.warn("Encountered exception: " + Utils.exceptionStr(ex));
         failed.push(record.id);
       }
     };
     return failed;
   },
 
+  /**
+   * Apply a single record against the store.
+   *
+   * This takes a single record and makes the local changes required so the
+   * local state matches what's in the record.
+   *
+   * The default implementation calls one of remove(), create(), or update()
+   * depending on the item's current state in the store. Store
+   * implementations may override this function if desired.
+   *
+   * @param record
+   *        Record to apply
+   */
   applyIncoming: function Store_applyIncoming(record) {
     if (record.deleted)
       this.remove(record);
     else if (!this.itemExists(record.id))
       this.create(record);
     else
       this.update(record);
   },
 
   // override these in derived objects
 
+  /**
+   * Create an item in the store from a record.
+   *
+   * This is called by the default implementation of applyIncoming(). If using
+   * applyIncomingBatch(), this won't be called unless your store calls it.
+   *
+   * @param record
+   *        The store record to create an item from
+   */
   create: function Store_create(record) {
     throw "override create in a subclass";
   },
 
+  /**
+   * Remove an item from the store, as described by a record.
+   *
+   * This is called by the default implementation of applyIncoming(). If using
+   * applyIncomingBatch(), this won't be called unless your store calls it.
+   *
+   * @param record
+   *        The record describing the item to delete
+   */
   remove: function Store_remove(record) {
     throw "override remove in a subclass";
   },
 
+  /**
+   * Update an item in the store from a record.
+   *
+   * This is called by the default implementation of applyIncoming(). If using
+   * applyIncomingBatch(), this won't be called unless your store calls it.
+   *
+   * @param record
+   *        The record to update the item from
+   */
   update: function Store_update(record) {
     throw "override update in a subclass";
   },
 
+  /**
+   * Determine whether a record with the specified ID exists.
+   *
+   * Takes a string record ID and returns a boolean indicating whether the
+   * record exists locally.
+   *
+   * @param  id
+   *         string record ID
+   * @return boolean indicating whether record exists locally
+   */
   itemExists: function Store_itemExists(id) {
     throw "override itemExists in a subclass";
   },
 
+  /**
+   * Create a record from the specified ID.
+   *
+   * If the ID is known, the record should be populated with metadata from
+   * the store. If the ID is not known, the record should be created with the
+   * deleted field set to true.
+   *
+   * @param  id
+   *         string record ID
+   * @param  collection
+   *         Collection to add record to. This is typically passed into the
+   *         constructor for the newly-created record.
+   * @return record type for this engine
+   */
   createRecord: function Store_createRecord(id, collection) {
     throw "override createRecord in a subclass";
   },
 
+  /**
+   * Change the ID of a record.
+   *
+   * @param  oldID
+   *         string old/current record ID
+   * @param  newID
+   *         string new record ID
+   */
   changeItemID: function Store_changeItemID(oldID, newID) {
     throw "override changeItemID in a subclass";
   },
 
+  /**
+   * Obtain the set of all known record IDs.
+   *
+   * @return Object with ID strings as keys and values of true. The values
+   *         are ignored.
+   */
   getAllIDs: function Store_getAllIDs() {
     throw "override getAllIDs in a subclass";
   },
 
+  /**
+   * Wipe all data in the store.
+   *
+   * This function is called during remote wipes or when replacing local data
+   * with remote data.
+   *
+   * This function should delete all local data that the store is managing. It
+   * can be thought of as clearing out all state and restoring the "new
+   * browser" state.
+   */
   wipe: function Store_wipe() {
     throw "override wipe in a subclass";
   }
 };
 
 
 // Singleton service, holds registered engines
 
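
The comments added above spell out the Store contract: itemExists(), create(), update(), remove(), changeItemID(), getAllIDs(), createRecord() and wipe(), with applyIncoming()/applyIncomingBatch() layered on top. As a rough illustration of that contract only (not code from this changeset), a hypothetical in-memory store could look like this:

    // Hypothetical in-memory Store subclass; assumes the CryptoWrapper record
    // type and keeps payloads in a plain object.
    function MemoryStore(name) {
      Store.call(this, name);
      this._items = {};                       // id -> cleartext payload
    }
    MemoryStore.prototype = {
      __proto__: Store.prototype,

      itemExists: function (id) {
        return id in this._items;
      },
      create: function (record) {
        this._items[record.id] = record.cleartext;
      },
      update: function (record) {
        this._items[record.id] = record.cleartext;
      },
      remove: function (record) {
        delete this._items[record.id];
      },
      changeItemID: function (oldID, newID) {
        this._items[newID] = this._items[oldID];
        delete this._items[oldID];
      },
      getAllIDs: function () {
        let ids = {};
        for (let id in this._items)
          ids[id] = true;
        return ids;
      },
      createRecord: function (id, collection) {
        let record = new CryptoWrapper(collection, id);
        if (id in this._items)
          record.cleartext = this._items[id];
        else
          record.deleted = true;              // unknown ID -> tombstone record
        return record;
      },
      wipe: function () {
        this._items = {};
      }
    };
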
--- a/services/sync/modules/policies.js
+++ b/services/sync/modules/policies.js
@@ -16,16 +16,17 @@
  * The Initial Developer of the Original Code is
  * the Mozilla Foundation.
  * Portions created by the Initial Developer are Copyright (C) 2011
  * the Initial Developer. All Rights Reserved.
  *
  * Contributor(s):
  *  Marina Samuel <msamuel@mozilla.com>
  *  Philipp von Weitershausen <philipp@weitershausen.de>
+ *  Chenxia Liu <liuche@mozilla.com>
  *
  * Alternatively, the contents of this file may be used under the terms of
  * either the GNU General Public License Version 2 or later (the "GPL"), or
  * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
  * in which case the provisions of the GPL or the LGPL are applicable instead
  * of those above. If you wish to allow use of your version of this file only
  * under the terms of either the GPL or the LGPL, and not to allow others to
  * use your version of this file under the terms of the MPL, indicate your
@@ -448,16 +449,17 @@ let ErrorHandler = {
     Svc.Obs.add("weave:service:sync:finish", this);
 
     this.initLogs();
   },
 
   initLogs: function initLogs() {
     this._log = Log4Moz.repository.getLogger("Sync.ErrorHandler");
     this._log.level = Log4Moz.Level[Svc.Prefs.get("log.logger.service.main")];
+    this._cleaningUpFileLogs = false;
 
     let root = Log4Moz.repository.getLogger("Sync");
     root.level = Log4Moz.Level[Svc.Prefs.get("log.rootLogger")];
 
     let formatter = new Log4Moz.BasicFormatter();
     let capp = new Log4Moz.ConsoleAppender(formatter);
     capp.level = Log4Moz.Level[Svc.Prefs.get("log.appender.console")];
     root.addAppender(capp);
@@ -548,16 +550,56 @@ let ErrorHandler = {
   syncAndReportErrors: function syncAndReportErrors() {
     this._log.debug("Beginning user-triggered sync.");
 
     this.dontIgnoreErrors = true;
     Utils.nextTick(Weave.Service.sync, Weave.Service);
   },
 
   /**
+   * Finds all log files older than maxErrorAge and deletes them, one per tick, without tying up I/O.
+   */
+  cleanupLogs: function cleanupLogs() {
+    let direntries = FileUtils.getDir("ProfD", ["weave", "logs"]).directoryEntries;
+    let oldLogs = [];
+    let index = 0;
+    let threshold = Date.now() - 1000 * Svc.Prefs.get("log.appender.file.maxErrorAge");
+
+    while (direntries.hasMoreElements()) {
+      let logFile = direntries.getNext().QueryInterface(Ci.nsIFile);
+      if (logFile.lastModifiedTime < threshold) {
+        oldLogs.push(logFile);
+      }
+    }
+
+    // Deletes a file from oldLogs each tick until there are none left.
+    function deleteFile() {
+      if (index >= oldLogs.length) {
+        ErrorHandler._cleaningUpFileLogs = false;
+        Svc.Obs.notify("weave:service:cleanup-logs");
+        return;
+      }
+      try {
+        oldLogs[index].remove(false);
+      } catch (ex) {
+        ErrorHandler._log.debug("Encountered error trying to clean up old log file '"
+                                + oldLogs[index].leafName + "': "
+                                + Utils.exceptionStr(ex));
+      }
+      index++;
+      Utils.nextTick(deleteFile);
+    }
+
+    if (oldLogs.length > 0) {
+      ErrorHandler._cleaningUpFileLogs = true;
+      Utils.nextTick(deleteFile);
+    }
+  },
+
+  /**
    * Generate a log file for the sync that just completed
    * and refresh the input & output streams.
    *
    * @param flushToFile
    *        the log file to be flushed/reset
    *
    * @param filenamePrefix
    *        a value of either LOG_PREFIX_SUCCESS or LOG_PREFIX_ERROR
@@ -568,16 +610,20 @@ let ErrorHandler = {
     this._logAppender.reset();
     if (flushToFile && inStream) {
       try {
         let filename = filenamePrefix + Date.now() + ".txt";
         let file = FileUtils.getFile("ProfD", ["weave", "logs", filename]);
         let outStream = FileUtils.openFileOutputStream(file);
         NetUtil.asyncCopy(inStream, outStream, function () {
           Svc.Obs.notify("weave:service:reset-file-log");
+          if (filenamePrefix == LOG_PREFIX_ERROR
+              && !ErrorHandler._cleaningUpFileLogs) {
+            Utils.nextTick(ErrorHandler.cleanupLogs, ErrorHandler);
+          }
         });
       } catch (ex) {
         Svc.Obs.notify("weave:service:reset-file-log");
       }
     } else {
       Svc.Obs.notify("weave:service:reset-file-log");
     }
   },
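
cleanupLogs() above keeps the main thread responsive by deleting exactly one old log file per event-loop tick and notifying weave:service:cleanup-logs only once the queue is drained. The general shape of that pattern, sketched for illustration with a hypothetical list of nsIFile objects (Utils.nextTick is Sync's own helper):

    // Sketch of the one-file-per-tick deletion used by cleanupLogs().
    function deleteFilesIncrementally(files, doneCallback) {
      let index = 0;
      function deleteOne() {
        if (index >= files.length) {
          doneCallback();                 // queue drained
          return;
        }
        try {
          files[index].remove(false);     // this tick's unit of work
        } catch (ex) {
          // log and continue, as cleanupLogs() does
        }
        index++;
        Utils.nextTick(deleteOne);        // yield before the next file
      }
      if (files.length)
        Utils.nextTick(deleteOne);
      else
        doneCallback();
    }
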
--- a/services/sync/services-sync.js
+++ b/services/sync/services-sync.js
@@ -29,16 +29,17 @@ pref("services.sync.jpake.pollInterval",
 pref("services.sync.jpake.firstMsgMaxTries", 300);
 pref("services.sync.jpake.maxTries", 10);
 
 pref("services.sync.log.appender.console", "Warn");
 pref("services.sync.log.appender.dump", "Error");
 pref("services.sync.log.appender.file.level", "Trace");
 pref("services.sync.log.appender.file.logOnError", true);
 pref("services.sync.log.appender.file.logOnSuccess", false);
+pref("services.sync.log.appender.file.maxErrorAge", 864000); // 10 days
 pref("services.sync.log.rootLogger", "Debug");
 pref("services.sync.log.logger.service.main", "Debug");
 pref("services.sync.log.logger.authenticator", "Debug");
 pref("services.sync.log.logger.network.resources", "Debug");
 pref("services.sync.log.logger.service.jpakeclient", "Debug");
 pref("services.sync.log.logger.engine.bookmarks", "Debug");
 pref("services.sync.log.logger.engine.clients", "Debug");
 pref("services.sync.log.logger.engine.forms", "Debug");
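
The new maxErrorAge pref is expressed in seconds (864000 s = 10 days). cleanupLogs() converts it to milliseconds and compares the result against each file's lastModifiedTime; the arithmetic, shown for illustration with a hypothetical logFile variable:

    // maxErrorAge is in seconds; Date.now() and lastModifiedTime are in ms.
    let maxErrorAge = Svc.Prefs.get("log.appender.file.maxErrorAge");  // 864000
    let threshold   = Date.now() - 1000 * maxErrorAge;                 // 10 days ago
    let isStale     = logFile.lastModifiedTime < threshold;            // logFile assumed
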
--- a/services/sync/tests/unit/test_errorhandler_filelog.js
+++ b/services/sync/tests/unit/test_errorhandler_filelog.js
@@ -1,18 +1,20 @@
 /* Any copyright is dedicated to the Public Domain.
    http://creativecommons.org/publicdomain/zero/1.0/ */
 
 Cu.import("resource://services-sync/service.js");
 Cu.import("resource://services-sync/util.js");
 Cu.import("resource://services-sync/log4moz.js");
 
-const logsdir = FileUtils.getDir("ProfD", ["weave", "logs"], true);
+const logsdir            = FileUtils.getDir("ProfD", ["weave", "logs"], true);
 const LOG_PREFIX_SUCCESS = "success-";
 const LOG_PREFIX_ERROR   = "error-";
+const CLEANUP_DELAY      = 1000; // delay to age files for cleanup (ms)
+const DELAY_BUFFER       = 50; // buffer for timers on different OS platforms
 
 const PROLONGED_ERROR_DURATION =
   (Svc.Prefs.get('errorhandler.networkFailureReportTimeout') * 2) * 1000;
 
 function setLastSync(lastSyncValue) {
   Svc.Prefs.set("lastSync", (new Date(Date.now() -
     lastSyncValue)).toString());
 }
@@ -226,8 +228,58 @@ add_test(function test_login_error_logOn
       run_next_test();
     });
   });
 
   // Fake an unsuccessful login due to prolonged failure.
   setLastSync(PROLONGED_ERROR_DURATION);
   Svc.Obs.notify("weave:service:login:error");
 });
+
+// Check that error log files older than the age threshold are deleted.
+add_test(function test_logErrorCleanup_age() {
+  let maxAge = CLEANUP_DELAY/1000;
+  let firstlog_name;
+  let oldLogs = [];
+  let numLogs = 10;
+  let errString = "some error log\n";
+
+  Svc.Prefs.set("log.appender.file.logOnError", true);
+  Svc.Prefs.set("log.appender.file.maxErrorAge", maxAge);
+
+  // Make some files.
+  for (let i = 0; i < numLogs; i++) {
+    let filename = LOG_PREFIX_ERROR + Date.now() + i + ".txt";
+    let newLog = FileUtils.getFile("ProfD", ["weave", "logs", filename]);
+    let foStream = FileUtils.openFileOutputStream(newLog);
+    foStream.write(errString, errString.length);
+    foStream.close();
+    oldLogs.push(newLog.leafName);
+  }
+
+  Svc.Obs.add("weave:service:cleanup-logs", function onCleanupLogs() {
+    Svc.Obs.remove("weave:service:cleanup-logs", onCleanupLogs);
+
+    // Only the newest created log file remains.
+    let entries = logsdir.directoryEntries;
+    do_check_true(entries.hasMoreElements());
+    let logfile = entries.getNext().QueryInterface(Ci.nsILocalFile);
+    do_check_true(oldLogs.every(function (e) {
+      return e != logfile.leafName;
+    }));
+    do_check_false(entries.hasMoreElements());
+
+    // Clean up.
+    try {
+      logfile.remove(false);
+    } catch(ex) {
+      dump("Couldn't delete file: " + ex + "\n");
+      // Stupid Windows box.
+    }
+
+    Svc.Prefs.resetBranch("");
+    run_next_test();
+  });
+
+  Utils.namedTimer(function () Svc.Obs.notify("weave:service:sync:error"),
+                   CLEANUP_DELAY + DELAY_BUFFER, this, "cleanup-timer");
+
+});
--- a/services/sync/tps/extensions/tps/modules/history.jsm
+++ b/services/sync/tps/extensions/tps/modules/history.jsm
@@ -35,27 +35,52 @@
  *
  * ***** END LICENSE BLOCK ***** */
 
  /* This is a JavaScript module (JSM) to be imported via
   * Components.utils.import() and acts as a singleton. Only the following
   * listed symbols will be exposed on import, and only when and where imported.
   */
 
-var EXPORTED_SYMBOLS = ["HistoryEntry"];
+var EXPORTED_SYMBOLS = ["HistoryEntry", "DumpHistory"];
 
 const CC = Components.classes;
 const CI = Components.interfaces;
 const CU = Components.utils;
 
 CU.import("resource://gre/modules/Services.jsm");
 CU.import("resource://gre/modules/PlacesUtils.jsm");
 CU.import("resource://tps/logger.jsm");
 CU.import("resource://services-sync/async.js");
 
+var DumpHistory = function TPS_History__DumpHistory() {
+  let writer = {
+    value: "",
+    write: function PlacesItem__dump__write(aStr, aLen) {
+      this.value += aStr;
+    }
+  };
+
+  let query = PlacesUtils.history.getNewQuery();
+  let options = PlacesUtils.history.getNewQueryOptions();
+  let root = PlacesUtils.history.executeQuery(query, options).root;
+  root.containerOpen = true;
+  Logger.logInfo("\n\ndumping history\n", true);
+  for (var i = 0; i < root.childCount; i++) {
+    let node = root.getChild(i);
+    let uri = node.uri;
+    let curvisits = HistoryEntry._getVisits(uri);
+    for each (var visit in curvisits) {
+      Logger.logInfo("URI: " + uri + ", type=" + visit.type + ", date=" + visit.date, true);
+    }
+  }
+  root.containerOpen = false;
+  Logger.logInfo("\nend history dump\n", true);
+};
+
 /**
  * HistoryEntry object
  *
  * Contains methods for manipulating browser history entries.
  */
 var HistoryEntry = {
   /**
    * _db
@@ -137,17 +162,17 @@ var HistoryEntry = {
    * @param usSinceEpoch The number of microseconds from Epoch to
    *        the time the current Crossweave run was started
    * @return true if all the visits for the uri are found, otherwise false
    */
   Find: function(item, usSinceEpoch) {
     Logger.AssertTrue("visits" in item && "uri" in item,
       "History entry in test file must have both 'visits' " +
       "and 'uri' properties");
-    let curvisits = curvisits = this._getVisits(item.uri);
+    let curvisits = this._getVisits(item.uri);
     for each (visit in curvisits) {
       for each (itemvisit in item.visits) {
         let expectedDate = itemvisit.date * 60 * 60 * 1000 * 1000 
             + usSinceEpoch;
         if (visit.type == itemvisit.type && visit.date == expectedDate) {
           itemvisit.found = true;
         }
       }
--- a/services/sync/tps/extensions/tps/modules/logger.jsm
+++ b/services/sync/tps/extensions/tps/modules/logger.jsm
@@ -112,26 +112,31 @@ var Logger =
   },
 
   AssertEqual: function(val1, val2, msg) {
     if (val1 != val2)
       throw("ASSERTION FAILED! " + msg + "; expected " + 
             JSON.stringify(val2) + ", got " + JSON.stringify(val1));
   },
 
-  log: function (msg) {
+  log: function (msg, withoutPrefix) {
     dump(msg + "\n");
-    var now = new Date()
-    this.write(now.getFullYear() + "-" + (now.getMonth() < 9 ? '0' : '') + 
-        (now.getMonth() + 1) + "-" + 
-        (now.getDay() < 9 ? '0' : '') + (now.getDay() + 1) + " " +
-        (now.getHours() < 10 ? '0' : '') + now.getHours() + ":" +
-        (now.getMinutes() < 10 ? '0' : '') + now.getMinutes() + ":" +
-        (now.getSeconds() < 10 ? '0' : '') + now.getSeconds() + " " + 
-        msg + "\n");
+    if (withoutPrefix) {
+      this.write(msg + "\n");
+    }
+    else {
+      var now = new Date();
+      this.write(now.getFullYear() + "-" + (now.getMonth() < 9 ? '0' : '') +
+          (now.getMonth() + 1) + "-" +
+          (now.getDate() < 10 ? '0' : '') + now.getDate() + " " +
+          (now.getHours() < 10 ? '0' : '') + now.getHours() + ":" +
+          (now.getMinutes() < 10 ? '0' : '') + now.getMinutes() + ":" +
+          (now.getSeconds() < 10 ? '0' : '') + now.getSeconds() + " " +
+          msg + "\n");
+    }
   },
 
   clearPotentialError: function() {
     this._potentialError = null;
   },
 
   logPotentialError: function(msg) {
     this._potentialError = msg;
@@ -145,17 +150,20 @@ var Logger =
     }
     this.log("CROSSWEAVE ERROR: " + message);
   },
 
   logError: function (msg) {
     this.log("CROSSWEAVE ERROR: " + msg);
   },
 
-  logInfo: function (msg) {
-    this.log("CROSSWEAVE INFO: " + msg);
+  logInfo: function (msg, withoutPrefix) {
+    if (withoutPrefix)
+      this.log(msg, true);
+    else
+      this.log("CROSSWEAVE INFO: " + msg);
   },
 
   logPass: function (msg) {
     this.log("CROSSWEAVE TEST PASS: " + msg);
   },
 };
 
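
The timestamp in Logger.log() pads each two-digit field by hand. Purely as an illustration (not part of this patch), the same format written with a small padding helper:

    // Hypothetical helper: zero-pad a number to two digits.
    function pad2(n) {
      return (n < 10 ? "0" : "") + n;
    }

    function timestamp(now) {
      return now.getFullYear() + "-" + pad2(now.getMonth() + 1) + "-" +
             pad2(now.getDate()) + " " + pad2(now.getHours()) + ":" +
             pad2(now.getMinutes()) + ":" + pad2(now.getSeconds());
    }
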
--- a/services/sync/tps/extensions/tps/modules/passwords.jsm
+++ b/services/sync/tps/extensions/tps/modules/passwords.jsm
@@ -35,30 +35,42 @@
  *
  * ***** END LICENSE BLOCK ***** */
 
  /* This is a JavaScript module (JSM) to be imported via
   * Components.utils.import() and acts as a singleton. Only the following
   * listed symbols will be exposed on import, and only when and where imported.
   */
 
-var EXPORTED_SYMBOLS = ["Password"];
+var EXPORTED_SYMBOLS = ["Password", "DumpPasswords"];
 
 const CC = Components.classes;
 const CI = Components.interfaces;
 const CU = Components.utils;
 
 CU.import("resource://gre/modules/Services.jsm");
 CU.import("resource://tps/logger.jsm");
 
 let nsLoginInfo = new Components.Constructor(
                       "@mozilla.org/login-manager/loginInfo;1",  
                       CI.nsILoginInfo,  
                       "init");  
 
+var DumpPasswords = function TPS__Passwords__DumpPasswords() {
+  let logins = Services.logins.getAllLogins();
+  Logger.logInfo("\ndumping password list\n", true);
+  for (var i = 0; i < logins.length; i++) {
+    Logger.logInfo("* host=" + logins[i].hostname + ", submitURL=" + logins[i].formSubmitURL +
+                   ", realm=" + logins[i].httpRealm + ", password=" + logins[i].password +
+                   ", passwordField=" + logins[i].passwordField + ", username=" +
+                   logins[i].username + ", usernameField=" + logins[i].usernameField, true);
+  }
+  Logger.logInfo("\n\nend password list\n", true);
+};
+
 /**
  * PasswordProps object; holds password properties.
  */
 function PasswordProps(props) {
   this.hostname = null;
   this.submitURL = null;
   this.realm = null;
   this.username = "";
@@ -116,19 +128,20 @@ Password.prototype = {
    * Find
    *
    * Finds a password entry in the login manager, for the password
    * represented by this object's properties.
    *
    * @return the guid of the password if found, otherwise -1
    */
   Find: function() {
-    let logins = Services.logins.findLogins({}, this.props.hostname,
-                                         this.props.submitURL,
-                                         this.props.realm);
+    let logins = Services.logins.findLogins({},
+                                            this.props.hostname,
+                                            this.props.submitURL,
+                                            this.props.realm);
     for (var i = 0; i < logins.length; i++) {
       if (logins[i].username == this.props.username &&
           logins[i].password == this.props.password &&
           logins[i].usernameField == this.props.usernameField &&
           logins[i].passwordField == this.props.passwordField) {
         logins[i].QueryInterface(CI.nsILoginMetaInfo);
         return logins[i].guid;
       }
--- a/services/sync/tps/extensions/tps/modules/sync.jsm
+++ b/services/sync/tps/extensions/tps/modules/sync.jsm
@@ -89,16 +89,23 @@ var TPS = {
           this._syncErrors = 0;
           this._waitingForSync = false;
         }
         break;
     }
   },
 
   SetupSyncAccount: function TPS__SetupSyncAccount() {
+    try {
+      let serverURL = prefs.getCharPref('tps.account.serverURL');
+      if (serverURL) {
+        Weave.Service.serverURL = serverURL;
+      }
+    }
+    catch(e) {}
     Weave.Service.account = prefs.getCharPref('tps.account.username');
     Weave.Service.password = prefs.getCharPref('tps.account.password');
     Weave.Service.passphrase = prefs.getCharPref('tps.account.passphrase');
     Weave.Svc.Obs.notify("weave:service:setup-complete");
   },
 
   Sync: function TPS__Sync(options) {
     Logger.logInfo('Mozmill starting sync operation: ' + options);
--- a/services/sync/tps/extensions/tps/modules/tps.jsm
+++ b/services/sync/tps/extensions/tps/modules/tps.jsm
@@ -275,75 +275,87 @@ var TPS =
           Logger.AssertTrue(false, "invalid action: " + action);
       }
     }
     Logger.logPass("executing action " + action.toUpperCase() + 
                    " on formdata");
   },
 
   HandleHistory: function (entries, action) {
-    for each (entry in entries) {
-      Logger.logInfo("executing action " + action.toUpperCase() +
-                     " on history entry " + JSON.stringify(entry));
-      switch(action) {
-        case ACTION_ADD:
-          HistoryEntry.Add(entry, this._usSinceEpoch);
-          break;
-        case ACTION_DELETE:
-          HistoryEntry.Delete(entry, this._usSinceEpoch);
-          break;
-        case ACTION_VERIFY:
-          Logger.AssertTrue(HistoryEntry.Find(entry, this._usSinceEpoch),
-            "Uri visits not found in history database");
-          break;
-        case ACTION_VERIFY_NOT:
-          Logger.AssertTrue(!HistoryEntry.Find(entry, this._usSinceEpoch),
-            "Uri visits found in history database, but they shouldn't be");
-          break;
-        default:
-          Logger.AssertTrue(false, "invalid action: " + action);
+    try {
+      for each (entry in entries) {
+        Logger.logInfo("executing action " + action.toUpperCase() +
+                       " on history entry " + JSON.stringify(entry));
+        switch(action) {
+          case ACTION_ADD:
+            HistoryEntry.Add(entry, this._usSinceEpoch);
+            break;
+          case ACTION_DELETE:
+            HistoryEntry.Delete(entry, this._usSinceEpoch);
+            break;
+          case ACTION_VERIFY:
+            Logger.AssertTrue(HistoryEntry.Find(entry, this._usSinceEpoch),
+              "Uri visits not found in history database");
+            break;
+          case ACTION_VERIFY_NOT:
+            Logger.AssertTrue(!HistoryEntry.Find(entry, this._usSinceEpoch),
+              "Uri visits found in history database, but they shouldn't be");
+            break;
+          default:
+            Logger.AssertTrue(false, "invalid action: " + action);
+        }
       }
+      Logger.logPass("executing action " + action.toUpperCase() + 
+                     " on history");
     }
-    Logger.logPass("executing action " + action.toUpperCase() + 
-                   " on history");
+    catch(e) {
+      DumpHistory();
+      throw(e);
+    }
   },
 
   HandlePasswords: function (passwords, action) {
-    for each (password in passwords) {
-      let password_id = -1;
-      Logger.logInfo("executing action " + action.toUpperCase() + 
-                    " on password " + JSON.stringify(password));
-      var password = new Password(password);
-      switch (action) {
-        case ACTION_ADD:
-          Logger.AssertTrue(password.Create() > -1, "error adding password");
-          break;
-        case ACTION_VERIFY:
-          Logger.AssertTrue(password.Find() != -1, "password not found");
-          break;
-        case ACTION_VERIFY_NOT:
-          Logger.AssertTrue(password.Find() == -1, 
-            "password found, but it shouldn't exist");
-          break;
-        case ACTION_DELETE:
-          Logger.AssertTrue(password.Find() != -1, "password not found");
-          password.Remove();
-          break;
-        case ACTION_MODIFY:
-          if (password.updateProps != null) {
+    try {
+      for each (password in passwords) {
+        let password_id = -1;
+        Logger.logInfo("executing action " + action.toUpperCase() + 
+                      " on password " + JSON.stringify(password));
+        var password = new Password(password);
+        switch (action) {
+          case ACTION_ADD:
+            Logger.AssertTrue(password.Create() > -1, "error adding password");
+            break;
+          case ACTION_VERIFY:
+            Logger.AssertTrue(password.Find() != -1, "password not found");
+            break;
+          case ACTION_VERIFY_NOT:
+            Logger.AssertTrue(password.Find() == -1, 
+              "password found, but it shouldn't exist");
+            break;
+          case ACTION_DELETE:
             Logger.AssertTrue(password.Find() != -1, "password not found");
-            password.Update();
-          }
-          break;
-        default:
-          Logger.AssertTrue(false, "invalid action: " + action);
-      } 
+            password.Remove();
+            break;
+          case ACTION_MODIFY:
+            if (password.updateProps != null) {
+              Logger.AssertTrue(password.Find() != -1, "password not found");
+              password.Update();
+            }
+            break;
+          default:
+            Logger.AssertTrue(false, "invalid action: " + action);
+        } 
+      }
+      Logger.logPass("executing action " + action.toUpperCase() + 
+                     " on passwords");
     }
-    Logger.logPass("executing action " + action.toUpperCase() + 
-                   " on passwords");
+    catch(e) {
+      DumpPasswords();
+      throw(e);
+    }
   },
 
   HandleBookmarks: function (bookmarks, action) {
     try {
       let items = [];
       for (folder in bookmarks) {
         let last_item_pos = -1;
         for each (bookmark in bookmarks[folder]) {
@@ -507,16 +519,19 @@ var TPS =
 
       // Store account details as prefs so they're accessible to the mozmill
       // framework.
       let prefs = CC["@mozilla.org/preferences-service;1"]
                   .getService(CI.nsIPrefBranch);
       prefs.setCharPref('tps.account.username', this.config.account.username);
       prefs.setCharPref('tps.account.password', this.config.account.password);
       prefs.setCharPref('tps.account.passphrase', this.config.account.passphrase);
+      if (this.config.account['serverURL']) {
+        prefs.setCharPref('tps.account.serverURL', this.config.account.serverURL);
+      }
 
       // start processing the test actions
       this._currentAction = 0;
     }
     catch(e) {
       this.DumpError("Exception caught: " + e);
       return;
     }
--- a/testing/tps/INSTALL.sh
+++ b/testing/tps/INSTALL.sh
@@ -52,16 +52,21 @@ then
     echo "virtualenv wasn't installed correctly, aborting"
     exit 1
 fi
 
 # install TPS
 cd ${CWD}
 python setup.py install
 
+SETUP_STATUS=$?  # remember setup.py's exit status before cleanup
+# clean up files created by setup.py
+rm -rf build/ dist/ tps.egg-info/
+(exit $SETUP_STATUS)  # the check below should reflect the install
+
 if [ "$?" -gt 0 ]
 then
   exit 1
 fi
 
 CONFIG="`find ${VIRTUAL_ENV} -name config.json.in`"
 NEWCONFIG=${CONFIG:0:${#CONFIG}-3}
 
--- a/testing/tps/tps/cli.py
+++ b/testing/tps/tps/cli.py
@@ -31,20 +31,22 @@
 # decision by deleting the provisions above and replace them with the notice
 # and other provisions required by the GPL or the LGPL. If you do not delete
 # the provisions above, a recipient may use your version of this file under
 # the terms of any one of the MPL, the GPL or the LGPL.
 #
 # ***** END LICENSE BLOCK *****
 
 import json
+import logging
 import optparse
 import os
 import sys
-import logging
+import time
+import traceback
 
 from threading import RLock
 
 from tps import TPSFirefoxRunner, TPSPulseMonitor, TPSTestRunner
 
 def main():
   parser = optparse.OptionParser()
   parser.add_option("--email-results",
@@ -111,35 +113,42 @@ def main():
       # replace msys-style paths with proper Windows paths
       import re
       m = re.match('^\/\w\/', extensionDir)
       if m:
         extensionDir = "%s:/%s" % (m.group(0)[1:2], extensionDir[3:])
         extensionDir = extensionDir.replace("/", "\\")
 
   if options.binary is None:
-    # If no binary is specified, start the pulse build monitor, and wait
-    # until we receive build notifications before running tests.
-    monitor = TPSPulseMonitor(extensionDir,
-                              config=config,
-                              autolog=options.autolog,
-                              emailresults=options.emailresults,
-                              testfile=options.testfile,
-                              logfile=options.logfile,
-                              rlock=rlock)
-    print "waiting for pulse build notifications"
+    while True:
+      try:
+        # If no binary is specified, start the pulse build monitor, and wait
+        # until we receive build notifications before running tests.
+        monitor = TPSPulseMonitor(extensionDir,
+                                  config=config,
+                                  autolog=options.autolog,
+                                  emailresults=options.emailresults,
+                                  testfile=options.testfile,
+                                  logfile=options.logfile,
+                                  rlock=rlock)
+        print "waiting for pulse build notifications"
 
-    if options.pulsefile:
-      # For testing purposes, inject a pulse message directly into
-      # the monitor.
-      builddata = json.loads(open(options.pulsefile, 'r').read())
-      monitor.onBuildComplete(builddata)
+        if options.pulsefile:
+          # For testing purposes, inject a pulse message directly into
+          # the monitor.
+          builddata = json.loads(open(options.pulsefile, 'r').read())
+          monitor.onBuildComplete(builddata)
 
-    monitor.listen()
-    return
+        monitor.listen()
+      except KeyboardInterrupt:
+        sys.exit()
+      except:
+        traceback.print_exc()
+        print 'sleeping 5 minutes'
+        time.sleep(300)
 
   TPS = TPSTestRunner(extensionDir,
                       emailresults=options.emailresults,
                       testfile=options.testfile,
                       logfile=options.logfile,
                       binary=options.binary,
                       config=config,
                       rlock=rlock,
--- a/testing/tps/tps/testrunner.py
+++ b/testing/tps/tps/testrunner.py
@@ -101,17 +101,17 @@ class TPSTestRunner(object):
                           'services.sync.log.logger.service.main': 'Trace',
                           'services.sync.log.logger.engine.bookmarks': 'Trace',
                           'services.sync.log.appender.console': 'Trace',
                           'services.sync.log.appender.debugLog.enabled': True,
                           'browser.dom.window.dump.enabled': True,
                           'extensions.checkCompatibility.4.0': False,
                         }
   syncVerRe = re.compile(
-      r"Weave version: (?P<syncversion>.*)\n")
+      r"Sync version: (?P<syncversion>.*)\n")
   ffVerRe = re.compile(
       r"Firefox version: (?P<ffver>.*)\n")
   ffDateRe = re.compile(
       r"Firefox builddate: (?P<ffdate>.*)\n")
 
   def __init__(self, extensionDir, emailresults=False, testfile="sync.test",
                binary=None, config=None, rlock=None, mobile=False,
                autolog=False, logfile="tps.log"):
@@ -164,41 +164,16 @@ class TPSTestRunner(object):
       zip.write(os.path.join(rootDir, dir), dir)
     except:
       # on some OS's, adding directory entries doesn't seem to work
       pass
     for root, dirs, files in os.walk(os.path.join(rootDir, dir)):
       for f in files:
         zip.write(os.path.join(root, f), os.path.join(dir, f))
 
-  def make_xpi(self):
-    """Build the test extension."""
-
-    tpsdir = os.path.join(self.extensionDir, "tps")
-
-    if self.tpsxpi is None:
-      tpsxpi = os.path.join(tpsdir, "tps.xpi")
-
-      if os.access(tpsxpi, os.F_OK):
-        os.remove(tpsxpi)
-      if not os.access(os.path.join(tpsdir, "install.rdf"), os.F_OK):
-        raise Exception("extension code not found in %s" % tpsdir)
-
-      from zipfile import ZipFile
-      z = ZipFile(tpsxpi, 'w')
-      self._zip_add_file(z, 'chrome.manifest', tpsdir)
-      self._zip_add_file(z, 'install.rdf', tpsdir)
-      self._zip_add_dir(z, 'components', tpsdir)
-      self._zip_add_dir(z, 'modules', tpsdir)
-      z.close()
-
-      self.tpsxpi = tpsxpi
-
-    return self.tpsxpi
-
   def run_single_test(self, testdir, testname):
     testpath = os.path.join(testdir, testname)
     self.log("Running test %s\n" % testname)
 
     # Create a random account suffix that is used when creating test
     # accounts on a staging server.
     account_suffix = {"account-suffix": ''.join([str(random.randint(0,9))
                                                  for i in range(1,6)])}
@@ -400,17 +375,17 @@ class TPSTestRunner(object):
     if platform.uname()[0].find("Win") > -1:
       os_string = "Windows " + platform.uname()[3]
 
     # reset number of passed/failed tests
     self.numpassed = 0
     self.numfailed = 0
 
     # build our tps.xpi extension
-    self.extensions.append(self.make_xpi())
+    self.extensions.append(os.path.join(self.extensionDir, 'tps'))
     self.extensions.append(os.path.join(self.extensionDir, "mozmill"))
 
     # build the test list
     try:
       f = open(self.testfile)
       jsondata = f.read()
       f.close()
       testfiles = json.loads(jsondata)
--- a/testing/tps/tps/thread.py
+++ b/testing/tps/tps/thread.py
@@ -64,39 +64,43 @@ class TPSTestThread(Thread):
                         logfile=self.logfile,
                         binary=self.builddata['buildurl'],
                         config=self.config,
                         rlock=self.rlock,
                         mobile=False,
                         autolog=self.autolog)
     TPS.run_tests()
 
+    # Get the binary used by this TPS instance, and use it in subsequent
+    # ones, so it doesn't have to be re-downloaded each time.
+    binary = TPS.firefoxRunner.binary
+
     # ... and then again in mobile mode
-    TPS = TPSTestRunner(self.extensionDir,
-                        emailresults=self.emailresults,
-                        testfile=self.testfile,
-                        logfile=self.logfile,
-                        binary=self.builddata['buildurl'],
-                        config=self.config,
-                        rlock=self.rlock,
-                        mobile=True,
-                        autolog=self.autolog)
-    TPS.run_tests()
+    TPS_mobile = TPSTestRunner(self.extensionDir,
+                               emailresults=self.emailresults,
+                               testfile=self.testfile,
+                               logfile=self.logfile,
+                               binary=binary,
+                               config=self.config,
+                               rlock=self.rlock,
+                               mobile=True,
+                               autolog=self.autolog)
+    TPS_mobile.run_tests()
 
     # ... and again via the staging server, if credentials are present
     stageaccount = self.config.get('stageaccount')
     if stageaccount:
       username = stageaccount.get('username')
       password = stageaccount.get('password')
       passphrase = stageaccount.get('passphrase')
       if username and password and passphrase:
         stageconfig = self.config.copy()
         stageconfig['account'] = stageaccount.copy()
-        TPS = TPSTestRunner(self.extensionDir,
-                            emailresults=self.emailresults,
-                            testfile=self.testfile,
-                            logfile=self.logfile,
-                            binary=self.builddata['buildurl'],
-                            config=stageconfig,
-                            rlock=self.rlock,
-                            mobile=False,
-                            autolog=self.autolog)
-        TPS.run_tests()
+        TPS_stage = TPSTestRunner(self.extensionDir,
+                                  emailresults=self.emailresults,
+                                  testfile=self.testfile,
+                                  logfile=self.logfile,
+                                  binary=binary,
+                                  config=stageconfig,
+                                  rlock=self.rlock,
+                                  mobile=False,
+                                  autolog=self.autolog)
+        TPS_stage.run_tests()