Bug 872758 - Delete all documents on FHR upload; r=gps, a=akeybl
--- a/services/common/bagheeraclient.js
+++ b/services/common/bagheeraclient.js
@@ -109,19 +109,19 @@ BagheeraClient.prototype = Object.freeze
* a UUID in hex form.
* @param payload
* (string|object) Data to upload. Can be specified as a string (which
* is assumed to be JSON) or an object. If an object, it will be fed into
* JSON.stringify() for serialization.
* @param options
* (object) Extra options to control behavior. Recognized properties:
*
- * deleteID -- (string) Old document ID to delete as part of
+ * deleteIDs -- (array) Old document IDs to delete as part of
* upload. If not specified, no old documents will be deleted as
- * part of upload. The string value is typically a UUID in hex
+ * part of upload. The array values are typically UUIDs in hex
* form.
*
* telemetryCompressed -- (string) Telemetry histogram to record
* compressed size of payload under. If not defined, no telemetry
* data for the compressed size will be recorded.
*
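+ * Illustrative call (the IDs below are placeholders, not real UUIDs):
+ *
+ *   client.uploadJSON("metrics", newDocID, {foo: "bar"},
+ *                     {deleteIDs: [oldDocID1, oldDocID2]});
+ *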
* @return Promise<BagheeraClientRequestResult>
*/
@@ -156,22 +156,26 @@ BagheeraClient.prototype = Object.freeze
}
this._log.info("Uploading data to " + uri);
let request = new BagheeraRequest(uri);
request.loadFlags = this._loadFlags;
request.timeout = this.DEFAULT_TIMEOUT_MSEC;
- let deleteID;
+ // The single-ID deleteID option was replaced by deleteIDs; fail loudly on stale callers.
+ if ("deleteID" in options) {
+ throw new Error("deleteID is no longer supported; pass an array of IDs via deleteIDs.");
+ }
- if (options.deleteID) {
- deleteID = options.deleteID;
- this._log.debug("Will delete " + deleteID);
- request.setHeader("X-Obsolete-Document", options.deleteID);
+ let deleteIDs;
+ if (options.deleteIDs && options.deleteIDs.length > 0) {
+ deleteIDs = options.deleteIDs;
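+ // Multiple obsolete IDs go out as a single comma-delimited X-Obsolete-Document
+ // header value; the test server in bagheeraserver.js splits on ",".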
+ this._log.debug("Will delete " + deleteIDs.join(", "));
+ request.setHeader("X-Obsolete-Document", deleteIDs.join(","));
}
let deferred = Promise.defer();
data = CommonUtils.convertString(data, "uncompressed", "deflate");
if (options.telemetryCompressed) {
try {
let h = Services.telemetry.getHistogramById(options.telemetryCompressed);
@@ -185,17 +189,17 @@ BagheeraClient.prototype = Object.freeze
// TODO proper header per bug 807134.
request.setHeader("Content-Type", "application/json+zlib; charset=utf-8");
this._log.info("Request body length: " + data.length);
let result = new BagheeraClientRequestResult();
result.namespace = namespace;
result.id = id;
- result.deleteID = deleteID;
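+ // slice(0) copies the array so the result is unaffected if the caller later
+ // mutates options.deleteIDs.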
+ result.deleteIDs = deleteIDs ? deleteIDs.slice(0) : null;
request.onComplete = this._onComplete.bind(this, request, deferred, result);
request.post(data);
return deferred.promise;
},
/**
--- a/services/common/modules-testing/bagheeraserver.js
+++ b/services/common/modules-testing/bagheeraserver.js
@@ -269,17 +269,19 @@ BagheeraServer.prototype = {
throw HTTP_400;
}
this.namespaces[namespace][id] = doc;
if (request.hasHeader("X-Obsolete-Document")) {
let obsolete = request.getHeader("X-Obsolete-Document");
this._log.info("Deleting from X-Obsolete-Document header: " + obsolete);
- delete this.namespaces[namespace][obsolete];
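+ // The header may carry several comma-joined IDs; delete each referenced document.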
+ for (let obsoleteID of obsolete.split(",")) {
+ delete this.namespaces[namespace][obsoleteID];
+ }
}
response.setStatusLine(request.httpVersion, 201, "Created");
response.setHeader("Content-Type", "text/plain");
let body = id;
response.bodyOutputStream.write(body, body.length);
},
--- a/services/common/tests/unit/test_bagheera_client.js
+++ b/services/common/tests/unit/test_bagheera_client.js
@@ -1,17 +1,18 @@
/* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/publicdomain/zero/1.0/ */
"use strict";
Cu.import("resource://services-common/bagheeraclient.js");
Cu.import("resource://services-common/rest.js");
Cu.import("resource://testing-common/services-common/bagheeraserver.js");
-
+Cu.import("resource://gre/modules/Promise.jsm");
+Cu.import("resource://gre/modules/Task.jsm");
const PORT = 8080;
function getClientAndServer(port=PORT) {
let uri = "http://localhost";
let server = new BagheeraServer(uri);
server.start(port);
@@ -67,39 +68,74 @@ add_test(function test_post_json_bad_dat
function onResult(result) {
do_check_true(result.transportSuccess);
do_check_false(result.serverSuccess);
server.stop(run_next_test);
});
});
-add_test(function test_post_json_delete_obsolete() {
+add_task(function test_post_delete_multiple_obsolete_documents() {
let [client, server] = getClientAndServer();
- server.createNamespace("foo");
- server.setDocument("foo", "obsolete", "Old payload");
+ let namespace = "foo";
+ let documents = [
+ [namespace, "one", "{v:1}"],
+ [namespace, "two", "{v:2}"],
+ [namespace, "three", "{v:3}"],
+ [namespace, "four", "{v:4}"],
+ ];
- let promise = client.uploadJSON("foo", "new", {foo: "bar"}, {deleteID: "obsolete"});
- promise.then(function onSuccess(result) {
+ try {
+ // Create the initial documents.
+ server.createNamespace(namespace);
+ for (let [ns, id, payload] of documents) {
+ server.setDocument(ns, id, payload);
+ do_check_true(server.hasDocument(ns, id));
+ }
+
+ // Upload a new document and request deletion of the first two existing ones.
+ let deleteIDs = documents.slice(0, 2).map((doc) => doc[1]);
+ let result = yield client.uploadJSON(namespace, "new-1", {foo: "bar"}, {deleteIDs: deleteIDs});
do_check_true(result.transportSuccess);
do_check_true(result.serverSuccess);
- do_check_true(server.hasDocument("foo", "new"));
- do_check_false(server.hasDocument("foo", "obsolete"));
+ do_check_true(server.hasDocument(namespace, "new-1"));
+ for (let id of deleteIDs) {
+ do_check_false(server.hasDocument(namespace, id));
+ }
+ // Documents that were not staged for deletion should still be present.
+ for (let [, id] of documents) {
+ if (deleteIDs.indexOf(id) == -1) {
+ do_check_true(server.hasDocument(namespace, id));
+ }
+ }
- server.stop(run_next_test);
- });
+ // Test upload without deleting documents.
+ let ids = Object.keys(server.namespaces[namespace]);
+ result = yield client.uploadJSON(namespace, "new-2", {foo: "bar"});
+ do_check_true(result.transportSuccess);
+ do_check_true(result.serverSuccess);
+ do_check_true(server.hasDocument(namespace, "new-2"));
+ // No deletions were requested this time, so every document present before the upload should remain.
+ for (let id of ids) {
+ do_check_true(server.hasDocument(namespace, id));
+ }
+ } finally {
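+ // Always stop the test server, even if an assertion above throws.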
+ let deferred = Promise.defer();
+ server.stop(deferred.resolve.bind(deferred));
+ yield deferred.promise;
+ }
});
add_test(function test_delete_document() {
let [client, server] = getClientAndServer();
server.createNamespace("foo");
server.setDocument("foo", "bar", "{}");
client.deleteDocument("foo", "bar").then(function onResult(result) {
do_check_true(result.transportSuccess);
do_check_true(result.serverSuccess);
do_check_null(server.getDocument("foo", "bar"));
server.stop(run_next_test);
});
});
--- a/services/healthreport/healthreporter.jsm
+++ b/services/healthreport/healthreporter.jsm
@@ -172,41 +172,54 @@ HealthReporterState.prototype = Object.f
addRemoteID: function (id) {
this._log.warn("Recording new remote ID: " + id);
this._s.remoteIDs.push(id);
return this.save();
},
removeRemoteID: function (id) {
- this._log.warn("Removing document from remote ID list: " + id);
- let filtered = this._s.remoteIDs.filter((x) => x != id);
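+ // Single-ID variant retained for existing callers; delegates to removeRemoteIDs.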
+ return this.removeRemoteIDs(id ? [id] : []);
+ },
+
+ removeRemoteIDs: function (ids) {
+ if (!ids || !ids.length) {
+ this._log.warn("No IDs passed for removal.");
+ return Promise.resolve();
+ }
+
+ this._log.warn("Removing documents from remote ID list: " + ids);
+ let filtered = this._s.remoteIDs.filter((x) => ids.indexOf(x) === -1);
if (filtered.length == this._s.remoteIDs.length) {
return Promise.resolve();
}
this._s.remoteIDs = filtered;
return this.save();
},
setLastPingDate: function (date) {
this._s.lastPingTime = date.getTime();
return this.save();
},
updateLastPingAndRemoveRemoteID: function (date, id) {
- if (!id) {
+ return this.updateLastPingAndRemoveRemoteIDs(date, id ? [id] : []);
+ },
+
+ updateLastPingAndRemoveRemoteIDs: function (date, ids) {
+ if (!ids || !ids.length) {
return this.setLastPingDate(date);
}
this._log.info("Recording last ping time and deleted remote document.");
this._s.lastPingTime = date.getTime();
- return this.removeRemoteID(id);
+ return this.removeRemoteIDs(ids);
},
_migratePrefs: function () {
let prefs = this._reporter._prefs;
let lastID = prefs.get("lastSubmitID", null);
let lastPingDate = CommonUtils.getDatePref(prefs, "lastPingTime",
0, this._log, OLDEST_ALLOWED_YEAR);
@@ -1300,20 +1313,20 @@ HealthReporter.prototype = Object.freeze
}
if (hrProvider && !isDelete) {
hrProvider.recordEvent("uploadSuccess", date);
}
if (isDelete) {
this._log.warn("Marking delete as successful.");
- yield this._state.removeRemoteID(result.id);
+ yield this._state.removeRemoteIDs([result.id]);
} else {
this._log.warn("Marking upload as successful.");
- yield this._state.updateLastPingAndRemoveRemoteID(date, result.deleteID);
+ yield this._state.updateLastPingAndRemoveRemoteIDs(date, result.deleteIDs);
}
request.onSubmissionSuccess(this._now());
throw new Task.Result(true);
}.bind(this));
},
@@ -1350,17 +1363,17 @@ HealthReporter.prototype = Object.freeze
: "firstDocumentUploadAttempt";
hrProvider.recordEvent(event, now);
}
TelemetryStopwatch.start(TELEMETRY_UPLOAD, this);
let result;
try {
let options = {
- deleteID: lastID,
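+ // Request deletion of every outstanding remote document except the one being
+ // uploaded now (bug 872758).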
+ deleteIDs: this._state.remoteIDs.filter((x) => x != id),
telemetryCompressed: TELEMETRY_PAYLOAD_SIZE_COMPRESSED,
};
result = yield client.uploadJSON(this.serverNamespace, id, payload,
options);
TelemetryStopwatch.finish(TELEMETRY_UPLOAD, this);
} catch (ex) {
TelemetryStopwatch.cancel(TELEMETRY_UPLOAD, this);
if (hrProvider) {
--- a/services/healthreport/tests/xpcshell/test_healthreporter.js
+++ b/services/healthreport/tests/xpcshell/test_healthreporter.js
@@ -597,30 +597,34 @@ add_task(function test_data_submission_s
do_check_eq(reporter.lastPingDate.getTime(), 0);
do_check_false(reporter.haveRemoteData());
let deferred = Promise.defer();
let now = new Date();
let request = new DataSubmissionRequest(deferred, now);
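+ // Seed a stale remote ID so this upload counts as a continuation and requests
+ // the stale document's deletion.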
+ yield reporter._state.addRemoteID("foo");
reporter.requestDataUpload(request);
yield deferred.promise;
do_check_eq(request.state, request.SUBMISSION_SUCCESS);
do_check_true(reporter.lastPingDate.getTime() > 0);
do_check_true(reporter.haveRemoteData());
+ for (let remoteID of reporter._state.remoteIDs) {
+ do_check_neq(remoteID, "foo");
+ }
// Ensure data from providers made it to payload.
let o = yield reporter.getJSONPayload(true);
do_check_true("DummyProvider.DummyMeasurement" in o.data.last);
do_check_true("DummyConstantProvider.DummyMeasurement" in o.data.last);
let data = yield getHealthReportProviderValues(reporter, now);
do_check_eq(data._v, 1);
- do_check_eq(data.firstDocumentUploadAttempt, 1);
+ do_check_eq(data.continuationUploadAttempt, 1);
do_check_eq(data.uploadSuccess, 1);
do_check_eq(Object.keys(data).length, 3);
let d = reporter.lastPingDate;
let id = reporter.lastSubmitID;
reporter._shutdown();
@@ -959,46 +963,49 @@ add_task(function test_state_invalid_jso
do_check_null(reporter.lastSubmitID);
} finally {
reporter._shutdown();
}
});
add_task(function test_state_multiple_remote_ids() {
let [reporter, server] = yield getReporterAndServer("state_multiple_remote_ids");
-
+ let documents = [
+ [reporter.serverNamespace, "one", "{v:1}"],
+ [reporter.serverNamespace, "two", "{v:2}"],
+ ];
let now = new Date(Date.now() - 5000);
- server.setDocument(reporter.serverNamespace, "id1", "foo");
- server.setDocument(reporter.serverNamespace, "id2", "bar");
-
try {
- yield reporter._state.addRemoteID("id1");
- yield reporter._state.addRemoteID("id2");
+ for (let [ns, id, payload] of documents) {
+ server.setDocument(ns, id, payload);
+ do_check_true(server.hasDocument(ns, id));
+ yield reporter._state.addRemoteID(id);
+ do_check_eq(reporter._state.remoteIDs.indexOf(id), reporter._state.remoteIDs.length - 1);
+ }
yield reporter._state.setLastPingDate(now);
do_check_eq(reporter._state.remoteIDs.length, 2);
- do_check_eq(reporter._state.remoteIDs[0], "id1");
- do_check_eq(reporter._state.remoteIDs[1], "id2");
- do_check_eq(reporter.lastSubmitID, "id1");
+ do_check_eq(reporter.lastSubmitID, documents[0][1]);
let deferred = Promise.defer();
let request = new DataSubmissionRequest(deferred, now);
reporter.requestDataUpload(request);
yield deferred.promise;
- do_check_eq(reporter._state.remoteIDs.length, 2);
- do_check_eq(reporter._state.remoteIDs[0], "id2");
+ do_check_eq(reporter._state.remoteIDs.length, 1);
+ for (let [, id] of documents) {
+ do_check_eq(reporter._state.remoteIDs.indexOf(id), -1);
+ do_check_false(server.hasDocument(reporter.serverNamespace, id));
+ }
do_check_true(reporter.lastPingDate.getTime() > now.getTime());
- do_check_false(server.hasDocument(reporter.serverNamespace, "id1"));
- do_check_true(server.hasDocument(reporter.serverNamespace, "id2"));
- let o = CommonUtils.readJSON(reporter._state._filename);
- do_check_eq(reporter._state.remoteIDs.length, 2);
- do_check_eq(reporter._state.remoteIDs[0], "id2");
- do_check_eq(reporter._state.remoteIDs[1], reporter._state.remoteIDs[1]);
+ let o = yield CommonUtils.readJSON(reporter._state._filename);
+ do_check_eq(o.remoteIDs.length, 1);
+ do_check_eq(o.remoteIDs[0], reporter._state.remoteIDs[0]);
+ do_check_eq(o.lastPingTime, reporter.lastPingDate.getTime());
} finally {
yield shutdownServer(server);
reporter._shutdown();
}
});
// If we have a state file then downgrade to prefs, the prefs should be
// reimported and should supplement existing state.
@@ -1026,9 +1033,8 @@ add_task(function test_state_downgrade_u
let o = yield CommonUtils.readJSON(reporter._state._filename);
do_check_eq(o.remoteIDs.length, 3);
do_check_eq(o.lastPingTime, now.getTime() + 1000);
} finally {
reporter._shutdown();
}
});
-