Bug 1305581 - Verify that V4 updates were applied correctly by computing a checksum on the final result. r=francois
authordimi <dlee@mozilla.com>
Thu, 27 Oct 2016 08:36:26 +0800
changeset 319723 1509b970b24f6b97a26c3a5b9afa89d0fad6a5cd
parent 319722 660be9ffa179f71f28180a6f318590614d403364
child 319724 0680e3176fa56b00a75d187b9b338546371486a8
push id20748
push userphilringnalda@gmail.com
push dateFri, 28 Oct 2016 03:39:55 +0000
treeherderfx-team@715360440695 [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewersfrancois
bugs1305581
milestone52.0a1
Bug 1305581 - Verify that V4 updates were applied correctly by computing a checksum on the final result. r=francois MozReview-Commit-ID: LNtFOVMVw2U
toolkit/components/telemetry/Histograms.json
toolkit/components/url-classifier/Classifier.cpp
toolkit/components/url-classifier/LookupCacheV4.cpp
toolkit/components/url-classifier/LookupCacheV4.h
toolkit/components/url-classifier/tests/gtest/TestUrlClassifierTableUpdateV4.cpp
toolkit/components/url-classifier/tests/unit/test_listmanager.js
--- a/toolkit/components/telemetry/Histograms.json
+++ b/toolkit/components/telemetry/Histograms.json
@@ -3817,16 +3817,23 @@
     "alert_emails": ["safebrowsing-telemetry@mozilla.org"],
     "expires_in_version": "58",
     "kind": "exponential",
     "high": 1000,
     "n_buckets": 10,
     "bug_numbers": [1283007],
     "description": "Time spent fallocating Variable-Length PrefixSet (ms)"
   },
+  "URLCLASSIFIER_VLPS_LOAD_CORRUPT": {
+    "alert_emails": ["safebrowsing-telemetry@mozilla.org"],
+    "expires_in_version": "58",
+    "kind": "boolean",
+    "bug_numbers": [1305581],
+    "description": "Whether or not a variable-length prefix set loaded from disk is corrupted (true = file corrupted)."
+  },
   "URLCLASSIFIER_LC_PREFIXES": {
     "alert_emails": ["safebrowsing-telemetry@mozilla.org"],
     "expires_in_version": "never",
     "kind": "linear",
     "high": 1500000,
     "n_buckets": 15,
     "description": "Size of the prefix cache in entries"
   },
@@ -3862,17 +3869,17 @@
     "description": "This metric is recorded every time a gethash lookup is performed, `true` is recorded if the lookup times out."
   },
   "URLCLASSIFIER_UPDATE_ERROR_TYPE": {
     "alert_emails": ["safebrowsing-telemetry@mozilla.org"],
     "expires_in_version": "58",
     "kind": "enumerated",
     "n_values": 10,
     "bug_numbers": [1305801],
-    "description": "An error was encountered while parsing a partial update returned by a Safe Browsing V4 server (0 = addition of an already existing prefix, 1 = parser got into an infinite loop, 2 = removal index out of bounds)"
+    "description": "An error was encountered while parsing a partial update returned by a Safe Browsing V4 server (0 = addition of an already existing prefix, 1 = parser got into an infinite loop, 2 = removal index out of bounds, 3 = checksum mismatch, 4 = missing checksum)"
   },
   "CSP_DOCUMENTS_COUNT": {
     "alert_emails": ["seceng@mozilla.com"],
     "bug_numbers": [1252829],
     "expires_in_version": "55",
     "kind": "count",
     "description": "Number of unique pages that contain a CSP"
   },
--- a/toolkit/components/url-classifier/Classifier.cpp
+++ b/toolkit/components/url-classifier/Classifier.cpp
@@ -978,63 +978,59 @@ Classifier::UpdateTableV4(nsTArray<Table
   LookupCacheV4* lookupCache =
     LookupCache::Cast<LookupCacheV4>(GetLookupCache(aTable));
   if (!lookupCache) {
     return NS_ERROR_FAILURE;
   }
 
   nsresult rv = NS_OK;
 
-  // prefixes2 is only used in partial update. If there are multiple
-  // updates for the same table, prefixes1 & prefixes2 will act as
-  // input and output in turn to reduce memory copy overhead.
+  // If there are multiple updates for the same table, prefixes1 & prefixes2
+  // will act as input and output in turn to reduce memory copy overhead.
   PrefixStringMap prefixes1, prefixes2;
-  PrefixStringMap* output = &prefixes1;
+  PrefixStringMap* input = &prefixes1;
+  PrefixStringMap* output = &prefixes2;
 
   TableUpdateV4* lastAppliedUpdate = nullptr;
   for (uint32_t i = 0; i < aUpdates->Length(); i++) {
     TableUpdate *update = aUpdates->ElementAt(i);
     if (!update || !update->TableName().Equals(aTable)) {
       continue;
     }
 
     auto updateV4 = TableUpdate::Cast<TableUpdateV4>(update);
     NS_ENSURE_TRUE(updateV4, NS_ERROR_FAILURE);
 
     if (updateV4->IsFullUpdate()) {
-      TableUpdateV4::PrefixStdStringMap& map = updateV4->Prefixes();
-
+      input->Clear();
       output->Clear();
-      for (auto iter = map.Iter(); !iter.Done(); iter.Next()) {
-        // prefixes is an nsClassHashtable object stores prefix string.
-        // It will take the ownership of the put object.
-        nsCString* prefix = new nsCString(iter.Data()->GetPrefixString());
-        output->Put(iter.Key(), prefix);
+      rv = lookupCache->ApplyUpdate(updateV4, *input, *output);
+      if (NS_FAILED(rv)) {
+        return rv;
       }
     } else {
-      PrefixStringMap* input = nullptr;
       // If both prefix sets are empty, this means we are doing a partial update
       // without a prior full/partial update in the loop. In this case we should
       // get prefixes from the lookup cache first.
       if (prefixes1.IsEmpty() && prefixes2.IsEmpty()) {
         lookupCache->GetPrefixes(prefixes1);
-        input = &prefixes1;
-        output = &prefixes2;
       } else {
         MOZ_ASSERT(prefixes1.IsEmpty() ^ prefixes2.IsEmpty());
 
         // When there are multiple partial updates, input should always point
         // to the non-empty prefix set(filled by previous full/partial update).
         // output should always point to the empty prefix set.
         input = prefixes1.IsEmpty() ? &prefixes2 : &prefixes1;
         output = prefixes1.IsEmpty() ? &prefixes1 : &prefixes2;
       }
 
-      rv = lookupCache->ApplyPartialUpdate(updateV4, *input, *output);
-      NS_ENSURE_SUCCESS(rv, rv);
+      rv = lookupCache->ApplyUpdate(updateV4, *input, *output);
+      if (NS_FAILED(rv)) {
+        return rv;
+      }
 
       input->Clear();
     }
 
     // Keep track of the last applied update.
     lastAppliedUpdate = updateV4;
 
     aUpdates->ElementAt(i) = nullptr;
--- a/toolkit/components/url-classifier/LookupCacheV4.cpp
+++ b/toolkit/components/url-classifier/LookupCacheV4.cpp
@@ -1,15 +1,17 @@
 //* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "LookupCacheV4.h"
 #include "HashStore.h"
+#include "mozilla/Unused.h"
+#include <string>
 
 // MOZ_LOG=UrlClassifierDbService:5
 extern mozilla::LazyLogModule gUrlClassifierDbServiceLog;
 #define LOG(args) MOZ_LOG(gUrlClassifierDbServiceLog, mozilla::LogLevel::Debug, args)
 #define LOG_ENABLED() MOZ_LOG_TEST(gUrlClassifierDbServiceLog, mozilla::LogLevel::Debug)
 
 #define METADATA_SUFFIX NS_LITERAL_CSTRING(".metadata")
 
@@ -109,17 +111,32 @@ nsresult
 LookupCacheV4::StoreToFile(nsIFile* aFile)
 {
   return mVLPrefixSet->StoreToFile(aFile);
 }
 
 nsresult
 LookupCacheV4::LoadFromFile(nsIFile* aFile)
 {
-  return mVLPrefixSet->LoadFromFile(aFile);
+  nsresult rv = mVLPrefixSet->LoadFromFile(aFile);
+  if (NS_FAILED(rv)) {
+    return rv;
+  }
+
+  nsCString state, checksum;
+  rv = LoadMetadata(state, checksum);
+  if (NS_FAILED(rv)) {
+    return rv;
+  }
+
+  rv = VerifyChecksum(checksum);
+  Telemetry::Accumulate(Telemetry::URLCLASSIFIER_VLPS_LOAD_CORRUPT,
+                        rv == NS_ERROR_FILE_CORRUPTED);
+
+  return rv;
 }
 
 size_t
 LookupCacheV4::SizeOfPrefixSet()
 {
   return mVLPrefixSet->SizeOfIncludingThis(moz_malloc_size_of);
 }
 
@@ -132,111 +149,196 @@ AppendPrefixToMap(PrefixStringMap& prefi
 
   nsCString* prefixString = prefixes.LookupOrAdd(prefix.Length());
   prefixString->Append(prefix.BeginReading(), prefix.Length());
 }
 
 // Please see https://bug1287058.bmoattachments.org/attachment.cgi?id=8795366
 // for detail about partial update algorithm.
 nsresult
-LookupCacheV4::ApplyPartialUpdate(TableUpdateV4* aTableUpdate,
-                                  PrefixStringMap& aInputMap,
-                                  PrefixStringMap& aOutputMap)
+LookupCacheV4::ApplyUpdate(TableUpdateV4* aTableUpdate,
+                           PrefixStringMap& aInputMap,
+                           PrefixStringMap& aOutputMap)
 {
   MOZ_ASSERT(aOutputMap.IsEmpty());
 
+  nsCOMPtr<nsICryptoHash> crypto;
+  nsresult rv = InitCrypto(crypto);
+  if (NS_FAILED(rv)) {
+    return rv;
+  }
+
   // oldPSet contains prefixes we already have or we just merged last round.
   // addPSet contains prefixes stored in tableUpdate which should be merged with oldPSet.
   VLPrefixSet oldPSet(aInputMap);
   VLPrefixSet addPSet(aTableUpdate->Prefixes());
 
   // RemovalIndiceArray is a sorted integer array indicating the index of prefix we should
   // remove from the old prefix set(according to lexigraphic order).
   // |removalIndex| is the current index of RemovalIndiceArray.
   // |numOldPrefixPicked| is used to record how many prefixes we picked from the old map.
   TableUpdateV4::RemovalIndiceArray& removalArray = aTableUpdate->RemovalIndices();
   uint32_t removalIndex = 0;
   int32_t numOldPrefixPicked = -1;
 
   nsDependentCSubstring smallestOldPrefix;
   nsDependentCSubstring smallestAddPrefix;
 
+  bool isOldMapEmpty = false, isAddMapEmpty = false;
+
   // This is used to avoid infinite loop for partial update algorithm.
   // The maximum loops will be the number of old prefixes plus the number of add prefixes.
-  uint32_t index = oldPSet.Count() + addPSet.Count() + 1;
+  int32_t index = oldPSet.Count() + addPSet.Count() + 1;
   for(;index > 0; index--) {
     // Get smallest prefix from the old prefix set if we don't have one
-    if (smallestOldPrefix.IsEmpty()) {
-      // If prefixes from the old prefix set are all merged,
-      // then we can merge the entire add prefix set directly.
-      if (!oldPSet.GetSmallestPrefix(smallestOldPrefix)) {
-        AppendPrefixToMap(aOutputMap, smallestAddPrefix);
-        addPSet.Merge(aOutputMap);
-        break;
-      }
+    if (smallestOldPrefix.IsEmpty() && !isOldMapEmpty) {
+      isOldMapEmpty = !oldPSet.GetSmallestPrefix(smallestOldPrefix);
     }
 
     // Get smallest prefix from add prefix set if we don't have one
-    if (smallestAddPrefix.IsEmpty()) {
-      // If add prefixes are all merged and there is no removalIndices left,
-      // then merge the entire old prefix set directly. If there are still
-      // removalIndices left, we should still merge prefixes one by one
-      // to know which prefix from old prefix set should be removed.
-      if (!addPSet.GetSmallestPrefix(smallestAddPrefix) &&
-        removalIndex >= removalArray.Length()) {
-        AppendPrefixToMap(aOutputMap, smallestOldPrefix);
-        oldPSet.Merge(aOutputMap);
-        break;
-      }
+    if (smallestAddPrefix.IsEmpty() && !isAddMapEmpty) {
+      isAddMapEmpty = !addPSet.GetSmallestPrefix(smallestAddPrefix);
     }
 
-    // Compare the smallest string in old prefix set and add prefix set, merge the
-    // smaller one into new map to ensure merged string still follows
-    // lexigraphic order.
-    if (smallestOldPrefix < smallestAddPrefix ||
-        smallestAddPrefix.IsEmpty()) {
+    bool pickOld;
+
+    // If both prefix sets are not empty, then compare to find the smaller one.
+    if (!isOldMapEmpty && !isAddMapEmpty) {
+      if (smallestOldPrefix == smallestAddPrefix) {
+        LOG(("Add prefix should not exist in the original prefix set."));
+        Telemetry::Accumulate(Telemetry::URLCLASSIFIER_UPDATE_ERROR_TYPE,
+                              DUPLICATE_PREFIX);
+        return NS_ERROR_FAILURE;
+      }
+
+      // Compare the smallest string in old prefix set and add prefix set,
+      // merge the smaller one into new map to ensure merged string still
+      // follows lexicographic order.
+      pickOld = smallestOldPrefix < smallestAddPrefix;
+    } else if (!isOldMapEmpty && isAddMapEmpty) {
+      pickOld = true;
+    } else if (isOldMapEmpty && !isAddMapEmpty) {
+      pickOld = false;
+    // If both maps are empty, then partial update is complete.
+    } else {
+      break;
+    }
+
+    if (pickOld) {
       numOldPrefixPicked++;
 
       // If the number of picks from old map matches the removalIndex, then this prefix
       // will be removed by not merging it to new map.
       if (removalIndex < removalArray.Length() &&
           numOldPrefixPicked == removalArray[removalIndex]) {
         removalIndex++;
       } else {
         AppendPrefixToMap(aOutputMap, smallestOldPrefix);
+
+        crypto->Update(reinterpret_cast<uint8_t*>(const_cast<char*>(
+                       smallestOldPrefix.BeginReading())),
+                       smallestOldPrefix.Length());
       }
       smallestOldPrefix.SetLength(0);
-    } else if (smallestOldPrefix > smallestAddPrefix ||
-               smallestOldPrefix.IsEmpty()){
+    } else {
       AppendPrefixToMap(aOutputMap, smallestAddPrefix);
+
+      crypto->Update(reinterpret_cast<uint8_t*>(const_cast<char*>(
+                     smallestAddPrefix.BeginReading())),
+                     smallestAddPrefix.Length());
+
       smallestAddPrefix.SetLength(0);
-    } else {
-      NS_WARNING("Add prefix should not exist in the original prefix set.");
-      Telemetry::Accumulate(Telemetry::URLCLASSIFIER_UPDATE_ERROR_TYPE,
-                            DUPLICATE_PREFIX);
-      return NS_ERROR_FAILURE;
     }
   }
 
   // We expect index will be greater to 0 because max number of runs will be
   // the number of original prefix plus add prefix.
   if (index <= 0) {
-    NS_WARNING("There are still prefixes remaining after reaching maximum runs.");
+    LOG(("There are still prefixes remaining after reaching maximum runs."));
     Telemetry::Accumulate(Telemetry::URLCLASSIFIER_UPDATE_ERROR_TYPE,
                           INFINITE_LOOP);
     return NS_ERROR_FAILURE;
   }
 
   if (removalIndex < removalArray.Length()) {
-    NS_WARNING("There are still prefixes to remove after exhausting the old PrefixSet.");
+    LOG(("There are still prefixes to remove after exhausting the old PrefixSet."));
     Telemetry::Accumulate(Telemetry::URLCLASSIFIER_UPDATE_ERROR_TYPE,
                           WRONG_REMOVAL_INDICES);
     return NS_ERROR_FAILURE;
   }
 
+  nsAutoCString checksum;
+  crypto->Finish(false, checksum);
+  if (aTableUpdate->Checksum().IsEmpty()) {
+    LOG(("Update checksum missing."));
+    Telemetry::Accumulate(Telemetry::URLCLASSIFIER_UPDATE_ERROR_TYPE,
+                          MISSING_CHECKSUM);
+
+    // Generate our own checksum and store it in tableUpdate to ensure there
+    // is always a checksum in .metadata.
+    std::string stdChecksum(checksum.BeginReading(), checksum.Length());
+    aTableUpdate->NewChecksum(stdChecksum);
+
+  } else if (aTableUpdate->Checksum() != checksum) {
+    LOG(("Checksum mismatch after applying partial update"));
+    Telemetry::Accumulate(Telemetry::URLCLASSIFIER_UPDATE_ERROR_TYPE,
+                          CHECKSUM_MISMATCH);
+    return NS_ERROR_FAILURE;
+  }
+
+  return NS_OK;
+}
+
+nsresult
+LookupCacheV4::InitCrypto(nsCOMPtr<nsICryptoHash>& aCrypto)
+{
+  nsresult rv;
+  aCrypto = do_CreateInstance(NS_CRYPTO_HASH_CONTRACTID, &rv);
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    return rv;
+  }
+
+  rv = aCrypto->Init(nsICryptoHash::SHA256);
+  Unused << NS_WARN_IF(NS_FAILED(rv));
+
+  return rv;
+}
+
+nsresult
+LookupCacheV4::VerifyChecksum(const nsACString& aChecksum)
+{
+  nsCOMPtr<nsICryptoHash> crypto;
+  nsresult rv = InitCrypto(crypto);
+  if (NS_FAILED(rv)) {
+    return rv;
+  }
+
+  PrefixStringMap map;
+  mVLPrefixSet->GetPrefixes(map);
+
+  VLPrefixSet loadPSet(map);
+  uint32_t index = loadPSet.Count() + 1;
+  for(;index > 0; index--) {
+    nsDependentCSubstring prefix;
+    if (!loadPSet.GetSmallestPrefix(prefix)) {
+      break;
+    }
+    crypto->Update(reinterpret_cast<uint8_t*>(const_cast<char*>(
+                   prefix.BeginReading())),
+                   prefix.Length());
+  }
+
+  nsAutoCString checksum;
+  crypto->Finish(false, checksum);
+
+  if (checksum != aChecksum) {
+    LOG(("Checksum mismatch when loading prefixes from file."));
+    return NS_ERROR_FILE_CORRUPTED;
+  }
+
   return NS_OK;
 }
 
 //////////////////////////////////////////////////////////////////////////
 // A set of lightweight functions for reading/writing value from/to file.
 
 namespace {
 
--- a/toolkit/components/url-classifier/LookupCacheV4.h
+++ b/toolkit/components/url-classifier/LookupCacheV4.h
@@ -24,40 +24,44 @@ public:
   virtual nsresult Init() override;
   virtual nsresult Has(const Completion& aCompletion,
                        bool* aHas, bool* aComplete) override;
 
   nsresult Build(PrefixStringMap& aPrefixMap);
 
   nsresult GetPrefixes(PrefixStringMap& aPrefixMap);
 
-  // ApplyPartialUpdate will merge partial update data stored in aTableUpdate
-  // with prefixes in aInputMap.
-  nsresult ApplyPartialUpdate(TableUpdateV4* aTableUpdate,
-                              PrefixStringMap& aInputMap,
-                              PrefixStringMap& aOutputMap);
+  // ApplyUpdate will merge data stored in aTableUpdate with prefixes in aInputMap.
+  nsresult ApplyUpdate(TableUpdateV4* aTableUpdate,
+                       PrefixStringMap& aInputMap,
+                       PrefixStringMap& aOutputMap);
 
   nsresult WriteMetadata(TableUpdateV4* aTableUpdate);
   nsresult LoadMetadata(nsACString& aState, nsACString& aChecksum);
 
   static const int VER;
 
 protected:
   virtual nsresult ClearPrefixes() override;
   virtual nsresult StoreToFile(nsIFile* aFile) override;
   virtual nsresult LoadFromFile(nsIFile* aFile) override;
   virtual size_t SizeOfPrefixSet() override;
 
 private:
   virtual int Ver() const override { return VER; }
 
+  nsresult InitCrypto(nsCOMPtr<nsICryptoHash>& aCrypto);
+  nsresult VerifyChecksum(const nsACString& aChecksum);
+
   enum UPDATE_ERROR_TYPES {
     DUPLICATE_PREFIX = 0,
     INFINITE_LOOP = 1,
     WRONG_REMOVAL_INDICES = 2,
+    CHECKSUM_MISMATCH = 3,
+    MISSING_CHECKSUM = 4,
   };
 
   RefPtr<VariableLengthPrefixSet> mVLPrefixSet;
 };
 
 } // namespace safebrowsing
 } // namespace mozilla
 
--- a/toolkit/components/url-classifier/tests/gtest/TestUrlClassifierTableUpdateV4.cpp
+++ b/toolkit/components/url-classifier/tests/gtest/TestUrlClassifierTableUpdateV4.cpp
@@ -1,23 +1,30 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
 #include "Classifier.h"
 #include "HashStore.h"
 #include "nsAppDirectoryServiceDefs.h"
 #include "nsIFile.h"
 #include "nsIThread.h"
 #include "string.h"
 #include "gtest/gtest.h"
 #include "nsThreadUtils.h"
 
 using namespace mozilla;
 using namespace mozilla::safebrowsing;
 
 typedef nsCString _Prefix;
 typedef nsTArray<_Prefix> _PrefixArray;
 
+#define GTEST_SAFEBROWSING_DIR NS_LITERAL_CSTRING("safebrowsing")
+#define GTEST_TABLE NS_LITERAL_CSTRING("gtest-malware-proto")
+#define GTEST_PREFIXFILE NS_LITERAL_CSTRING("gtest-malware-proto.pset")
+
 // This function removes common elements of inArray and outArray from
 // outArray. This is used by partial update testcase to ensure partial update
 // data won't contain prefixes we already have.
 static void
 RemoveIntersection(const _PrefixArray& inArray, _PrefixArray& outArray)
 {
   for (uint32_t i = 0; i < inArray.Length(); i++) {
     int32_t idx = outArray.BinaryIndexOf(inArray[i]);
@@ -32,31 +39,60 @@ RemoveIntersection(const _PrefixArray& i
 static void
 RemoveElements(const nsTArray<uint32_t>& removal, _PrefixArray& outArray)
 {
   for (int32_t i = removal.Length() - 1; i >= 0; i--) {
     outArray.RemoveElementAt(removal[i]);
   }
 }
 
+static void
+MergeAndSortArray(const _PrefixArray& array1,
+                  const _PrefixArray& array2,
+                  _PrefixArray& output)
+{
+  output.Clear();
+  output.AppendElements(array1);
+  output.AppendElements(array2);
+  output.Sort();
+}
+
 // This function converts lexigraphic-sorted prefixes to a hashtable
 // which key is prefix size and value is concatenated prefix string.
 static void
 PrefixArrayToPrefixStringMap(const _PrefixArray& prefixArray,
                              PrefixStringMap& outMap)
 {
   outMap.Clear();
 
   for (uint32_t i = 0; i < prefixArray.Length(); i++) {
     const _Prefix& prefix = prefixArray[i];
     nsCString* prefixString = outMap.LookupOrAdd(prefix.Length());
     prefixString->Append(prefix.BeginReading(), prefix.Length());
   }
 }
 
+static void
+CalculateCheckSum(_PrefixArray& prefixArray, nsCString& checksum)
+{
+  prefixArray.Sort();
+
+  nsresult rv;
+  nsCOMPtr<nsICryptoHash> cryptoHash =
+    do_CreateInstance(NS_CRYPTO_HASH_CONTRACTID, &rv);
+
+  cryptoHash->Init(nsICryptoHash::SHA256);
+  for (uint32_t i = 0; i < prefixArray.Length(); i++) {
+    const _Prefix& prefix = prefixArray[i];
+    cryptoHash->Update(reinterpret_cast<uint8_t*>(
+                       const_cast<char*>(prefix.get())), prefix.Length());
+  }
+  cryptoHash->Finish(false, checksum);
+}
+
 // N: Number of prefixes, MIN/MAX: minimum/maximum prefix size
 // This function will append generated prefixes to outArray.
 static void
 CreateRandomSortedPrefixArray(uint32_t N,
                               uint32_t MIN,
                               uint32_t MAX,
                               _PrefixArray& outArray)
 {
@@ -98,47 +134,56 @@ CreateRandomRemovalIndices(uint32_t N,
     }
   }
 }
 
 // Function to generate TableUpdateV4.
 static void
 GenerateUpdateData(bool fullUpdate,
                    PrefixStringMap& add,
-                   nsTArray<uint32_t>& removal,
+                   nsTArray<uint32_t>* removal,
+                   nsCString* checksum,
                    nsTArray<TableUpdate*>& tableUpdates)
 {
-  TableUpdateV4* tableUpdate = new TableUpdateV4(NS_LITERAL_CSTRING("gtest-malware-proto"));
+  TableUpdateV4* tableUpdate = new TableUpdateV4(GTEST_TABLE);
   tableUpdate->SetFullUpdate(fullUpdate);
 
   for (auto iter = add.ConstIter(); !iter.Done(); iter.Next()) {
     nsCString* pstring = iter.Data();
     std::string str(pstring->BeginReading(), pstring->Length());
 
     tableUpdate->NewPrefixes(iter.Key(), str);
   }
 
-  tableUpdate->NewRemovalIndices(removal.Elements(), removal.Length());
+  if (removal) {
+    tableUpdate->NewRemovalIndices(removal->Elements(), removal->Length());
+  }
+
+  if (checksum) {
+    std::string stdChecksum;
+    stdChecksum.assign(const_cast<char*>(checksum->BeginReading()), checksum->Length());
+
+    tableUpdate->NewChecksum(stdChecksum);
+  }
 
   tableUpdates.AppendElement(tableUpdate);
 }
 
 static void
 VerifyPrefixSet(PrefixStringMap& expected)
 {
   // Verify the prefix set is written to disk.
   nsCOMPtr<nsIFile> file;
-  NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR,
-                         getter_AddRefs(file));
+  NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
 
-  file->AppendNative(NS_LITERAL_CSTRING("safebrowsing"));
-  file->AppendNative(NS_LITERAL_CSTRING("gtest-malware-proto.pset"));
+  file->AppendNative(GTEST_SAFEBROWSING_DIR);
+  file->AppendNative(GTEST_PREFIXFILE);
 
   RefPtr<VariableLengthPrefixSet> load = new VariableLengthPrefixSet;
-  load->Init(NS_LITERAL_CSTRING("gtest-malware-proto"));
+  load->Init(GTEST_TABLE);
 
   PrefixStringMap prefixesInFile;
   load->LoadFromFile(file);
   load->GetPrefixes(prefixesInFile);
 
   for (auto iter = expected.ConstIter(); !iter.Done(); iter.Next()) {
     nsCString* expectedPrefix = iter.Data();
     nsCString* resultPrefix = prefixesInFile.Get(iter.Key());
@@ -146,404 +191,579 @@ VerifyPrefixSet(PrefixStringMap& expecte
     ASSERT_TRUE(*resultPrefix == *expectedPrefix);
   }
 }
 
 static void
 Clear()
 {
   nsCOMPtr<nsIFile> file;
-  NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR,
-                         getter_AddRefs(file));
+  NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
 
   UniquePtr<Classifier> classifier(new Classifier());
   classifier->Open(*file);
   classifier->Reset();
 }
 
 static void
 testUpdateFail(nsTArray<TableUpdate*>& tableUpdates)
 {
   nsCOMPtr<nsIFile> file;
-  NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR,
-                         getter_AddRefs(file));
+  NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
 
   UniquePtr<Classifier> classifier(new Classifier());
   classifier->Open(*file);
 
   RunTestInNewThread([&] () -> void {
     nsresult rv = classifier->ApplyUpdates(&tableUpdates);
     ASSERT_TRUE(NS_FAILED(rv));
   });
 }
 
 static void
 testUpdate(nsTArray<TableUpdate*>& tableUpdates,
            PrefixStringMap& expected)
 {
   nsCOMPtr<nsIFile> file;
-  NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR,
-                         getter_AddRefs(file));
+  NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
 
   UniquePtr<Classifier> classifier(new Classifier());
   classifier->Open(*file);
 
   RunTestInNewThread([&] () -> void {
     nsresult rv = classifier->ApplyUpdates(&tableUpdates);
     ASSERT_TRUE(rv == NS_OK);
 
     VerifyPrefixSet(expected);
   });
 }
 
 static void
-testFullUpdate(PrefixStringMap& add)
+testFullUpdate(PrefixStringMap& add, nsCString* checksum)
 {
-  nsTArray<uint32_t> empty;
   nsTArray<TableUpdate*> tableUpdates;
-  GenerateUpdateData(true, add, empty, tableUpdates);
+
+  GenerateUpdateData(true, add, nullptr, checksum, tableUpdates);
 
   testUpdate(tableUpdates, add);
 }
 
 static void
 testPartialUpdate(PrefixStringMap& add,
-                  nsTArray<uint32_t>& removal,
+                  nsTArray<uint32_t>* removal,
+                  nsCString* checksum,
                   PrefixStringMap& expected)
 {
   nsTArray<TableUpdate*> tableUpdates;
-  GenerateUpdateData(false, add, removal, tableUpdates);
+  GenerateUpdateData(false, add, removal, checksum, tableUpdates);
 
   testUpdate(tableUpdates, expected);
 }
 
+static void
+testOpenLookupCache()
+{
+  nsCOMPtr<nsIFile> file;
+  NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
+  file->AppendNative(GTEST_SAFEBROWSING_DIR);
 
+  RunTestInNewThread([&] () -> void {
+    LookupCacheV4 cache(nsCString(GTEST_TABLE), file);
+    nsresult rv = cache.Init();
+    ASSERT_EQ(rv, NS_OK);
+
+    rv = cache.Open();
+    ASSERT_EQ(rv, NS_OK);
+  });
+}
+
+// Tests start from here.
 TEST(UrlClassifierTableUpdateV4, FixLenghtPSetFullUpdate)
 {
   srand(time(NULL));
 
    _PrefixArray array;
   PrefixStringMap map;
+  nsCString checksum;
 
   CreateRandomSortedPrefixArray(5000, 4, 4, array);
   PrefixArrayToPrefixStringMap(array, map);
+  CalculateCheckSum(array, checksum);
 
-  testFullUpdate(map);
+  testFullUpdate(map, &checksum);
 
   Clear();
 }
 
-
 TEST(UrlClassifierTableUpdateV4, VariableLenghtPSetFullUpdate)
 {
    _PrefixArray array;
   PrefixStringMap map;
+  nsCString checksum;
 
   CreateRandomSortedPrefixArray(5000, 5, 32, array);
   PrefixArrayToPrefixStringMap(array, map);
+  CalculateCheckSum(array, checksum);
 
-  testFullUpdate(map);
+  testFullUpdate(map, &checksum);
 
   Clear();
 }
 
 // This test contain both variable length prefix set and fixed-length prefix set
 TEST(UrlClassifierTableUpdateV4, MixedPSetFullUpdate)
 {
    _PrefixArray array;
   PrefixStringMap map;
+  nsCString checksum;
 
   CreateRandomSortedPrefixArray(5000, 4, 4, array);
   CreateRandomSortedPrefixArray(1000, 5, 32, array);
   PrefixArrayToPrefixStringMap(array, map);
+  CalculateCheckSum(array, checksum);
 
-  testFullUpdate(map);
+  testFullUpdate(map, &checksum);
 
   Clear();
 }
 
 TEST(UrlClassifierTableUpdateV4, PartialUpdateWithRemoval)
 {
-  _PrefixArray fArray, pArray, mergedArray;
-  PrefixStringMap fMap, pMap, mergedMap;
+  _PrefixArray fArray;
 
+  // Apply a full update first.
   {
+    PrefixStringMap fMap;
+    nsCString checksum;
+
     CreateRandomSortedPrefixArray(10000, 4, 4, fArray);
     CreateRandomSortedPrefixArray(2000, 5, 32, fArray);
     PrefixArrayToPrefixStringMap(fArray, fMap);
+    CalculateCheckSum(fArray, checksum);
 
-    testFullUpdate(fMap);
+    testFullUpdate(fMap, &checksum);
   }
 
+  // Apply a partial update with removal.
   {
+    _PrefixArray pArray, mergedArray;
+    PrefixStringMap pMap, mergedMap;
+    nsCString checksum;
+
     CreateRandomSortedPrefixArray(5000, 4, 4, pArray);
     CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
     RemoveIntersection(fArray, pArray);
     PrefixArrayToPrefixStringMap(pArray, pMap);
 
     // Remove 1/5 of elements of original prefix set.
     nsTArray<uint32_t> removal;
     CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal);
     RemoveElements(removal, fArray);
 
     // Calculate the expected prefix map.
-    mergedArray.AppendElements(fArray);
-    mergedArray.AppendElements(pArray);
-    mergedArray.Sort();
+    MergeAndSortArray(fArray, pArray, mergedArray);
     PrefixArrayToPrefixStringMap(mergedArray, mergedMap);
+    CalculateCheckSum(mergedArray, checksum);
 
-    testPartialUpdate(pMap, removal, mergedMap);
+    testPartialUpdate(pMap, &removal, &checksum, mergedMap);
   }
 
   Clear();
 }
 
 TEST(UrlClassifierTableUpdateV4, PartialUpdateWithoutRemoval)
 {
-  _PrefixArray fArray, pArray, mergedArray;
-  PrefixStringMap fMap, pMap, mergedMap;
+  _PrefixArray fArray;
 
+  // Apply a full update first.
   {
+    PrefixStringMap fMap;
+    nsCString checksum;
+
     CreateRandomSortedPrefixArray(10000, 4, 4, fArray);
     CreateRandomSortedPrefixArray(2000, 5, 32, fArray);
     PrefixArrayToPrefixStringMap(fArray, fMap);
+    CalculateCheckSum(fArray, checksum);
 
-    testFullUpdate(fMap);
+    testFullUpdate(fMap, &checksum);
   }
 
+  // Apply a partial update without removal
   {
-    nsTArray<uint32_t> empty;
+    _PrefixArray pArray, mergedArray;
+    PrefixStringMap pMap, mergedMap;
+    nsCString checksum;
 
     CreateRandomSortedPrefixArray(5000, 4, 4, pArray);
     CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
     RemoveIntersection(fArray, pArray);
     PrefixArrayToPrefixStringMap(pArray, pMap);
 
     // Calculate the expected prefix map.
-    mergedArray.AppendElements(fArray);
-    mergedArray.AppendElements(pArray);
-    mergedArray.Sort();
+    MergeAndSortArray(fArray, pArray, mergedArray);
     PrefixArrayToPrefixStringMap(mergedArray, mergedMap);
+    CalculateCheckSum(mergedArray, checksum);
 
-    testPartialUpdate(pMap, empty, mergedMap);
+    testPartialUpdate(pMap, nullptr, &checksum, mergedMap);
   }
 
   Clear();
 }
 
 // Expect failure because partial update contains prefix already
 // in old prefix set.
 TEST(UrlClassifierTableUpdateV4, PartialUpdatePrefixAlreadyExist)
 {
-  _PrefixArray fArray, pArray;
-  PrefixStringMap fMap, pMap;
+  _PrefixArray fArray;
 
+  // Apply a full update first.
   {
+    PrefixStringMap fMap;
+    nsCString checksum;
+
     CreateRandomSortedPrefixArray(1000, 4, 32, fArray);
     PrefixArrayToPrefixStringMap(fArray, fMap);
+    CalculateCheckSum(fArray, checksum);
 
-    testFullUpdate(fMap);
+    testFullUpdate(fMap, &checksum);
   }
 
+  // Apply a partial update which contains a prefix in previous full update.
+  // This should cause an update error.
   {
-    nsTArray<uint32_t> empty;
+    _PrefixArray pArray;
+    PrefixStringMap pMap;
     nsTArray<TableUpdate*> tableUpdates;
 
     // Pick one prefix from full update prefix and add it to partial update.
     // This should result a failure when call ApplyUpdates.
     pArray.AppendElement(fArray[rand() % fArray.Length()]);
     CreateRandomSortedPrefixArray(200, 4, 32, pArray);
     PrefixArrayToPrefixStringMap(pArray, pMap);
 
-    GenerateUpdateData(false, pMap, empty, tableUpdates);
+    GenerateUpdateData(false, pMap, nullptr, nullptr, tableUpdates);
     testUpdateFail(tableUpdates);
   }
 
   Clear();
 }
 
 // Test apply partial update directly without applying an full update first.
 TEST(UrlClassifierTableUpdateV4, OnlyPartialUpdate)
 {
   _PrefixArray pArray;
   PrefixStringMap pMap;
-  nsTArray<uint32_t> empty;
+  nsCString checksum;
 
   CreateRandomSortedPrefixArray(5000, 4, 4, pArray);
   CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
   PrefixArrayToPrefixStringMap(pArray, pMap);
+  CalculateCheckSum(pArray, checksum);
 
-  testPartialUpdate(pMap, empty, pMap);
+  testPartialUpdate(pMap, nullptr, &checksum, pMap);
 
   Clear();
 }
 
 // Test partial update without any ADD prefixes, only removalIndices.
 TEST(UrlClassifierTableUpdateV4, PartialUpdateOnlyRemoval)
 {
-  _PrefixArray fArray, pArray;
-  PrefixStringMap fMap, pMap, mergedMap;
+  _PrefixArray fArray;
 
+  // Apply a full update first.
   {
+    PrefixStringMap fMap;
+    nsCString checksum;
+
     CreateRandomSortedPrefixArray(5000, 4, 4, fArray);
     CreateRandomSortedPrefixArray(1000, 5, 32, fArray);
     PrefixArrayToPrefixStringMap(fArray, fMap);
+    CalculateCheckSum(fArray, checksum);
 
-    testFullUpdate(fMap);
+    testFullUpdate(fMap, &checksum);
   }
 
+  // Apply a partial update with no add prefixes, only containing removal indices.
   {
+    _PrefixArray pArray;
+    PrefixStringMap pMap, mergedMap;
+    nsCString checksum;
+
     // Remove 1/5 of elements of original prefix set.
     nsTArray<uint32_t> removal;
     CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal);
     RemoveElements(removal, fArray);
 
     PrefixArrayToPrefixStringMap(fArray, mergedMap);
+    CalculateCheckSum(fArray, checksum);
 
-    testPartialUpdate(pMap, removal, mergedMap);
+    testPartialUpdate(pMap, &removal, &checksum, mergedMap);
   }
 
   Clear();
 }
 
 // Test one tableupdate array contains full update and multiple partial updates.
 TEST(UrlClassifierTableUpdateV4, MultipleTableUpdates)
 {
   _PrefixArray fArray, pArray, mergedArray;
   PrefixStringMap fMap, pMap, mergedMap;
+  nsCString checksum;
 
-  {
-    nsTArray<uint32_t> empty;
-    nsTArray<TableUpdate*> tableUpdates;
+  nsTArray<TableUpdate*> tableUpdates;
 
-    // Generate first full udpate
-    CreateRandomSortedPrefixArray(10000, 4, 4, fArray);
-    CreateRandomSortedPrefixArray(2000, 5, 32, fArray);
-    PrefixArrayToPrefixStringMap(fArray, fMap);
+  // Generate first full update
+  CreateRandomSortedPrefixArray(10000, 4, 4, fArray);
+  CreateRandomSortedPrefixArray(2000, 5, 32, fArray);
+  PrefixArrayToPrefixStringMap(fArray, fMap);
+  CalculateCheckSum(fArray, checksum);
 
-    GenerateUpdateData(true, fMap, empty, tableUpdates);
+  GenerateUpdateData(true, fMap, nullptr, &checksum, tableUpdates);
 
-    // Generate second partial update
-    CreateRandomSortedPrefixArray(3000, 4, 4, pArray);
-    CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
-    RemoveIntersection(fArray, pArray);
-    PrefixArrayToPrefixStringMap(pArray, pMap);
+  // Generate second partial update
+  CreateRandomSortedPrefixArray(3000, 4, 4, pArray);
+  CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
+  RemoveIntersection(fArray, pArray);
+  PrefixArrayToPrefixStringMap(pArray, pMap);
 
-    GenerateUpdateData(false, pMap, empty, tableUpdates);
+  MergeAndSortArray(fArray, pArray, mergedArray);
+  CalculateCheckSum(mergedArray, checksum);
 
-    // Generate thrid partial update
-    fArray.AppendElements(pArray);
-    fArray.Sort();
-    pArray.Clear();
-    CreateRandomSortedPrefixArray(3000, 4, 4, pArray);
-    CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
-    RemoveIntersection(fArray, pArray);
-    PrefixArrayToPrefixStringMap(pArray, pMap);
+  GenerateUpdateData(false, pMap, nullptr, &checksum, tableUpdates);
+
+  // Generate third partial update
+  fArray.AppendElements(pArray);
+  fArray.Sort();
+  pArray.Clear();
+  CreateRandomSortedPrefixArray(3000, 4, 4, pArray);
+  CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
+  RemoveIntersection(fArray, pArray);
+  PrefixArrayToPrefixStringMap(pArray, pMap);
 
-    // Remove 1/5 of elements of original prefix set.
-    nsTArray<uint32_t> removal;
-    CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal);
-    RemoveElements(removal, fArray);
-
-    GenerateUpdateData(false, pMap, removal, tableUpdates);
+  // Remove 1/5 of elements of original prefix set.
+  nsTArray<uint32_t> removal;
+  CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal);
+  RemoveElements(removal, fArray);
 
-    mergedArray.AppendElements(fArray);
-    mergedArray.AppendElements(pArray);
-    mergedArray.Sort();
-    PrefixArrayToPrefixStringMap(mergedArray, mergedMap);
+  MergeAndSortArray(fArray, pArray, mergedArray);
+  PrefixArrayToPrefixStringMap(mergedArray, mergedMap);
+  CalculateCheckSum(mergedArray, checksum);
 
-    testUpdate(tableUpdates, mergedMap);
-  }
+  GenerateUpdateData(false, pMap, &removal, &checksum, tableUpdates);
+
+  testUpdate(tableUpdates, mergedMap);
 
   Clear();
 }
 
 // Test apply full update first, and then apply multiple partial updates
 // in one tableupdate array.
 TEST(UrlClassifierTableUpdateV4, MultiplePartialUpdateTableUpdates)
 {
-  _PrefixArray fArray, pArray, mergedArray;
-  PrefixStringMap fMap, pMap, mergedMap;
+  _PrefixArray fArray;
 
+  // Apply a full update first
   {
+    PrefixStringMap fMap;
+    nsCString checksum;
+
     // Generate first full udpate
     CreateRandomSortedPrefixArray(10000, 4, 4, fArray);
     CreateRandomSortedPrefixArray(3000, 5, 32, fArray);
     PrefixArrayToPrefixStringMap(fArray, fMap);
+    CalculateCheckSum(fArray, checksum);
 
-    testFullUpdate(fMap);
+    testFullUpdate(fMap, &checksum);
   }
 
+  // Apply multiple partial updates in one table update
   {
+    _PrefixArray pArray, mergedArray;
+    PrefixStringMap pMap, mergedMap;
+    nsCString checksum;
     nsTArray<uint32_t> removal;
     nsTArray<TableUpdate*> tableUpdates;
 
     // Generate first partial update
     CreateRandomSortedPrefixArray(3000, 4, 4, pArray);
     CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
     RemoveIntersection(fArray, pArray);
     PrefixArrayToPrefixStringMap(pArray, pMap);
 
     // Remove 1/5 of elements of original prefix set.
     CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal);
     RemoveElements(removal, fArray);
 
-    GenerateUpdateData(false, pMap, removal, tableUpdates);
+    MergeAndSortArray(fArray, pArray, mergedArray);
+    CalculateCheckSum(mergedArray, checksum);
+
+    GenerateUpdateData(false, pMap, &removal, &checksum, tableUpdates);
 
     fArray.AppendElements(pArray);
     fArray.Sort();
     pArray.Clear();
     removal.Clear();
 
     // Generate second partial update.
     CreateRandomSortedPrefixArray(2000, 4, 4, pArray);
     CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
     RemoveIntersection(fArray, pArray);
     PrefixArrayToPrefixStringMap(pArray, pMap);
 
     // Remove 1/5 of elements of original prefix set.
     CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal);
     RemoveElements(removal, fArray);
 
-    GenerateUpdateData(false, pMap, removal, tableUpdates);
+    MergeAndSortArray(fArray, pArray, mergedArray);
+    PrefixArrayToPrefixStringMap(mergedArray, mergedMap);
+    CalculateCheckSum(mergedArray, checksum);
 
-    mergedArray.AppendElements(fArray);
-    mergedArray.AppendElements(pArray);
-    mergedArray.Sort();
-    PrefixArrayToPrefixStringMap(mergedArray, mergedMap);
+    GenerateUpdateData(false, pMap, &removal, &checksum, tableUpdates);
 
     testUpdate(tableUpdates, mergedMap);
   }
 
   Clear();
 }
 
 // Test removal indices are larger than the original prefix set.
 TEST(UrlClassifierTableUpdateV4, RemovalIndexTooLarge)
 {
-  _PrefixArray fArray, pArray;
-  PrefixStringMap fMap, pMap;
+  _PrefixArray fArray;
 
+  // Apply a full update first
   {
+    PrefixStringMap fMap;
+    nsCString checksum;
+
     CreateRandomSortedPrefixArray(1000, 4, 32, fArray);
     PrefixArrayToPrefixStringMap(fArray, fMap);
+    CalculateCheckSum(fArray, checksum);
 
-    testFullUpdate(fMap);
+    testFullUpdate(fMap, &checksum);
   }
 
+  // Apply a partial update with a removal indices array larger than the
+  // old prefix set (fArray). This should cause an error.
   {
+    _PrefixArray pArray;
+    PrefixStringMap pMap;
     nsTArray<uint32_t> removal;
     nsTArray<TableUpdate*> tableUpdates;
 
     CreateRandomSortedPrefixArray(200, 4, 32, pArray);
     RemoveIntersection(fArray, pArray);
     PrefixArrayToPrefixStringMap(pArray, pMap);
 
     for (uint32_t i = 0; i < fArray.Length() + 1 ;i++) {
       removal.AppendElement(i);
     }
 
-    GenerateUpdateData(false, pMap, removal, tableUpdates);
+    GenerateUpdateData(false, pMap, &removal, nullptr, tableUpdates);
+    testUpdateFail(tableUpdates);
+  }
+
+  Clear();
+}
+
+TEST(UrlClassifierTableUpdateV4, ChecksumMismatch)
+{
+  // Apply a full update first
+  {
+    _PrefixArray fArray;
+    PrefixStringMap fMap;
+    nsCString checksum;
+
+    CreateRandomSortedPrefixArray(1000, 4, 32, fArray);
+    PrefixArrayToPrefixStringMap(fArray, fMap);
+    CalculateCheckSum(fArray, checksum);
+
+    testFullUpdate(fMap, &checksum);
+  }
+
+  // Apply a partial update with incorrect checksum
+  {
+    _PrefixArray pArray;
+    PrefixStringMap pMap;
+    nsCString checksum;
+    nsTArray<TableUpdate*> tableUpdates;
+
+    CreateRandomSortedPrefixArray(200, 4, 32, pArray);
+    PrefixArrayToPrefixStringMap(pArray, pMap);
+
+    // The checksum should be calculated over both the old prefix set and the
+    // add prefix set; here we calculate it over only the add prefix set, to
+    // check that applyUpdate returns a failure.
+    CalculateCheckSum(pArray, checksum);
+
+    GenerateUpdateData(false, pMap, nullptr, &checksum, tableUpdates);
     testUpdateFail(tableUpdates);
   }
 
   Clear();
 }
+
+TEST(UrlClassifierTableUpdateV4, ApplyUpdateThenLoad)
+{
+  // Apply update with checksum
+  {
+    _PrefixArray fArray;
+    PrefixStringMap fMap;
+    nsCString checksum;
+
+    CreateRandomSortedPrefixArray(1000, 4, 32, fArray);
+    PrefixArrayToPrefixStringMap(fArray, fMap);
+    CalculateCheckSum(fArray, checksum);
+
+    testFullUpdate(fMap, &checksum);
+
+    // Open lookup cache will load prefix set and verify the checksum
+    testOpenLookupCache();
+  }
+
+  Clear();
+
+  // Apply update without checksum
+  {
+    _PrefixArray fArray;
+    PrefixStringMap fMap;
+
+    CreateRandomSortedPrefixArray(1000, 4, 32, fArray);
+    PrefixArrayToPrefixStringMap(fArray, fMap);
+
+    testFullUpdate(fMap, nullptr);
+
+    testOpenLookupCache();
+  }
+
+  Clear();
+}
+
+// This test is used to avoid an error from nsICryptoHash
+TEST(UrlClassifierTableUpdateV4, ApplyUpdateWithFixedChecksum)
+{
+  _PrefixArray fArray = { _Prefix("enus"), _Prefix("apollo"), _Prefix("mars"),
+                          _Prefix("Hecatonchires cyclopes"),
+                          _Prefix("vesta"), _Prefix("neptunus"), _Prefix("jupiter"),
+                          _Prefix("diana"), _Prefix("minerva"), _Prefix("ceres"),
+                          _Prefix("Aidos,Adephagia,Adikia,Aletheia"),
+                          _Prefix("hecatonchires"), _Prefix("alcyoneus"), _Prefix("hades"),
+                          _Prefix("vulcanus"), _Prefix("juno"), _Prefix("mercury"),
+                          _Prefix("Stheno, Euryale and Medusa")
+                        };
+  fArray.Sort();
+
+  PrefixStringMap fMap;
+  PrefixArrayToPrefixStringMap(fArray, fMap);
+
+  nsCString checksum("\xae\x18\x94\xd7\xd0\x83\x5f\xc1"
+                     "\x58\x59\x5c\x2c\x72\xb9\x6e\x5e"
+                     "\xf4\xe8\x0a\x6b\xff\x5e\x6b\x81"
+                     "\x65\x34\x06\x16\x06\x59\xa0\x67");
+
+  testFullUpdate(fMap, &checksum);
+
+  // Open lookup cache will load prefix set and verify the checksum
+  testOpenLookupCache();
+
+  Clear();
+}
+
--- a/toolkit/components/url-classifier/tests/unit/test_listmanager.js
+++ b/toolkit/components/url-classifier/tests/unit/test_listmanager.js
@@ -64,17 +64,17 @@ let gExpectedQueryV4 = "";
 let gHttpServV4 = null;
 
 // These two variables are used to synchronize the last two racing updates
 // (in terms of "update URL") in test_update_all_tables().
 let gUpdatedCntForTableData = 0; // For TEST_TABLE_DATA_LIST.
 let gIsV4Updated = false;   // For TEST_TABLE_DATA_V4.
 
 const NEW_CLIENT_STATE = 'sta\0te';
-const CHECKSUM = 'check\0sum';
+const CHECKSUM = '\x30\x67\xc7\x2c\x5e\x50\x1c\x31\xe3\xfe\xca\x73\xf0\x47\xdc\x34\x1a\x95\x63\x99\xec\x70\x5e\x0a\xee\x9e\xfb\x17\xa1\x55\x35\x78';
 
 prefBranch.setBoolPref("browser.safebrowsing.debug", true);
 
 // The "\xFF\xFF" is to generate a base64 string with "/".
 prefBranch.setCharPref("browser.safebrowsing.id", "Firefox\xFF\xFF");
 
 // Register tables.
 TEST_TABLE_DATA_LIST.forEach(function(t) {
@@ -257,24 +257,24 @@ function run_test() {
 
     // The protobuf binary represention of response:
     //
     // [
     //   {
     //     'threat_type': 2, // SOCIAL_ENGINEERING_PUBLIC
     //     'response_type': 2, // FULL_UPDATE
     //     'new_client_state': 'sta\x00te', // NEW_CLIENT_STATE
-    //     'checksum': { "sha256": 'check\x00sum' }, // CHECKSUM
+    //     'checksum': { "sha256": CHECKSUM }, // CHECKSUM
     //     'additions': { 'compression_type': RAW,
     //                    'prefix_size': 4,
     //                    'raw_hashes': "00000001000000020000000300000004"}
     //   }
     // ]
     //
-    let content = "\x0A\x33\x08\x02\x20\x02\x2A\x18\x08\x01\x12\x14\x08\x04\x12\x10\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03\x3A\x06\x73\x74\x61\x00\x74\x65\x42\x0B\x0A\x09\x63\x68\x65\x63\x6B\x00\x73\x75\x6D\x12\x08\x08\x08\x10\x80\x94\xEB\xDC\x03";
+    let content = "\x0A\x4A\x08\x02\x20\x02\x2A\x18\x08\x01\x12\x14\x08\x04\x12\x10\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03\x3A\x06\x73\x74\x61\x00\x74\x65\x42\x22\x0A\x20\x30\x67\xC7\x2C\x5E\x50\x1C\x31\xE3\xFE\xCA\x73\xF0\x47\xDC\x34\x1A\x95\x63\x99\xEC\x70\x5E\x0A\xEE\x9E\xFB\x17\xA1\x55\x35\x78\x12\x08\x08\x08\x10\x80\x94\xEB\xDC\x03";
 
     response.bodyOutputStream.write(content, content.length);
 
     if (gIsV4Updated) {
       // This falls to the case where test_partialUpdateV4 is running.
       // We are supposed to have verified the update request contains
       // the state we set in the previous request.
       run_next_test();