Bug 1509962: [update-verify] Import update-verify and final-verify code from build-tools; r=mtabara
author: Tom Prince <mozilla@hocat.ca>
date: Mon, 03 Dec 2018 06:44:16 +0000
changeset 505628 3ed6bf5ca77217e5884de76be8e66d8cf880b251
parent 505627 9be51673e21f4e33ade4f897dc4e606f1d0568a4
child 505629 4c4352291d629aaaaf39718c3cea57870db048d1
push id: 10290
push user: ffxbld-merge
push date: Mon, 03 Dec 2018 16:23:23 +0000
treeherder: mozilla-beta@700bed2445e6
reviewers: mtabara
bugs: 1509962
milestone: 65.0a1
Bug 1509962: [update-verify] Import update-verify and final-verify code from build-tools; r=mtabara

Differential Revision: https://phabricator.services.mozilla.com/D12976
tools/moz.build
tools/update-verify/python/util/__init__.py
tools/update-verify/python/util/commands.py
tools/update-verify/release/README.txt
tools/update-verify/release/common/cached_download.sh
tools/update-verify/release/common/check_updates.sh
tools/update-verify/release/common/download_builds.sh
tools/update-verify/release/common/download_mars.sh
tools/update-verify/release/common/installdmg.ex
tools/update-verify/release/common/unpack-diskimage.sh
tools/update-verify/release/common/unpack.sh
tools/update-verify/release/compare-directories.py
tools/update-verify/release/final-verification.sh
tools/update-verify/release/get-update-xml.sh
tools/update-verify/release/mar_certs/README
tools/update-verify/release/mar_certs/dep1.der
tools/update-verify/release/mar_certs/dep2.der
tools/update-verify/release/mar_certs/nightly_aurora_level3_primary.der
tools/update-verify/release/mar_certs/nightly_aurora_level3_secondary.der
tools/update-verify/release/mar_certs/release_primary.der
tools/update-verify/release/mar_certs/release_secondary.der
tools/update-verify/release/mar_certs/xpcshellCertificate.der
tools/update-verify/release/replace-updater-certs.py
tools/update-verify/release/test-mar-url.sh
tools/update-verify/release/updates/verify.sh
tools/update-verify/scripts/chunked-verify.py
tools/update-verify/scripts/chunked-verify.sh
--- a/tools/moz.build
+++ b/tools/moz.build
@@ -42,16 +42,19 @@ with Files("tryselect/**"):
     BUG_COMPONENT = ("Testing", "General")
 
 with Files("tryselect/selectors/release.py"):
     BUG_COMPONENT = ("Release Engineering", "General")
 
 with Files("update-packaging/**"):
     BUG_COMPONENT = ("Release Engineering", "Other")
 
+with Files("update-verify/**"):
+    BUG_COMPONENT = ("Release Engineering", "Release Automation: Updates")
+
 SPHINX_TREES['lint'] = 'lint/docs'
 
 with Files('lint/docs/**'):
     SCHEDULES.exclusive = ['docs']
 
 SPHINX_TREES['compare-locales'] = 'compare-locales/docs'
 
 with Files('compare-locales/docs/**'):
new file mode 100644
--- /dev/null
+++ b/tools/update-verify/python/util/__init__.py
new file mode 100644
--- /dev/null
+++ b/tools/update-verify/python/util/commands.py
@@ -0,0 +1,49 @@
+"""Functions for running commands"""
+import subprocess
+import os
+import time
+import logging
+log = logging.getLogger(__name__)
+
+
+# timeout message, used by callers and tests when a process is terminated
+TERMINATED_PROCESS_MSG = "timeout, process terminated"
+
+
+def log_cmd(cmd, **kwargs):
+    # cwd is special in that we always want it printed, even if it's not
+    # explicitly chosen
+    kwargs = kwargs.copy()
+    if 'cwd' not in kwargs:
+        kwargs['cwd'] = os.getcwd()
+    log.info("command: START")
+    log.info("command: %s" % subprocess.list2cmdline(cmd))
+    for key, value in kwargs.items():
+        log.info("command: %s: %s", key, str(value))
+
+
+def merge_env(env):
+    new_env = os.environ.copy()
+    new_env.update(env)
+    return new_env
+
+
+def run_cmd(cmd, **kwargs):
+    """Run cmd (a list of arguments).  Raise subprocess.CalledProcessError if
+    the command exits with non-zero.  If the command returns successfully,
+    return 0."""
+    log_cmd(cmd, **kwargs)
+    # We update this after logging because we don't want all of the inherited
+    # env vars muddling up the output
+    if 'env' in kwargs:
+        kwargs['env'] = merge_env(kwargs['env'])
+    try:
+        t = time.time()
+        log.info("command: output:")
+        return subprocess.check_call(cmd, **kwargs)
+    except subprocess.CalledProcessError:
+        log.info('command: ERROR', exc_info=True)
+        raise
+    finally:
+        elapsed = time.time() - t
+        log.info("command: END (%.2fs elapsed)\n", elapsed)
new file mode 100644
--- /dev/null
+++ b/tools/update-verify/release/README.txt
@@ -0,0 +1,46 @@
+Mozilla Build Verification Scripts
+---
+
+--
+Contents
+--
+
+updates -> AUS and update verification
+l10n    -> l10n vs. en-US verification
+common  -> useful utility scripts
+
+--
+Update verification
+--
+
+verify.sh
+  does a low-level check of all advertised MAR files. Expects to have a
+  file named all-locales, but does not (yet) handle platform exceptions, so 
+  these should be removed from the locales file.
+
+  prints errors on both STDOUT and STDERR; the intention is to run the
+  script with STDOUT redirected to an output log. If there is no output
+  on the console and the exit code is 0, then all tests passed; otherwise
+  one or more tests failed.
+
+  Does the following:
+
+  1) download update.xml from AUS for a particular release
+  2) download the partial and full mar advertised
+  3) check that the partial and full match the advertised size and sha1sum
+  4) download the latest release, and an older release
+  5) apply the MAR to the older release, and compare the two releases.
+  
+  Step 5 is repeated for both the complete and partial MAR.
+
+  Expects to have an updates.cfg file, describing all releases to try updating 
+  from.
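+
+  A hypothetical updates.cfg entry (field names mirror the variables these
+  scripts eval; all values here are made up):
+
+    release="64.0" product="Firefox" platform="Linux_x86_64-gcc3"
+      build_id="20181206201918" locales="de en-US" channel="beta-cdntest"
+      patch_types="complete partial"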
+
+-
+Valid Platforms for AUS
+-
+Linux_x86-gcc3
+Darwin_Universal-gcc3
+WINNT_x86-msvc
+Darwin_ppc-gcc3
new file mode 100644
--- /dev/null
+++ b/tools/update-verify/release/common/cached_download.sh
@@ -0,0 +1,42 @@
+# this library works like a wrapper around wget, to allow downloads to be cached
+# so that if later the same url is retrieved, the entry from the cache will be
+# returned.
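+#
+# typical usage (a sketch; $retry is expected to be provided by the caller,
+# as in download_builds.sh, and the url is hypothetical):
+#   create_cache
+#   cached_download target.mar "https://example.com/path/target.mar"
+#   clear_cache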
+
+pushd `dirname $0` &>/dev/null
+cache_dir="$(pwd)/cache"
+popd &>/dev/null
+
+# to clear the entire cache, recommended at beginning and end of scripts that call it
+clear_cache () {
+    rm -rf "${cache_dir}"
+}
+
+# creates an empty cache, should be called once before downloading anything
+function create_cache () {
+    mkdir "${cache_dir}"
+    touch "${cache_dir}/urls.list"
+}
+
+# download method - you pass a filename to save the file under, and the url to call
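+# cached objects are stored as "obj_NNNNN.cache", where NNNNN is the zero-padded
+# line number of the url within urls.list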
+cached_download () {
+    local output_file="${1}"
+    local url="${2}"
+
+    if fgrep -x "${url}" "${cache_dir}/urls.list" >/dev/null; then
+        echo "Retrieving '${url}' from cache..."
+        local line_number="$(fgrep -nx  "${url}" "${cache_dir}/urls.list" | sed 's/:.*//')"
+        cp "${cache_dir}/obj_$(printf "%05d\n" "${line_number}").cache" "${output_file}"
+    else
+        echo "Downloading '${url}' and placing in cache..."
+        rm -f "${output_file}"
+        $retry wget -O "${output_file}" --progress=dot:mega --server-response --no-check-certificate "${url}" 2>&1
+        local exit_code=$?
+        if [ "${exit_code}" == 0 ]; then
+            echo "${url}" >> "${cache_dir}/urls.list"
+            local line_number="$(fgrep -nx  "${url}" "${cache_dir}/urls.list" | sed 's/:.*//')"
+            cp "${output_file}" "${cache_dir}/obj_$(printf "%05d\n" "${line_number}").cache"
+        else
+            return "${exit_code}"
+        fi
+    fi
+}
new file mode 100755
--- /dev/null
+++ b/tools/update-verify/release/common/check_updates.sh
@@ -0,0 +1,114 @@
+check_updates () {
+  # called with 9 args - platform, source package, target package, locale,
+  # old updater boolean, a path to the updater binary to use for the tests,
+  # a file to write diffs to, the update channel, and (sometimes)
+  # update-settings.ini values
+  update_platform=$1
+  source_package=$2
+  target_package=$3
+  locale=$4
+  use_old_updater=$5
+  updater=$6
+  diff_file=$7
+  channel=$8
+  mar_channel_IDs=$9
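+
+  # hypothetical invocation (all values illustrative):
+  #   check_updates Linux_x86_64-gcc3 downloads/firefox-63.0.tar.bz2 \
+  #     downloads/firefox-64.0.tar.bz2 en-US 0 ../updater diff.out beta-cdntest \
+  #     "firefox-mozilla-beta,firefox-mozilla-release"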
+
+  # cleanup
+  rm -rf source/*
+  rm -rf target/*
+
+  unpack_build $update_platform source "$source_package" $locale '' $mar_channel_IDs
+  if [ "$?" != "0" ]; then
+    echo "FAILED: cannot unpack_build $update_platform source $source_package"
+    return 1
+  fi
+  unpack_build $update_platform target "$target_package" $locale 
+  if [ "$?" != "0" ]; then
+    echo "FAILED: cannot unpack_build $update_platform target $target_package"
+    return 1
+  fi
+  
+  case $update_platform in
+      Darwin_ppc-gcc | Darwin_Universal-gcc3 | Darwin_x86_64-gcc3 | Darwin_x86-gcc3-u-ppc-i386 | Darwin_x86-gcc3-u-i386-x86_64 | Darwin_x86_64-gcc3-u-i386-x86_64) 
+          platform_dirname="*.app"
+          ;;
+      WINNT*) 
+          platform_dirname="bin"
+          ;;
+      Linux_x86-gcc | Linux_x86-gcc3 | Linux_x86_64-gcc3) 
+          platform_dirname=`echo $product | tr '[A-Z]' '[a-z]'`
+          ;;
+  esac
+
+  if [ -f update/update.status ]; then rm update/update.status; fi
+  if [ -f update/update.log ]; then rm update/update.log; fi
+
+  if [ -d source/$platform_dirname ]; then
+    if [ `uname | cut -c-5` == "MINGW" ]; then
+      # windows
+      # change /c/path/to/pwd to c:\\path\\to\\pwd
+      four_backslash_pwd=$(echo $PWD | sed -e 's,^/\([a-zA-Z]\)/,\1:/,' | sed -e 's,/,\\\\,g')
+      two_backslash_pwd=$(echo $PWD | sed -e 's,^/\([a-zA-Z]\)/,\1:/,' | sed -e 's,/,\\,g')
+      cwd="$two_backslash_pwd\\source\\$platform_dirname"
+      update_abspath="$two_backslash_pwd\\update"
+    else
+      # not windows
+      # use ls here, because mac uses *.app, and we need to expand it
+      cwd=$(ls -d $PWD/source/$platform_dirname)
+      update_abspath="$PWD/update"
+    fi
+
+    cd_dir=$(ls -d ${PWD}/source/${platform_dirname})
+    cd "${cd_dir}"
+    set -x
+    "$updater" "$update_abspath" "$cwd" "$cwd" 0
+    set +x
+    cd ../..
+  else
+    echo "FAIL: no dir in source/$platform_dirname"
+    return 1
+  fi
+
+  cat update/update.log
+  update_status=`cat update/update.status`
+
+  if [ "$update_status" != "succeeded" ]
+  then
+    echo "FAIL: update status was not successful: $update_status"
+    return 1
+  fi
+
+  # If we were testing an OS X mar on Linux, the unpack step copied the
+  # precomplete file from Contents/Resources to the root of the install
+  # to ensure the Linux updater binary could find it. However, only the
+  # precomplete file in Contents/Resources was updated, which means
+  # the copied version in the root of the install will usually have some
+  # differences between the source and target. To prevent this false
+  # positive from failing the tests, we simply remove it before diffing.
+  # The precomplete file in Contents/Resources is still diffed, so we
+  # don't lose any coverage by doing this.
+  cd `echo "source/$platform_dirname"`
+  if [[ -f "Contents/Resources/precomplete" && -f "precomplete" ]]
+  then
+    rm "precomplete"
+  fi
+  cd ../..
+  cd `echo "target/$platform_dirname"`
+  if [[ -f "Contents/Resources/precomplete" && -f "precomplete" ]]
+  then
+    rm "precomplete"
+  fi
+  cd ../..
+
+  ../compare-directories.py source/${platform_dirname} target/${platform_dirname}  ${channel} > "${diff_file}"
+  diffErr=$?
+  cat "${diff_file}"
+  if [ $diffErr == 2 ]
+  then
+    echo "FAIL: differences found after update"
+    return 1
+  elif [ $diffErr != 0 ]
+  then
+    echo "FAIL: unknown error from diff: $diffErr"
+    return 3
+  fi
+}
new file mode 100755
--- /dev/null
+++ b/tools/update-verify/release/common/download_builds.sh
@@ -0,0 +1,36 @@
+pushd `dirname $0` &>/dev/null
+MY_DIR=$(pwd)
+popd &>/dev/null
+retry="$MY_DIR/../../buildfarm/utils/retry.py -s 1 -r 3"
+
+download_builds() {
+  # cleanup
+  mkdir -p downloads/
+  rm -rf downloads/*
+
+  source_url="$1"
+  target_url="$2"
+
+  if [ -z "$source_url" ] || [ -z "$target_url" ]
+  then
+    "download_builds usage: <source_url> <target_url>"
+    exit 1
+  fi
+
+  for url in "$source_url" "$target_url"
+    do
+    source_file=`basename "$url"`
+    if [ -f "$source_file" ]; then rm "$source_file"; fi
+    cd downloads 
+    if [ -f "$source_file" ]; then rm "$source_file"; fi
+    cached_download "${source_file}" "${url}"
+    status=$?
+    if [ $status != 0 ]; then
+      echo "FAIL: Could not download source $source_file from $url"
+      echo "skipping.."
+      cd ../
+      return $status
+    fi
+    cd ../
+  done
+}
new file mode 100755
--- /dev/null
+++ b/tools/update-verify/release/common/download_mars.sh
@@ -0,0 +1,105 @@
+download_mars () {
+    update_url="$1"
+    only="$2"
+    test_only="$3"
+    to_build_id="$4"
+    to_app_version="$5"
+    to_display_version="$6"
+
+    max_tries=5
+    try=1
+    # retrying until we get offered an update
+    while [ "$try" -le "$max_tries" ]; do
+        echo "Using  $update_url"
+        # retrying until AUS gives us any response at all
+        cached_download update.xml "${update_url}"
+
+        echo "Got this response:"
+        cat update.xml
+        # If the first line after <updates> is </updates> then we have an
+        # empty snippet. Otherwise we're done
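+        # an empty response looks like:
+        #   <updates>
+        #   </updates>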
+        if [ "$(grep -A1 '<updates>' update.xml | tail -1)" != "</updates>" ]; then
+            break;
+        fi
+        echo "Empty response, sleeping"
+        sleep 5
+        try=$(($try+1))
+    done
+
+    echo; echo;  # padding
+
+    update_line=`fgrep "<update " update.xml`
+    grep_rv=$?
+    if [ 0 -ne $grep_rv ]; then
+        echo "FAIL: no <update/> found for $update_url"
+        return 1
+    fi
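+    # export each attribute of the <update .../> element as a shell variable,
+    # e.g. buildID, appVersion, displayVersion (validated below)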
+    command=`echo $update_line | sed -e 's/^.*<update //' -e 's:>.*$::' -e 's:\&amp;:\&:g'`
+    eval "export $command"
+
+    if [ ! -z "$to_build_id" -a "$buildID" != "$to_build_id" ]; then
+        echo "FAIL: expected buildID $to_build_id does not match actual $buildID"
+        return 1
+    fi
+
+    if [ ! -z "$to_display_version" -a "$displayVersion" != "$to_display_version" ]; then
+        echo "FAIL: expected displayVersion $to_display_version does not match actual $displayVersion"
+        return 1
+    fi
+
+    if [ ! -z "$to_app_version" -a "$appVersion" != "$to_app_version" ]; then
+        echo "FAIL: expected appVersion $to_app_version does not match actual $appVersion"
+        return 1
+    fi
+
+    mkdir -p update/
+    if [ -z "$only" ]; then
+      only="partial complete"
+    fi
+    for patch_type in $only
+      do
+      line=`fgrep "patch type=\"$patch_type" update.xml`
+      grep_rv=$?
+
+      if [ 0 -ne $grep_rv ]; then
+        echo "FAIL: no $patch_type update found for $update_url"
+        return 1
+      fi
+
+      command=`echo $line | sed -e 's/^.*<patch //' -e 's:/>.*$::' -e 's:\&amp;:\&:g'`
+      eval "export $command"
+
+      if [ "$test_only" == "1" ]
+      then
+        echo "Testing $URL"
+        curl -k -s -I -L $URL
+        return
+      else
+        cached_download "update/${patch_type}.mar" "${URL}"
+      fi
+      if [ "$?" != 0 ]; then
+        echo "Could not download $patch_type!"
+        echo "from: $URL"
+      fi
+      actual_size=`perl -e "printf \"%d\n\", (stat(\"update/$patch_type.mar\"))[7]"`
+      actual_hash=`openssl dgst -$hashFunction update/$patch_type.mar | sed -e 's/^.*= //'`
+
+      if [ $actual_size != $size ]; then
+          echo "FAIL: $patch_type from $update_url wrong size"
+          echo "FAIL: update.xml size: $size"
+          echo "FAIL: actual size: $actual_size"
+          return 1
+      fi
+
+      if [ $actual_hash != $hashValue ]; then
+          echo "FAIL: $patch_type from $update_url wrong hash"
+          echo "FAIL: update.xml hash: $hashValue"
+          echo "FAIL: actual hash: $actual_hash"
+          return 1
+      fi
+
+      cp update/$patch_type.mar update/update.mar
+      echo $actual_size > update/$patch_type.size
+
+    done
+}
new file mode 100755
--- /dev/null
+++ b/tools/update-verify/release/common/installdmg.ex
@@ -0,0 +1,45 @@
+#!/usr/bin/expect
+# ***** BEGIN LICENSE BLOCK *****
+# Version: MPL 1.1/GPL 2.0/LGPL 2.1
+#
+# The contents of this file are subject to the Mozilla Public License Version
+# 1.1 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+# http://www.mozilla.org/MPL/
+#
+# Software distributed under the License is distributed on an "AS IS" basis,
+# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+# for the specific language governing rights and limitations under the
+# License.
+#
+# The Original Code is Mozilla Corporation Code.
+#
+# The Initial Developer of the Original Code is
+# Clint Talbert.
+# Portions created by the Initial Developer are Copyright (C) 2007
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#  Armen Zambrano Gasparnian <armenzg@mozilla.com>
+#  Axel Hecht <l10n@mozilla.com>
+#
+# Alternatively, the contents of this file may be used under the terms of
+# either the GNU General Public License Version 2 or later (the "GPL"), or
+# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+# in which case the provisions of the GPL or the LGPL are applicable instead
+# of those above. If you wish to allow use of your version of this file only
+# under the terms of either the GPL or the LGPL, and not to allow others to
+# use your version of this file under the terms of the MPL, indicate your
+# decision by deleting the provisions above and replace them with the notice
+# and other provisions required by the GPL or the LGPL. If you do not delete
+# the provisions above, a recipient may use your version of this file under
+# the terms of any one of the MPL, the GPL or the LGPL.
+#
+# ***** END LICENSE BLOCK *****
+#send_user $argv
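+# hdiutil may display a license agreement; page through it and accept it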
+spawn hdiutil attach -readonly -mountroot /tmp -private -noautoopen [lindex $argv 0]
+expect {
+"byte" {send "G"; exp_continue}
+"END" {send "\r"; exp_continue}
+"Y/N?" {send "Y\r"; exp_continue}
+}
new file mode 100755
--- /dev/null
+++ b/tools/update-verify/release/common/unpack-diskimage.sh
@@ -0,0 +1,95 @@
+#!/bin/bash
+# ***** BEGIN LICENSE BLOCK *****
+# Version: MPL 1.1/GPL 2.0/LGPL 2.1
+#
+# The contents of this file are subject to the Mozilla Public License Version
+# 1.1 (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+# http://www.mozilla.org/MPL/
+#
+# Software distributed under the License is distributed on an "AS IS" basis,
+# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+# for the specific language governing rights and limitations under the
+# License.
+#
+# The Original Code is the installdmg.sh script from taols utilities
+#
+# The Initial Developer of the Original Code is
+# Mozilla Corporation.
+# Portions created by the Initial Developer are Copyright (C) 2009
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+#  Chris AtLee <catlee@mozilla.com>
+#  Robert Kaiser <kairo@kairo.at>
+#
+# Alternatively, the contents of this file may be used under the terms of
+# either the GNU General Public License Version 2 or later (the "GPL"), or
+# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+# in which case the provisions of the GPL or the LGPL are applicable instead
+# of those above. If you wish to allow use of your version of this file only
+# under the terms of either the GPL or the LGPL, and not to allow others to
+# use your version of this file under the terms of the MPL, indicate your
+# decision by deleting the provisions above and replace them with the notice
+# and other provisions required by the GPL or the LGPL. If you do not delete
+# the provisions above, a recipient may use your version of this file under
+# the terms of any one of the MPL, the GPL or the LGPL.
+#
+# ***** END LICENSE BLOCK *****
+
+# Unpack a disk image to a specified target folder
+#
+# Usage: unpack-diskimage <image_file>
+#                         <mountpoint>
+#                         <target_path>
+
+DMG_PATH=$1
+MOUNTPOINT=$2
+TARGETPATH=$3
+LOGFILE=unpack.output
+
+# How long to wait before giving up waiting for the mount to finish
+TIMEOUT=90
+
+# If the mount point already exists, then the previous run may not have cleaned
+# up properly.  We should try to unmount it and remove its directory.
+if [ -d $MOUNTPOINT ]; then
+    echo "$MOUNTPOINT already exists, trying to clean up"
+    hdiutil detach $MOUNTPOINT -force
+    rm -rdfv $MOUNTPOINT
+fi
+
+# Install an on-exit handler that will unmount and remove the '$MOUNTPOINT' directory
+trap "{ if [ -d $MOUNTPOINT ]; then hdiutil detach $MOUNTPOINT -force; rm -rdfv $MOUNTPOINT; fi; }" EXIT
+
+mkdir -p $MOUNTPOINT
+
+hdiutil attach -verbose -noautoopen -mountpoint $MOUNTPOINT "$DMG_PATH" &> $LOGFILE
+# Wait for files to show up
+# hdiutil uses a helper process, diskimages-helper, which hasn't always
+# finished its work by the time hdiutil exits.  So we wait until something shows up in the
+# mount point directory.
+i=0
+while [ "$(echo $MOUNTPOINT/*)" == "$MOUNTPOINT/*" ]; do
+    if [ $i -gt $TIMEOUT ]; then
+        echo "No files found, exiting"
+        exit 1
+    fi
+    sleep 1
+    i=$(expr $i + 1)
+done
+# Now we can copy everything out of the $MOUNTPOINT directory into the target directory
+rsync -av $MOUNTPOINT/* $MOUNTPOINT/.DS_Store $MOUNTPOINT/.background $MOUNTPOINT/.VolumeIcon.icns $TARGETPATH/ > $LOGFILE
+# sometimes hdiutil fails with "Resource busy"
+hdiutil detach $MOUNTPOINT || { sleep  10; \
+    if [ -d $MOUNTPOINT ]; then hdiutil detach $MOUNTPOINT -force; fi; }
+i=0
+while [ "$(echo $MOUNTPOINT/*)" != "$MOUNTPOINT/*" ]; do
+    if [ $i -gt $TIMEOUT ]; then
+        echo "Cannot umount, exiting"
+        exit 1
+    fi
+    sleep 1
+    i=$(expr $i + 1)
+done
+rm -rdf $MOUNTPOINT
new file mode 100755
--- /dev/null
+++ b/tools/update-verify/release/common/unpack.sh
@@ -0,0 +1,115 @@
+#!/bin/bash
+
+function cleanup() { 
+    hdiutil detach ${DEV_NAME} || 
+      { sleep 5 && hdiutil detach ${DEV_NAME} -force; }; 
+    return $1;
+};
+
+unpack_build () {
+    unpack_platform="$1"
+    dir_name="$2"
+    pkg_file="$3"
+    locale=$4
+    unpack_jars=$5
+    update_settings_string=$6
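+    # hypothetical invocation, mirroring the call in check_updates.sh:
+    #   unpack_build Linux_x86_64-gcc3 source firefox-64.0.tar.bz2 en-US '' \
+    #     firefox-mozilla-beta,firefox-mozilla-release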
+
+    if [ ! -f "$pkg_file" ]; then
+      return 1
+    fi 
+    mkdir -p $dir_name
+    pushd $dir_name > /dev/null
+    case $unpack_platform in
+        mac|mac64|mac-ppc|Darwin_ppc-gcc|Darwin_Universal-gcc3|Darwin_x86_64-gcc3|Darwin_x86-gcc3-u-ppc-i386|Darwin_x86-gcc3-u-i386-x86_64|Darwin_x86_64-gcc3-u-i386-x86_64)
+            os=`uname`
+            # How we unpack a dmg differs depending on which platform we're on.
+            if [[ "$os" == "Darwin" ]]
+            then
+                cd ../
+                echo "installing $pkg_file"
+                ../common/unpack-diskimage.sh "$pkg_file" mnt $dir_name
+            else
+                7z x ../"$pkg_file" > /dev/null
+                if [ `ls -1 | wc -l` -ne 1 ]
+                then
+                    echo "Couldn't find .app package"
+                    return 1
+                fi
+                unpack_dir=$(ls -1)
+                unpack_dir=$(ls -d "${unpack_dir}")
+                mv "${unpack_dir}"/*.app .
+                rm -rf "${unpack_dir}"
+                appdir=$(ls -1)
+                appdir=$(ls -d *.app)
+                # The updater guesses the location of these files based on
+                # its own target architecture, not the mar. If we're not
+                # unpacking mac-on-mac, we need to copy them so it can find
+                # them. It's important to copy (and not move), because when
+                # we diff the installer vs updated build afterwards, the
+                # installer version will have them in their original place.
+                cp "${appdir}/Contents/Resources/update-settings.ini" "${appdir}/update-settings.ini"
+                cp "${appdir}/Contents/Resources/precomplete" "${appdir}/precomplete"
+            fi
+            update_settings_file="${appdir}/update-settings.ini"
+            ;;
+        win32|win64|WINNT_x86-msvc|WINNT_x86-msvc-x86|WINNT_x86-msvc-x64|WINNT_x86_64-msvc|WINNT_x86_64-msvc-x64)
+            7z x ../"$pkg_file" > /dev/null
+            if [ -d localized ]
+            then
+              mkdir bin/
+              cp -rp nonlocalized/* bin/
+              cp -rp localized/*    bin/
+              rm -rf nonlocalized
+              rm -rf localized
+              if [ $(find optional/ | wc -l) -gt 1 ]
+              then 
+                cp -rp optional/*     bin/
+                rm -rf optional
+              fi
+            elif [ -d core ]
+            then
+              mkdir bin/
+              cp -rp core/* bin/
+              rm -rf core
+            else
+              for file in *.xpi
+              do
+                unzip -o $file > /dev/null
+              done
+              unzip -o ${locale}.xpi > /dev/null
+            fi
+            update_settings_file='bin/update-settings.ini'
+            ;;
+        linux-i686|linux-x86_64|linux|linux64|Linux_x86-gcc|Linux_x86-gcc3|Linux_x86_64-gcc3)
+            if `echo $pkg_file | grep -q "tar.gz"`
+            then
+                tar xfz ../"$pkg_file" > /dev/null
+            elif `echo $pkg_file | grep -q "tar.bz2"`
+            then
+                tar xfj ../"$pkg_file" > /dev/null
+            else
+                echo "Unknown package type for file: $pkg_file"
+                exit 1
+            fi
+            update_settings_file=`echo $product | tr '[A-Z]' '[a-z]'`'/update-settings.ini'
+            ;;
+    esac
+
+    if [ ! -z $unpack_jars ]; then
+        for f in `find . -name '*.jar' -o -name '*.ja'`; do
+            unzip -o "$f" -d "$f.dir" > /dev/null
+        done
+    fi
+
+    if [ ! -z $update_settings_string ]; then
+       echo "Modifying update-settings.ini"
+       cat  "${update_settings_file}" | sed -e "s/^ACCEPTED_MAR_CHANNEL_IDS.*/ACCEPTED_MAR_CHANNEL_IDS=${update_settings_string}/" > "${update_settings_file}.new"
+       diff -u "${update_settings_file}" "${update_settings_file}.new"
+       echo " "
+       rm "${update_settings_file}"
+       mv "${update_settings_file}.new" "${update_settings_file}"
+    fi
+
+    popd > /dev/null
+
+}
new file mode 100755
--- /dev/null
+++ b/tools/update-verify/release/compare-directories.py
@@ -0,0 +1,206 @@
+#! /usr/bin/env python
+
+import argparse
+import difflib
+import hashlib
+import logging
+import os
+import sys
+
+
+""" Define the transformations needed to make source + update == target
+
+Required:
+The files list describes the files which a transform may be used on.
+The 'side' is one of ('source', 'target') and defines where each transform is applied
+The 'channel_prefix' list controls which channels a transform may be used for, where a value of
+'beta' means all of beta, beta-localtest, beta-cdntest, etc.
+
+One or more:
+A 'deletion' specifies a start of line to match on, removing the whole line
+A 'substitution' is a list of full string to match and its replacement
+
+Future note - this may need to move into the tree to make staging releases or release-on-try easier
+"""
+TRANSFORMS = [
+    # channel-prefs.js
+    {
+        # preprocessor comments, eg //@line 6 "/builds/worker/workspace/...
+        # this can be removed once each channel has a watershed above 59.0b2 (from bug 1431342)
+        'files': ['defaults/pref/channel-prefs.js', 'Contents/Resources/defaults/pref/channel-prefs.js'],
+        'channel_prefix': ['aurora', 'beta', 'release', 'esr'],
+        'side': 'source',
+        'deletion': '//@line 6 "',
+    },
+    {
+        # updates from a beta to an RC build, the latter specifies the release channel
+        'files': ['defaults/pref/channel-prefs.js', 'Contents/Resources/defaults/pref/channel-prefs.js'],
+        'channel_prefix': ['beta'],
+        'side': 'target',
+        'substitution': [
+            'pref("app.update.channel", "release");\n',
+            'pref("app.update.channel", "beta");\n'
+        ],
+    },
+    {
+        # updates from an RC to a beta build
+        'files': ['defaults/pref/channel-prefs.js', 'Contents/Resources/defaults/pref/channel-prefs.js'],
+        'channel_prefix': ['beta'],
+        'side': 'source',
+        'substitution': [
+            'pref("app.update.channel", "release");\n',
+            'pref("app.update.channel", "beta");\n'
+        ],
+    },
+    # update-settings.ini
+    {
+        # updates from a beta to an RC build, the latter specifies the release channel
+        # on mac, we actually have both files. The second location is the real one but we copy to the first
+        # to run the linux64 updater
+        'files': ['update-settings.ini', 'Contents/Resources/update-settings.ini'],
+        'channel_prefix': ['beta'],
+        'side': 'target',
+        'substitution': [
+            'ACCEPTED_MAR_CHANNEL_IDS=firefox-mozilla-release\n',
+            'ACCEPTED_MAR_CHANNEL_IDS=firefox-mozilla-beta,firefox-mozilla-release\n'
+        ],
+    },
+    {
+        # updates from an RC to a beta build
+        # on mac, we only need to modify the legit file this time. unpack_build handles the copy for the updater in
+        # both source and target
+        'files': ['Contents/Resources/update-settings.ini'],
+        'channel_prefix': ['beta'],
+        'side': 'source',
+        'substitution': [
+            'ACCEPTED_MAR_CHANNEL_IDS=firefox-mozilla-release\n',
+            'ACCEPTED_MAR_CHANNEL_IDS=firefox-mozilla-beta,firefox-mozilla-release\n'
+        ],
+    },
+]
+
+
+def walk_dir(path):
+    all_files = []
+    all_dirs = []
+
+    for root, dirs, files in os.walk(path):
+        all_dirs.extend([os.path.join(root, d) for d in dirs])
+        all_files.extend([os.path.join(root, f) for f in files])
+
+    # trim off directory prefix for easier comparison
+    all_dirs = [d[len(path)+1:] for d in all_dirs]
+    all_files = [f[len(path)+1:] for f in all_files]
+
+    return all_dirs, all_files
+
+
+def compare_listings(source_list, target_list, label, source_dir, target_dir):
+    obj1 = set(source_list)
+    obj2 = set(target_list)
+    difference_found = False
+
+    left_diff = obj1 - obj2
+    if left_diff:
+        logging.error('{} only in {}:'.format(label, source_dir))
+        for d in sorted(left_diff):
+            logging.error('  {}'.format(d))
+        difference_found = True
+
+    right_diff = obj2 - obj1
+    if right_diff:
+        logging.error('{} only in {}:'.format(label, target_dir))
+        for d in sorted(right_diff):
+            logging.error('  {}'.format(d))
+        difference_found = True
+
+    return difference_found
+
+
+def hash_file(filename):
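+    # sha256 the file in 128 KiB chunks so large binaries aren't read into memory at once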
+    h = hashlib.sha256()
+    with open(filename, 'rb', buffering=0) as f:
+        for b in iter(lambda: f.read(128 * 1024), b''):
+            h.update(b)
+    return h.hexdigest()
+
+
+def compare_common_files(files, channel, source_dir, target_dir):
+    difference_found = False
+    for filename in files:
+        source_file = os.path.join(source_dir, filename)
+        target_file = os.path.join(target_dir, filename)
+
+        if os.stat(source_file).st_size != os.stat(target_file).st_size or \
+                hash_file(source_file) != hash_file(target_file):
+            logging.info('Difference found in {}'.format(filename))
+            file_contents = {
+                'source': open(source_file).readlines(),
+                'target': open(target_file).readlines(),
+            }
+
+            transforms = [t for t in TRANSFORMS if filename in t['files'] and
+                          channel.startswith(tuple(t['channel_prefix']))]
+            logging.debug('Got {} transform(s) to consider for {}'.format(len(transforms), filename))
+            for transform in transforms:
+                side = transform['side']
+
+                if 'deletion' in transform:
+                    d = transform['deletion']
+                    logging.debug('Trying deleting lines starting {} from {}'.format(d, side))
+                    file_contents[side] = [l for l in file_contents[side] if not l.startswith(d)]
+
+                if 'substitution' in transform:
+                    r = transform['substitution']
+                    logging.debug('Trying replacement for {} in {}'.format(r, side))
+                    file_contents[side] = [l.replace(r[0], r[1]) for l in file_contents[side]]
+
+                if file_contents['source'] == file_contents['target']:
+                    logging.info('Transforms removed all differences')
+                    break
+
+            if file_contents['source'] != file_contents['target']:
+                difference_found = True
+                logging.error('{} still differs after transforms, residual diff:'.format(filename))
+                for l in difflib.unified_diff(file_contents['source'], file_contents['target']):
+                    logging.error(l.rstrip())
+
+    return difference_found
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description='Compare two directories recursively, with transformations for expected diffs')
+    parser.add_argument('source', help='Directory containing updated Firefox')
+    parser.add_argument('target', help='Directory containing expected Firefox')
+    parser.add_argument('channel', help='Update channel used')
+    parser.add_argument('--verbose', '-v', action='store_true', help='Enable verbose logging')
+
+    args = parser.parse_args()
+    level = logging.INFO
+    if args.verbose:
+        level = logging.DEBUG
+    logging.basicConfig(level=level, format='%(message)s', stream=sys.stdout)
+
+    source = args.source
+    target = args.target
+    if not os.path.exists(source) or not os.path.exists(target):
+        logging.error("Source and/or target directory doesn't exist")
+        sys.exit(3)
+
+    logging.info('Comparing {} with {}...'.format(source, target))
+    source_dirs, source_files = walk_dir(source)
+    target_dirs, target_files = walk_dir(target)
+
+    dir_list_diff = compare_listings(source_dirs, target_dirs, 'Directories', source, target)
+    file_list_diff = compare_listings(source_files, target_files, 'Files', source, target)
+    file_diff = compare_common_files(set(source_files) & set(target_files), args.channel, source, target)
+
+    if file_diff:
+        # Use status of 2 since python will use 1 if there is an error running the script
+        sys.exit(2)
+    elif dir_list_diff or file_list_diff:
+        # this has traditionally been a WARN, but files present on only one side are a real difference, so let's FAIL
+        sys.exit(2)
+    else:
+        logging.info('No differences found')
new file mode 100755
--- /dev/null
+++ b/tools/update-verify/release/final-verification.sh
@@ -0,0 +1,519 @@
+#!/bin/bash
+
+function usage {
+    log "In the updates subdirectory of the directory this script is in,"
+    log "there are a bunch of config files. You should call this script,"
+    log "passing the names of one or more of those files as parameters"
+    log "to this script."
+    log ""
+    log "This will validate that the update.xml files all exist for the"
+    log "given config file, and that they report the correct file sizes"
+    log "for the associated mar files, and that the associated mar files"
+    log "are available on the update servers."
+    log ""
+    log "This script will spawn multiple curl processes to query the"
+    log "snippets (update.xml file downloads) and the download urls in"
+    log "parallel. The number of parallel curl processes can be managed"
+    log "with the -p MAX_PROCS option."
+    log ""
+    log "Only the first three bytes of the mar files are downloaded"
+    log "using curl -r 0-2 option to save time. GET requests are issued"
+    log "rather than HEAD requests, since Akamai (one of our CDN"
+    log "partners) caches GET and HEAD requests separately - therefore"
+    log "they can be out-of-sync, and it is important that we validate"
+    log "that the GET requests return the expected results."
+    log ""
+    log "Please note this script can run on linux and OS X. It has not"
+    log "been tested on Windows, but may also work. It can be run"
+    log "locally, and does not require access to the mozilla vpn or"
+    log "any other special network, since the update servers are"
+    log "available over the internet. However, it does require an"
+    log "up-to-date checkout of the tools repository, as the updates/"
+    log "subfolder changes over time, and reflects the currently"
+    log "available updates. It makes no changes to the update servers"
+    log "so there is no harm in running it. It simply generates a"
+    log "report. However, please try to avoid hammering the update"
+    log "servers aggressively, e.g. with thousands of parallel"
+    log "processes. For example, feel free to run the examples below,"
+    log "first making sure that your source code checkout is up-to-"
+    log "date on your own machine, to get the latest configs in the"
+    log "updates/ subdirectory."
+    log ""
+    log "Usage:"
+    log "    $(basename "${0}") [-p MAX_PROCS] config1 [config2 config3 config4 ...]"
+    log "    $(basename "${0}") -h"
+    log ""
+    log "Examples:"
+    log "    1. $(basename "${0}") -p 128 mozBeta-thunderbird-linux.cfg mozBeta-thunderbird-linux64.cfg"
+    log "    2. $(basename "${0}") mozBeta-thunderbird-linux64.cfg"
+}
+
+function log {
+    echo "$(date):  ${1}"
+}
+
+# subprocesses don't log in real time, due to synchronisation
+# issues which can cause log entries to overwrite each other.
+# therefore this function outputs log entries written to
+# temporary files on disk, and then deletes them.
+function flush_logs {
+    ls -1rt "${TMPDIR}" | grep '^log\.' | while read LOG
+    do
+        cat "${TMPDIR}/${LOG}"
+        rm "${TMPDIR}/${LOG}"
+    done
+}
+
+# this function takes an update.xml url as an argument
+# and then logs a list of config files and their line
+# numbers, that led to this update.xml url being tested
+function show_cfg_file_entries {
+    local update_xml_url="${1}"
+    cat "${update_xml_urls}" | cut -f1 -d' ' | grep -Fn "${update_xml_url}" | sed 's/:.*//' | while read match_line_no
+    do
+        cfg_file="$(sed -n -e "${match_line_no}p" "${update_xml_urls}" | cut -f3 -d' ')"
+        cfg_line_no="$(sed -n -e "${match_line_no}p" "${update_xml_urls}" | cut -f4 -d' ')"
+        log "        ${cfg_file} line ${cfg_line_no}: $(sed -n -e "${cfg_line_no}p" "${cfg_file}")"
+    done
+}
+
+# this function takes a mar url as an argument and then
+# logs information about which update.xml urls referenced
+# this mar url, and which config files referenced those
+# mar urls - so you have a full understanding of why this
+# mar url was ever tested
+function show_update_xml_entries {
+    local mar_url="${1}"
+    grep -Frl "${mar_url}" "${TMPDIR}" | grep '/update_xml_to_mar\.' | while read update_xml_to_mar
+    do
+        mar_size="$(cat "${update_xml_to_mar}" | cut -f2 -d' ')"
+        update_xml_url="$(cat "${update_xml_to_mar}" | cut -f3 -d' ')"
+        patch_type="$(cat "${update_xml_to_mar}" | cut -f4 -d' ')"
+        update_xml_actual_url="$(cat "${update_xml_to_mar}" | cut -f5 -d' ')"
+        log "        ${update_xml_url}"
+        [ -n "${update_xml_actual_url}" ] && log "            which redirected to: ${update_xml_actual_url}"
+        log "            This contained an entry for:"
+        log "                patch type: ${patch_type}"
+        log "                mar size: ${mar_size}"
+        log "                mar url: ${mar_url}"
+        log "            The update.xml url above was retrieved because of the following cfg file entries:"
+        show_cfg_file_entries "${update_xml_url}" | sed 's/        /                /'
+    done
+}
+
+echo -n "$(date):  Command called:"
+for ((INDEX=0; INDEX<=$#; INDEX+=1))
+do
+    echo -n " '${!INDEX}'"
+done
+echo ''
+log "From directory: '$(pwd)'"
+log ''
+log "Parsing arguments..."
+
+# Max procs lowered in bug 894368 to try to avoid spurious failures
+MAX_PROCS=48
+BAD_ARG=0
+BAD_FILE=0
+while getopts p:h OPT
+do
+    case "${OPT}" in
+        p) MAX_PROCS="${OPTARG}";;
+        h) usage
+           exit;;
+        *) BAD_ARG=1;;
+    esac
+done
+shift "$((OPTIND - 1))"
+
+# invalid option specified
+[ "${BAD_ARG}" == 1 ] && exit 66
+
+log "Checking one or more config files have been specified..."
+if [ $# -lt 1 ]
+then
+    usage
+    log "ERROR: You must specify one or more config files"
+    exit 64
+fi
+
+log "Checking whether MAX_PROCS is a number..."
+if ! let x=MAX_PROCS 2>/dev/null
+then
+    usage
+    log "ERROR: MAX_PROCS must be a number (-p option); you specified '${MAX_PROCS}' - this is not a number."
+    exit 65
+fi
+
+# config files are in updates subdirectory below this script
+if ! cd "$(dirname "${0}")/updates" 2>/dev/null
+then
+    log "ERROR: Cannot cd into '$(dirname "${0}")/updates' from '$(pwd)'"
+    exit 68
+fi
+
+log "Checking specified config files (and downloading them if necessary):"
+log ''
+configs=()
+for file in "${@}"
+do
+    if [[ ${file} == http* ]]
+    then
+        log "  Downloading config file '${file}'"
+        cfg=$(mktemp)
+        curl -fL "${file}" > "$cfg"
+        if [ "$?" != 0 ]; then
+            log "Error downloading config file '${file}'"
+            BAD_FILE=1
+        else
+            log "  * '${file}' ok, downloaded to '${cfg}'"
+            configs+=($cfg)
+        fi
+    elif [ -f "${file}" ]
+    then
+        log "  * '${file}' ok"
+        configs+=(${file})
+    else
+        log "  * '${file}' missing"
+        BAD_FILE=1
+    fi
+done
+log ''
+
+# invalid config specified
+if [ "${BAD_FILE}" == 1 ]
+then
+    log "ERROR: Unable to download config file(s) or config files are missing from repo - see above"
+    exit 67
+fi
+
+log "All checks completed successfully."
+log ''
+log "Starting stopwatch..."
+log ''
+log "Please be aware output will now be buffered up, and only displayed after completion."
+log "Therefore do not be alarmed if you see no output for several minutes."
+log "See https://bugzilla.mozilla.org/show_bug.cgi?id=863602#c5 for details".
+log ''
+
+START_TIME="$(date +%s)"
+
+# Create a temporary directory for all temp files, that can easily be
+# deleted afterwards. See https://bugzilla.mozilla.org/show_bug.cgi?id=863602
+# to understand why we write everything in distinct temporary files rather
+# than writing to standard error/standard out or files shared across
+# processes.
+# Need to unset TMPDIR first since it affects mktemp behaviour on next line
+unset TMPDIR
+export TMPDIR="$(mktemp -d -t final_verification.XXXXXXXXXX)"
+
+# this temporary file will list all update urls that need to be checked, in this format:
+# <update url> <comma separated list of patch types> <cfg file that requests it> <line number of config file>
+# e.g.
+# https://aus4.mozilla.org/update/3/Firefox/18.0/20130104154748/Linux_x86_64-gcc3/zh-TW/releasetest/default/default/default/update.xml?force=1 complete moz20-firefox-linux64-major.cfg 3
+# https://aus4.mozilla.org/update/3/Firefox/18.0/20130104154748/Linux_x86_64-gcc3/zu/releasetest/default/default/default/update.xml?force=1 complete moz20-firefox-linux64.cfg 7
+# https://aus4.mozilla.org/update/3/Firefox/19.0/20130215130331/Linux_x86_64-gcc3/ach/releasetest/default/default/default/update.xml?force=1 complete,partial moz20-firefox-linux64-major.cfg 11
+# https://aus4.mozilla.org/update/3/Firefox/19.0/20130215130331/Linux_x86_64-gcc3/af/releasetest/default/default/default/update.xml?force=1 complete,partial moz20-firefox-linux64.cfg 17
+update_xml_urls="$(mktemp -t update_xml_urls.XXXXXXXXXX)"
+
+####################################################################################
+# And now a summary of all temp files that will get generated during this process...
+#
+# 1) mktemp -t failure.XXXXXXXXXX
+#
+# Each failure will generate a one line temp file, which is a space separated
+# output of the error code, and the instance data for the failure.
+# e.g.
+#
+# PATCH_TYPE_MISSING https://aus4.mozilla.org/update/3/Firefox/4.0b12/20110222205441/Linux_x86-gcc3/dummy-locale/releasetest/update.xml?force=1 complete https://aus4.mozilla.org/update/3/Firefox/4.0b12/20110222205441/Linux_x86-gcc3/dummy-locale/releasetest/default/default/default/update.xml?force=1
+#
+# 2) mktemp -t update_xml_to_mar.XXXXXXXXXX
+#
+# For each mar url referenced in an update.xml file, a temp file will be created to store the
+# association between update.xml url and mar url. This is later used (e.g. in function
+# show_update_xml_entries) to trace back the update.xml url(s) that led to a mar url being
+# tested. It is also used to keep a full list of mar urls to test.
+# e.g.
+#
+# <mar url> <mar size> <update.xml url> <patch type> <update.xml redirection url, if HTTP 302 returned>
+#
+# 3) mktemp -t log.XXXXXXXXXX
+#
+# For each log message logged by a subprocess, we will create a temp log file with the
+# contents of the log message, since we cannot safely output the log message from the subprocess
+# and guarantee that it will be correctly output. By buffering log output in individual log files
+# we guarantee that log messages will not interfere with each other. We then flush them when all
+# forked subprocesses have completed.
+#
+# 4) mktemp -t mar_headers.XXXXXXXXXX
+#
+# We keep a copy of the mar url http headers retrieved in one file per mar url.
+#
+# 5) mktemp -t update.xml.headers.XXXXXXXXXX
+#
+# We keep a copy of the update.xml http headers retrieved in one file per update.xml url.
+#
+# 6) mktemp -t update.xml.XXXXXXXXXX
+#
+# We keep a copy of each update.xml file retrieved in individual files.
+####################################################################################
+
+
+# generate full list of update.xml urls, followed by patch types,
+# as defined in the specified config files - and write into "${update_xml_urls}" file
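+# each config line is eval'd below; a hypothetical entry looks like:
+#   release="64.0" product="Firefox" platform="Linux_x86_64-gcc3" build_id="20181206201918" locales="de en-US" channel="beta-cdntest" patch_types="complete partial"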
+aus_server="https://aus5.mozilla.org"
+for cfg_file in "${configs[@]}"
+do
+    line_no=0
+    sed -e 's/localtest/cdntest/' "${cfg_file}" | while read config_line
+    do
+        let line_no++
+        # to avoid contamination between iterations, reset variables
+        # each loop in case they are not declared
+        # aus_server is not "cleared" each iteration - to be consistent with previous behaviour of old
+        # final-verification.sh script - might be worth reviewing if we really want this behaviour
+        release="" product="" platform="" build_id="" locales="" channel="" from="" patch_types="complete"
+        eval "${config_line}"
+        for locale in ${locales}
+        do
+            echo "${aus_server}/update/3/$product/$release/$build_id/$platform/$locale/$channel/default/default/default/update.xml?force=1" "${patch_types// /,}" "${cfg_file}" "${line_no}"
+        done
+    done
+done > "${update_xml_urls}"
+
+# download update.xml files and grab the mar urls from downloaded file for each patch type required
+cat "${update_xml_urls}" | cut -f1-2 -d' ' | sort -u | xargs -n2 "-P${MAX_PROCS}" ../get-update-xml.sh
+if [ "$?" != 0 ]; then
+    flush_logs
+    log "Error generating update requests"
+    exit 70
+fi
+
+flush_logs
+
+# download http header for each mar url
+find "${TMPDIR}" -name 'update_xml_to_mar.*' -type f | xargs cat | cut -f1-2 -d' ' | sort -u | xargs -n2 "-P${MAX_PROCS}" ../test-mar-url.sh
+if [ "$?" != 0 ]; then
+    flush_logs
+    log "Error HEADing mar urls"
+    exit 71
+fi
+
+flush_logs
+
+log ''
+log 'Stopping stopwatch...'
+STOP_TIME="$(date +%s)"
+
+number_of_failures="$(find "${TMPDIR}" -name 'failure.*' -type f | wc -l | sed 's/ //g')"
+number_of_update_xml_urls="$(cat "${update_xml_urls}" | cut -f1 -d' ' | sort -u | wc -l | sed 's/ //g')"
+number_of_mar_urls="$(find "${TMPDIR}" -name "update_xml_to_mar.*" | xargs cat | cut -f1 -d' ' | sort -u | wc -l | sed 's/ //g')"
+
+if [ "${number_of_failures}" -eq 0 ]
+then
+    log
+    log "All tests passed successfully."
+    log
+    exit_code=0
+else
+    log ''
+    log '===================================='
+    [ "${number_of_failures}" -gt 1 ] && log "${number_of_failures} FAILURES" || log '1 FAILURE'
+    failure=0
+    ls -1tr "${TMPDIR}" | grep '^failure\.' | while read failure_file
+    do
+        while read failure_code entry1 entry2 entry3 entry4 entry5 entry6 entry7
+        do
+            log '===================================='
+            log ''
+            case "${failure_code}" in
+
+                UPDATE_XML_UNAVAILABLE)
+                    update_xml_url="${entry1}"
+                    update_xml="${entry2}"
+                    update_xml_headers="${entry3}"
+                    update_xml_debug="${entry4}"
+                    update_xml_curl_exit_code="${entry5}"
+                    log "FAILURE $((++failure)): Update xml file not available"
+                    log ""
+                    log "    Download url: ${update_xml_url}"
+                    log "    Curl returned exit code: ${update_xml_curl_exit_code}"
+                    log ""
+                    log "    The HTTP headers were:"
+                    sed -e "s/$(printf '\r')//" -e "s/^/$(date):          /" -e '$a\' "${update_xml_headers}"
+                    log ""
+                    log "    The full curl debug output was:"
+                    sed -e "s/$(printf '\r')//" -e "s/^/$(date):          /" -e '$a\' "${update_xml_debug}"
+                    log ""
+                    log "    The returned update.xml file was:"
+                    sed -e "s/$(printf '\r')//" -e "s/^/$(date):          /" -e '$a\' "${update_xml}"
+                    log ""
+                    log "    This url was tested because of the following cfg file entries:"
+                    show_cfg_file_entries "${update_xml_url}"
+                    log ""
+
+                    ;;
+
+                UPDATE_XML_REDIRECT_FAILED)
+                    update_xml_url="${entry1}"
+                    update_xml_actual_url="${entry2}"
+                    update_xml="${entry3}"
+                    update_xml_headers="${entry4}"
+                    update_xml_debug="${entry5}"
+                    update_xml_curl_exit_code="${entry6}"
+                    log "FAILURE $((++failure)): Update xml file not available at *redirected* location"
+                    log ""
+                    log "    Download url: ${update_xml_url}"
+                    log "    Redirected to: ${update_xml_actual_url}"
+                    log "    It could not be downloaded from this url."
+                    log "    Curl returned exit code: ${update_xml_curl_exit_code}"
+                    log ""
+                    log "    The HTTP headers were:"
+                    sed -e "s/$(printf '\r')//" -e "s/^/$(date):          /" -e '$a\' "${update_xml_headers}"
+                    log ""
+                    log "    The full curl debug output was:"
+                    sed -e "s/$(printf '\r')//" -e "s/^/$(date):          /" -e '$a\' "${update_xml_debug}"
+                    log ""
+                    log "    The returned update.xml file was:"
+                    sed -e "s/$(printf '\r')//" -e "s/^/$(date):          /" -e '$a\' "${update_xml}"
+                    log ""
+                    log "    This url was tested because of the following cfg file entries:"
+                    show_cfg_file_entries "${update_xml_url}"
+                    log ""
+                    ;;
+
+                PATCH_TYPE_MISSING)
+                    update_xml_url="${entry1}"
+                    patch_type="${entry2}"
+                    update_xml="${entry3}"
+                    update_xml_headers="${entry4}"
+                    update_xml_debug="${entry5}"
+                    update_xml_actual_url="${entry6}"
+                    log "FAILURE $((++failure)): Patch type '${patch_type}' not present in the downloaded update.xml file."
+                    log ""
+                    log "    Update xml file downloaded from: ${update_xml_url}"
+                    [ -n "${update_xml_actual_url}" ] && log "    This redirected to the download url: ${update_xml_actual_url}"
+                    log "    Curl returned exit code: 0 (success)"
+                    log ""
+                    log "    The HTTP headers were:"
+                    sed -e "s/$(printf '\r')//" -e "s/^/$(date):          /" -e '$a\' "${update_xml_headers}"
+                    log ""
+                    log "    The full curl debug output was:"
+                    sed -e "s/$(printf '\r')//" -e "s/^/$(date):          /" -e '$a\' "${update_xml_debug}"
+                    log ""
+                    log "    The returned update.xml file was:"
+                    sed -e "s/$(printf '\r')//" -e "s/^/$(date):          /" -e '$a\' "${update_xml}"
+                    log ""
+                    log "    This url and patch type combination was tested due to the following cfg file entries:"
+                    show_cfg_file_entries "${update_xml_url}"
+                    log ""
+                    ;;
+
+                NO_MAR_FILE)
+                    mar_url="${entry1}"
+                    mar_headers_file="${entry2}"
+                    mar_headers_debug_file="${entry3}"
+                    mar_file_curl_exit_code="${entry4}"
+                    mar_actual_url="${entry5}"
+                    log "FAILURE $((++failure)): Could not retrieve mar file"
+                    log ""
+                    log "    Mar file url: ${mar_url}"
+                    [ -n "${mar_actual_url}" ] && log "    This redirected to: ${mar_actual_url}"
+                    log "    The mar file could not be downloaded from this location."
+                    log "    Curl returned exit code: ${mar_file_curl_exit_code}"
+                    log ""
+                    log "    The HTTP headers were:"
+                    sed -e "s/$(printf '\r')//" -e "s/^/$(date):          /" -e '$a\' "${mar_headers_file}"
+                    log ""
+                    log "    The full curl debug output was:"
+                    sed -e "s/$(printf '\r')//" -e "s/^/$(date):          /" -e '$a\' "${mar_headers_debug_file}"
+                    log ""
+                    log "    The mar download was tested because it was referenced in the following update xml file(s):"
+                    show_update_xml_entries "${mar_url}"
+                    log ""
+                    ;;
+
+                MAR_FILE_WRONG_SIZE)
+                    mar_url="${entry1}"
+                    mar_required_size="${entry2}"
+                    mar_actual_size="${entry3}"
+                    mar_headers_file="${entry4}"
+                    mar_headers_debug_file="${entry5}"
+                    mar_file_curl_exit_code="${entry6}"
+                    mar_actual_url="${entry7}"
+                    log "FAILURE $((++failure)): Mar file is wrong size"
+                    log ""
+                    log "    Mar file url: ${mar_url}"
+                    [ -n "${mar_actual_url}" ] && log "    This redirected to: ${mar_actual_url}"
+                    log "    The http header of the mar file url says that the mar file is ${mar_actual_size} bytes."
+                    log "    One or more of the following update.xml file(s) says that the file should be ${mar_required_size} bytes."
+                    log ""
+                    log "    These are the update xml file(s) that referenced this mar:"
+                    show_update_xml_entries "${mar_url}"
+                    log ""
+                    log "    Curl returned exit code: ${mar_file_curl_exit_code}"
+                    log ""
+                    log "    The HTTP headers were:"
+                    sed -e "s/$(printf '\r')//" -e "s/^/$(date):          /" -e '$a\' "${mar_headers_file}"
+                    log ""
+                    log "    The full curl debug output was:"
+                    sed -e "s/$(printf '\r')//" -e "s/^/$(date):          /" -e '$a\' "${mar_headers_debug_file}"
+                    log ""
+                    ;;
+
+                BAD_HTTP_RESPONSE_CODE_FOR_MAR)
+                    mar_url="${entry1}"
+                    mar_headers_file="${entry2}"
+                    mar_headers_debug_file="${entry3}"
+                    mar_file_curl_exit_code="${entry4}"
+                    mar_actual_url="${entry5}"
+                    http_response_code="$(sed -e "s/$(printf '\r')//" -n -e '/^HTTP\//p' "${mar_headers_file}" | tail -1)"
+                    log "FAILURE $((++failure)): '${http_response_code}' for mar file"
+                    log ""
+                    log "    Mar file url: ${mar_url}"
+                    [ -n "${mar_actual_url}" ] && log "    This redirected to: ${mar_actual_url}"
+                    log ""
+                    log "    These are the update xml file(s) that referenced this mar:"
+                    show_update_xml_entries "${mar_url}"
+                    log ""
+                    log "    Curl returned exit code: ${mar_file_curl_exit_code}"
+                    log ""
+                    log "    The HTTP headers were:"
+                    sed -e "s/$(printf '\r')//" -e "s/^/$(date):          /" -e '$a\' "${mar_headers_file}"
+                    log ""
+                    log "    The full curl debug output was:"
+                    sed -e "s/$(printf '\r')//" -e "s/^/$(date):          /" -e '$a\' "${mar_headers_debug_file}"
+                    log ""
+                    ;;
+
+                *)
+                    log "ERROR: Unknown failure code - '${failure_code}'"
+                    log "ERROR: This is a serious bug in this script."
+                    log "ERROR: Only known failure codes are: UPDATE_XML_UNAVAILABLE, UPDATE_XML_REDIRECT_FAILED, PATCH_TYPE_MISSING, NO_MAR_FILE, MAR_FILE_WRONG_SIZE, BAD_HTTP_RESPONSE_CODE_FOR_MAR"
+                    log ""
+                    log "FAILURE $((++failure)): Data from failure is: ${entry1} ${entry2} ${entry3} ${entry4} ${entry5} ${entry6}"
+                    log ""
+                    ;;
+
+            esac
+        done < "${TMPDIR}/${failure_file}"
+    done
+    exit_code=1
+fi
+
+
+log ''
+log '===================================='
+log 'KEY STATS'
+log '===================================='
+log ''
+log "Config files scanned:                       ${#@}"
+log "Update xml files downloaded and parsed:     ${number_of_update_xml_urls}"
+log "Unique mar urls found:                      ${number_of_mar_urls}"
+log "Failures:                                   ${number_of_failures}"
+log "Parallel processes used (maximum limit):    ${MAX_PROCS}"
+log "Execution time:                             $((STOP_TIME-START_TIME)) seconds"
+log ''
+
+rm -rf "${TMPDIR}"
+exit ${exit_code}
new file mode 100755
--- /dev/null
+++ b/tools/update-verify/release/get-update-xml.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+
+update_xml_url="${1}"
+patch_types="${2}"
+update_xml="$(mktemp -t update.xml.XXXXXXXXXX)"
+update_xml_headers="$(mktemp -t update.xml.headers.XXXXXXXXXX)"
+update_xml_debug="$(mktemp -t update.xml.debug.XXXXXXXXXX)"
+curl --retry 50 --retry-max-time 300 -k -s -D "${update_xml_headers}" -L -v "${update_xml_url}" > "${update_xml}" 2>"${update_xml_debug}"
+update_xml_curl_exit_code=$?
+# Extract the redirect target (if any) before branching, so the failure branch
+# below can tell a failed redirect apart from a plain download failure.
+update_xml_actual_url="$(sed -e "s/$(printf '\r')//" -n -e 's/^Location: //p' "${update_xml_headers}" | tail -1)"
+if [ "${update_xml_curl_exit_code}" == 0 ]
+then
+    [ -n "${update_xml_actual_url}" ] && update_xml_url_with_redirects="${update_xml_url} => ${update_xml_actual_url}" || update_xml_url_with_redirects="${update_xml_url}"
+    echo "$(date):  Downloaded update.xml file from ${update_xml_url_with_redirects}" > "$(mktemp -t log.XXXXXXXXXX)"
+    for patch_type in ${patch_types//,/ }
+    do
+        mar_url_and_size="$(sed -e 's/\&amp;/\&/g' -n -e 's/.*<patch .*type="'"${patch_type}"'".* URL="\([^"]*\)".*size="\([^"]*\)".*/\1 \2/p' "${update_xml}" | tail -1)"
+        if [ -z "${mar_url_and_size}" ]
+        then
+            echo "$(date):  FAILURE: No patch type '${patch_type}' found in update.xml from ${update_xml_url_with_redirects}" > "$(mktemp -t log.XXXXXXXXXX)"
+            echo "PATCH_TYPE_MISSING ${update_xml_url} ${patch_type} ${update_xml} ${update_xml_headers} ${update_xml_debug} ${update_xml_actual_url}" > "$(mktemp -t failure.XXXXXXXXXX)"
+        else
+            echo "$(date):  Mar url and file size for patch type '${patch_type}' extracted from ${update_xml_url_with_redirects} (${mar_url_and_size})" > "$(mktemp -t log.XXXXXXXXXX)"
+            echo "${mar_url_and_size} ${update_xml_url} ${patch_type} ${update_xml_actual_url}" > "$(mktemp -t update_xml_to_mar.XXXXXXXXXX)"
+        fi
+    done
+else
+    if [ -z "${update_xml_actual_url}" ]
+    then
+        echo "$(date):  FAILURE: Could not retrieve update.xml from ${update_xml_url} for patch type(s) '${patch_types}'" > "$(mktemp -t log.XXXXXXXXXX)"
+        echo "UPDATE_XML_UNAVAILABLE ${update_xml_url} ${update_xml} ${update_xml_headers} ${update_xml_debug} ${update_xml_curl_exit_code}" > "$(mktemp -t failure.XXXXXXXXXX)"
+    else
+        echo "$(date):  FAILURE: update.xml from ${update_xml_url} redirected to ${update_xml_actual_url} but could not retrieve update.xml from here" > "$(mktemp -t log.XXXXXXXXXX)"
+        echo "UPDATE_XML_REDIRECT_FAILED ${update_xml_url} ${update_xml_actual_url} ${update_xml} ${update_xml_headers} ${update_xml_debug} ${update_xml_curl_exit_code}" > "$(mktemp -t failure.XXXXXXXXXX)"
+    fi
+fi
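+
+# Hypothetical usage sketch (URL and patch types are illustrative, not taken
+# from a real config):
+#   ./get-update-xml.sh "https://aus4.mozilla.org/update/3/Firefox/64.0/20181203000000/Linux_x86_64-gcc3/en-US/release/default/default/default/update.xml?force=1" complete,partial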
new file mode 100644
--- /dev/null
+++ b/tools/update-verify/release/mar_certs/README
@@ -0,0 +1,24 @@
+These certificates are imported from mozilla-central (https://hg.mozilla.org/mozilla-central/file/tip/toolkit/mozapps/update/updater)
+and are used to support staging update verify jobs. These jobs replace the certificates within the binaries
+through a binary search and replace, so the old and new certificates must be the same length for this to work
+correctly. If these certificates are ever recreated and the resulting public certificates are no longer the
+same length, the commonName may be changed to line them up again. https://github.com/google/der-ascii is a
+useful tool for doing this. For example:
+
+To convert a certificate to ASCII:
+der2ascii -i dep1.der -o dep1.ascii
+
+Then use your favourite editor to change the commonName field. That block will look something like:
+    SEQUENCE {
+      SET {
+        SEQUENCE {
+          # commonName
+          OBJECT_IDENTIFIER { 2.5.4.3 }
+          PrintableString { "CI MAR signing key 1" }
+        }
+      }
+    }
+
+You can pad the PrintableString with spaces to increase the length of the cert (1 space = 1 byte).
+
+Then convert back to DER:
+ascii2der -i dep1.ascii -o newdep1.der
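+
+After recreating a certificate, it is worth double checking that the new file
+is the same length as the one it replaces, for example (file names are
+illustrative):
+wc -c dep1.der newdep1.der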
new file mode 100644
new file mode 100644
new file mode 100644
new file mode 100644
new file mode 100644
new file mode 100644
new file mode 100644
new file mode 100755
--- /dev/null
+++ b/tools/update-verify/release/replace-updater-certs.py
@@ -0,0 +1,33 @@
+import os.path
+import sys
+
+cert_dir = sys.argv[1]
+# Keep an unmodified copy of the original updater so we can compare it with
+# the patched version later. (bytes objects are immutable, so one read and an
+# assignment is enough.)
+updater_data = open(sys.argv[2], "rb").read()
+new_updater = updater_data
+outfile = sys.argv[3]
+
+cert_pairs = sys.argv[4:]
+
+if (len(cert_pairs) % 2) != 0:
+    print("Certs must be provided in pairs")
+    sys.exit(1)
+
+for find_cert, replace_cert in zip(*[iter(cert_pairs)]*2):
+    find = open(os.path.join(cert_dir, find_cert), "rb").read()
+    replace = open(os.path.join(cert_dir, replace_cert), "rb").read()
+    print("Looking for {}...".format(find_cert))
+    if find in new_updater:
+        print("Replacing {} with {}".format(find_cert, replace_cert))
+        new_updater = new_updater.replace(find, replace)
+    else:
+        print("Didn't find {}...".format(find_cert))
+
+if len(updater_data) != len(new_updater):
+    print("WARNING: new updater is not the same length as the old one (old: {}, new: {})".format(len(updater_data), len(new_updater)))
+
+if updater_data == new_updater:
+    print("WARNING: updater is unchanged")
+
+with open(outfile, 'wb+') as f:
+    f.write(new_updater)
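+
+# Example invocation, mirroring how verify.sh drives this script (paths and
+# cert pairings are illustrative):
+#   python replace-updater-certs.py ../mar_certs updater.orig updater \
+#       nightly_aurora_level3_primary.der dep1.der \
+#       nightly_aurora_level3_secondary.der dep2.der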
new file mode 100755
--- /dev/null
+++ b/tools/update-verify/release/test-mar-url.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+mar_url="${1}"
+mar_required_size="${2}"
+
+mar_headers_file="$(mktemp -t mar_headers.XXXXXXXXXX)"
+mar_headers_debug_file="$(mktemp -t mar_headers_debug.XXXXXXXXXX)"
+curl --retry 50 --retry-max-time 300 -k -s -i -r 0-2 -L -v "${mar_url}" > "${mar_headers_file}" 2>"${mar_headers_debug_file}"
+mar_file_curl_exit_code=$?
+
+# Bug 894368 - HTTP 408's are not handled by the "curl --retry" mechanism; in this case retry in bash
+attempts=1
+while [ "$((++attempts))" -lt 50 ] && grep 'HTTP/1\.1 408 Request Timeout' "${mar_headers_file}" &>/dev/null
+do
+    sleep 1
+    curl --retry 50 --retry-max-time 300 -k -s -i -r 0-2 -L -v "${mar_url}" > "${mar_headers_file}" 2>"${mar_headers_debug_file}"
+    mar_file_curl_exit_code=$?
+done
+
+# check that the file size matches what was written in update.xml
+# strip out DOS carriage returns from the headers if they occur
+# note: below, using $(printf '\r') for Darwin compatibility, rather than a simple '\r'
+# (i.e. the shell interprets '\r' rather than sed interpreting '\r')
+mar_actual_size="$(sed -e "s/$(printf '\r')//" -n -e 's/^Content-Range: bytes 0-2\///ip' "${mar_headers_file}" | tail -1)"
+mar_actual_url="$(sed -e "s/$(printf '\r')//" -n -e 's/^Location: //p' "${mar_headers_file}" | tail -1)"
+# note: below, sed -n '/^HTTP\//p' acts as grep '^HTTP/', but requires less overhead as sed already running
+http_response_code="$(sed -e "s/$(printf '\r')//" -n -e '/^HTTP\//p' "${mar_headers_file}" | tail -1)"
+
+[ -n "${mar_actual_url}" ] && mar_url_with_redirects="${mar_url} => ${mar_actual_url}" || mar_url_with_redirects="${mar_url}"
+
+if [ "${mar_actual_size}" == "${mar_required_size}" ]
+then
+    echo "$(date):  Mar file ${mar_url_with_redirects} available with correct size (${mar_actual_size} bytes)" > "$(mktemp -t log.XXXXXXXXXX)"
+elif [ -z "${mar_actual_size}" ]
+then
+    echo "$(date):  FAILURE: Could not retrieve http header for mar file from ${mar_url}" > "$(mktemp -t log.XXXXXXXXXX)"
+    echo "NO_MAR_FILE ${mar_url} ${mar_headers_file} ${mar_headers_debug_file} ${mar_file_curl_exit_code} ${mar_actual_url}" > "$(mktemp -t failure.XXXXXXXXXX)"
+    # If we get a response code (i.e. not an empty string), it better contain "206 Partial Content" or we should report on it.
+    # If response code is empty, this should be caught by a different block to this one (e.g. "could not retrieve http header").
+elif [ -n "${http_response_code}" ] && [ "${http_response_code}" == "${http_response_code/206 Partial Content/}" ]
+then
+    echo "$(date):  FAILURE: received a '${http_response_code}' response for mar file from ${mar_url} (expected HTTP 206 Partial Content)" > "$(mktemp -t log.XXXXXXXXXX)"
+    echo "BAD_HTTP_RESPONSE_CODE_FOR_MAR ${mar_url} ${mar_headers_file} ${mar_headers_debug_file} ${mar_file_curl_exit_code} ${mar_actual_url}" > "$(mktemp -t failure.XXXXXXXXXX)"
+else
+    echo "$(date):  FAILURE: Mar file incorrect size - should be ${mar_required_size} bytes, but is ${mar_actual_size} bytes - ${mar_url_with_redirects}" > "$(mktemp -t log.XXXXXXXXXX)"
+    echo "MAR_FILE_WRONG_SIZE ${mar_url} ${mar_required_size} ${mar_actual_size} ${mar_headers_file} ${mar_headers_debug_file} ${mar_file_curl_exit_code} ${mar_actual_url}" > "$(mktemp -t failure.XXXXXXXXXX)"
+fi
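+
+# Hypothetical usage sketch (URL and size are illustrative):
+#   ./test-mar-url.sh "https://download.example.com/firefox-64.0.complete.mar" 45678901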
new file mode 100755
--- /dev/null
+++ b/tools/update-verify/release/updates/verify.sh
@@ -0,0 +1,289 @@
+#!/bin/bash
+#set -x
+
+. ../common/cached_download.sh
+. ../common/unpack.sh
+. ../common/download_mars.sh
+. ../common/download_builds.sh
+. ../common/check_updates.sh
+
+clear_cache
+create_cache
+
+ftp_server_to="http://stage.mozilla.org/pub/mozilla.org"
+ftp_server_from="http://stage.mozilla.org/pub/mozilla.org"
+aus_server="https://aus4.mozilla.org"
+to=""
+to_build_id=""
+to_app_version=""
+to_display_version=""
+override_certs=""
+diff_summary_log="$PWD/diff-summary.log"
+if [ -e ${diff_summary_log} ]; then
+  rm ${diff_summary_log}
+fi
+touch ${diff_summary_log}
+
+pushd `dirname $0` &>/dev/null
+MY_DIR=$(pwd)
+popd &>/dev/null
+retry="$MY_DIR/../../buildfarm/utils/retry.py -s 1 -r 3"
+cert_replacer="$MY_DIR/../replace-updater-certs.py"
+
+dep_overrides="nightly_aurora_level3_primary.der dep1.der nightly_aurora_level3_secondary.der dep2.der release_primary.der dep1.der release_secondary.der dep2.der"
+nightly_overrides="dep1.der nightly_aurora_level3_primary.der dep2.der nightly_aurora_level3_secondary.der release_primary.der nightly_aurora_level3_primary.der release_secondary.der nightly_aurora_level3_secondary.der"
+release_overrides="dep1.der release_primary.der dep2.der release_secondary.der nightly_aurora_level3_primary.der release_primary.der nightly_aurora_level3_secondary.der release_secondary.der"
+
+runmode=0
+config_file="updates.cfg"
+UPDATE_ONLY=1
+TEST_ONLY=2
+MARS_ONLY=3
+COMPLETE=4
+
+usage()
+{
+  echo "Usage: verify.sh [OPTION] [CONFIG_FILE]"
+  echo "    -u, --update-only      only download update.xml"
+  echo "    -t, --test-only        only test that MARs exist"
+  echo "    -m, --mars-only        only test MARs"
+  echo "    -c, --complete         complete upgrade test"
+}
+
+if [ -z "$*" ]
+then
+  usage
+  exit 0
+fi
+
+pass_arg_count=0
+while [ "$#" -gt "$pass_arg_count" ]
+do
+  case "$1" in
+    -u | --update-only)
+      runmode=$UPDATE_ONLY
+      shift
+      ;;
+    -t | --test-only)
+      runmode=$TEST_ONLY
+      shift
+      ;;
+    -m | --mars-only)
+      runmode=$MARS_ONLY
+      shift
+      ;;
+    -c | --complete)
+      runmode=$COMPLETE
+      shift
+      ;;
+    *)
+      # Move the unrecognized arg to the end of the list
+      arg="$1"
+      shift
+      set -- "$@" "$arg"
+      pass_arg_count=`expr $pass_arg_count + 1`
+  esac
+done
+
+if [ -n "$arg" ]
+then
+  config_file=$arg
+  echo "Using config file $config_file"
+else
+  echo "Using default config file $config_file"
+fi
+
+if [ "$runmode" == "0" ]
+then
+  usage
+  exit 0
+fi
+
+while read entry
+do
+  # initialize all config variables
+  release="" 
+  product="" 
+  platform="" 
+  build_id="" 
+  locales=""
+  channel=""
+  from=""
+  patch_types="complete"
+  use_old_updater=0
+  mar_channel_IDs=""
+  updater_package=""
+  eval $entry
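+  # Each line of the config file is a set of shell variable assignments that
+  # is eval'd above. An illustrative (not real) entry:
+  #   release="64.0" product="firefox" platform="Linux_x86_64-gcc3" build_id="20181203000000" locales="de en-US" channel="beta-localtest" patch_types="complete partial"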
+
+  # the arguments for updater changed in Gecko 34/SeaMonkey 2.31
+  major_version=`echo $release | cut -f1 -d.`
+  if [[ "$product" == "seamonkey" ]]; then
+    minor_version=`echo $release | cut -f2 -d.`
+    if [[ $major_version -le 2 && $minor_version -lt 31 ]]; then
+      use_old_updater=1
+    fi
+  elif [[ $major_version -lt 34 ]]; then
+      use_old_updater=1
+  fi
+
+  # Note: cross platform tests seem to work for everything except Mac-on-Windows.
+  # We probably don't care about this use case.
+  if [[ "$updater_package" == "" ]]; then
+    updater_package="$from"
+  fi
+
+  for locale in $locales
+  do
+    rm -f update/partial.size update/complete.size
+    for patch_type in $patch_types
+    do
+      update_path="${product}/${release}/${build_id}/${platform}/${locale}/${channel}/default/default/default"
+      if [ "$runmode" == "$MARS_ONLY" ] || [ "$runmode" == "$COMPLETE" ] ||
+         [ "$runmode" == "$TEST_ONLY" ]
+      then
+        if [ "$runmode" == "$TEST_ONLY" ]
+        then
+          download_mars "${aus_server}/update/3/${update_path}/default/update.xml?force=1" ${patch_type} 1 \
+            "${to_build_id}" "${to_app_version}" "${to_display_version}"
+          err=$?
+        else
+          download_mars "${aus_server}/update/3/${update_path}/update.xml?force=1" ${patch_type} 0 \
+            "${to_build_id}" "${to_app_version}" "${to_display_version}"
+          err=$?
+        fi
+        if [ "$err" != "0" ]; then
+          echo "FAIL: [${release} ${locale} ${patch_type}] download_mars returned non-zero exit code: ${err}"
+          continue
+        fi
+      else
+        mkdir -p updates/${update_path}/complete
+        mkdir -p updates/${update_path}/partial
+        $retry wget --no-check-certificate -q -O updates/${update_path}/${patch_type}/update.xml "${aus_server}/update/3/${update_path}/update.xml?force=1"
+
+      fi
+      if [ "$runmode" == "$COMPLETE" ]
+      then
+        if [ -z "$from" ] || [ -z "$to" ]
+        then
+          continue
+        fi
+
+        updater_platform=""
+        updater_package_url=`echo "${ftp_server_from}${updater_package}" | sed "s/%locale%/${locale}/"`
+        updater_package_filename=`basename "$updater_package_url"`
+        case $updater_package_filename in
+          *dmg)
+            platform_dirname="*.app"
+            updater_bins="Contents/MacOS/updater.app/Contents/MacOS/updater Contents/MacOS/updater.app/Contents/MacOS/org.mozilla.updater"
+            updater_platform="mac"
+            ;;
+          *exe)
+            updater_package_url=`echo "${updater_package_url}" | sed "s/ja-JP-mac/ja/"`
+            platform_dirname="bin"
+            updater_bins="updater.exe"
+            updater_platform="win32"
+            ;;
+          *bz2)
+            updater_package_url=`echo "${updater_package_url}" | sed "s/ja-JP-mac/ja/"`
+            platform_dirname=`echo $product | tr '[A-Z]' '[a-z]'`
+            updater_bins="updater"
+            updater_platform="linux"
+            ;;
+          *)
+            echo "Couldn't detect updater platform"
+            exit 1
+            ;;
+        esac
+
+        rm -rf updater/*
+        cached_download "${updater_package_filename}" "${updater_package_url}"
+        unpack_build "$updater_platform" updater "$updater_package_filename" "$locale"
+
+        # Even on Windows, we want Unix-style paths for the updater, because of MSYS.
+        cwd=$(\ls -d $PWD/updater/$platform_dirname)
+        # Bug 1209376. Linux updater linked against other libraries in the installation directory
+        export LD_LIBRARY_PATH=$cwd
+        updater="null"
+        for updater_bin in $updater_bins; do
+            if [ -e "$cwd/$updater_bin" ]; then
+                echo "Found updater at $updater_bin"
+                updater="$cwd/$updater_bin"
+                break
+            fi
+        done
+
+        if [ ! -z "$override_certs" ]; then
+            echo "Replacing certs in updater binary"
+            cp "${updater}" "${updater}.orig"
+            case ${override_certs} in
+              dep)
+                overrides=${dep_overrides}
+                ;;
+              nightly)
+                overrides=${nightly_overrides}
+                ;;
+              release)
+                overrides=${release_overrides}
+                ;;
+              *)
+                echo "Unknown override cert '${override_certs}' - not replacing any certificates"
+                overrides=""
+                ;;
+            esac
+            python "${cert_replacer}" "${MY_DIR}/../mar_certs" "${updater}.orig" "${updater}" ${overrides}
+        else
+            echo "override_certs is '${override_certs}', not replacing any certificates"
+        fi
+
+        from_path=`echo $from | sed "s/%locale%/${locale}/"`
+        to_path=`echo $to | sed "s/%locale%/${locale}/"`
+        download_builds "${ftp_server_from}${from_path}" "${ftp_server_to}${to_path}"
+        err=$?
+        if [ "$err" != "0" ]; then
+          echo "FAIL: [$release $locale $patch_type] download_builds returned non-zero exit code: $err"
+          continue
+        fi
+        source_file=`basename "$from_path"`
+        target_file=`basename "$to_path"`
+        diff_file="results.diff"
+        if [ -e ${diff_file} ]; then
+          rm ${diff_file}
+        fi
+        check_updates "${platform}" "downloads/${source_file}" "downloads/${target_file}" ${locale} ${use_old_updater} ${updater} ${diff_file} ${channel} ${mar_channel_IDs}
+        err=$?
+        if [ "$err" == "0" ]; then
+          continue
+        elif [ "$err" == "1" ]; then
+          echo "FAIL: [$release $locale $patch_type] check_updates returned failure for $platform downloads/$source_file vs. downloads/$target_file: $err"
+        elif [ "$err" == "2" ]; then
+          echo "WARN: [$release $locale $patch_type] check_updates returned warning for $platform downloads/$source_file vs. downloads/$target_file: $err"
+        else
+          echo "FAIL: [$release $locale $patch_type] check_updates returned unknown error for $platform downloads/$source_file vs. downloads/$target_file: $err"
+        fi
+
+        if [ -s ${diff_file} ]; then
+          echo "Found diffs for ${patch_type} update from ${aus_server}/update/3/${update_path}/update.xml?force=1" >> ${diff_summary_log}
+          cat ${diff_file} >> ${diff_summary_log}
+          echo "" >> ${diff_summary_log}
+        fi
+      fi
+    done
+    if [ -f update/partial.size ] && [ -f update/complete.size ]; then
+        partial_size=`cat update/partial.size`
+        complete_size=`cat update/complete.size`
+        if [ $partial_size -gt $complete_size ]; then
+            echo "FAIL: [$release $locale $patch_type] partial updates are larger than complete updates"
+        elif [ $partial_size -eq $complete_size ]; then
+            echo "WARN: [$release $locale $patch_type] partial updates are the same size as complete updates, this should only happen for major updates"
+        else
+            echo "SUCCESS: [$release $locale $patch_type] partial updates are smaller than complete updates, all is well in the universe"
+        fi
+    fi
+  done
+done < $config_file
+
+clear_cache
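+
+# Typical invocation sketch: run the complete upgrade test against a config
+# file in this directory:
+#   ./verify.sh --complete updates.cfg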
new file mode 100644
--- /dev/null
+++ b/tools/update-verify/scripts/chunked-verify.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+
+import logging
+import os
+from os import path
+import sys
+from tempfile import mkstemp
+
+sys.path.append(path.join(path.dirname(__file__), "../python"))
+logging.basicConfig(
+    stream=sys.stdout, level=logging.INFO, format="%(message)s")
+log = logging.getLogger(__name__)
+
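+# These imports depend on the sys.path.append above, so they must stay below
+# it.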
+from mozrelease.update_verify import UpdateVerifyConfig
+from util.commands import run_cmd
+
+UPDATE_VERIFY_COMMAND = ["bash", "verify.sh", "-c"]
+UPDATE_VERIFY_DIR = path.join(
+    path.dirname(__file__), "../release/updates")
+
+
+if __name__ == "__main__":
+    from optparse import OptionParser
+    parser = OptionParser("")
+
+    parser.set_defaults(
+        configDict="updateChannels",
+        chunks=None,
+        thisChunk=None,
+    )
+    parser.add_option("--config-dict", dest="configDict")
+    parser.add_option("--verify-config", dest="verifyConfig")
+    parser.add_option("-t", "--release-tag", dest="releaseTag")
+    parser.add_option("-r", "--release-config", dest="releaseConfig")
+    parser.add_option("-p", "--platform", dest="platform")
+    parser.add_option("-C", "--release-channel", dest="release_channel")
+    parser.add_option("--verify-channel", dest="verify_channel")
+    parser.add_option("--chunks", dest="chunks", type="int")
+    parser.add_option("--this-chunk", dest="thisChunk", type="int")
+
+    options, args = parser.parse_args()
+    assert options.chunks and options.thisChunk, \
+        "chunks and this-chunk are required"
+    assert path.isfile(options.verifyConfig), "Update verify config must exist!"
+    verifyConfigFile = options.verifyConfig
+
+    fd, configFile = mkstemp()
+    fh = os.fdopen(fd, "w")
+    try:
+        verifyConfig = UpdateVerifyConfig()
+        verifyConfig.read(path.join(UPDATE_VERIFY_DIR, verifyConfigFile))
+        myVerifyConfig = verifyConfig.getChunk(
+            options.chunks, options.thisChunk)
+        # override the channel if explicitly set
+        if options.verify_channel:
+            myVerifyConfig.channel = options.verify_channel
+        myVerifyConfig.write(fh)
+        fh.close()
+        run_cmd(["cat", configFile])
+        run_cmd(UPDATE_VERIFY_COMMAND + [configFile], cwd=UPDATE_VERIFY_DIR)
+    finally:
+        if path.exists(configFile):
+            os.unlink(configFile)
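+
+# Example invocation (release-promotion style, mirroring chunked-verify.sh;
+# values are illustrative):
+#   python chunked-verify.py --verify-config update-verify.cfg \
+#       --chunks 12 --this-chunk 3 --verify-channel beta-localtest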
new file mode 100755
--- /dev/null
+++ b/tools/update-verify/scripts/chunked-verify.sh
@@ -0,0 +1,113 @@
+#!/bin/bash
+set -ex
+set -o pipefail
+# This ugly hack is a cross-platform (Linux/Mac/Windows+MSYS) way to get the
+# absolute path to the directory containing this script
+pushd `dirname $0` &>/dev/null
+MY_DIR=$(pwd)
+popd &>/dev/null
+SCRIPTS_DIR="$MY_DIR/.."
+PYTHON="/tools/python/bin/python"
+if [ ! -x $PYTHON ]; then
+    PYTHON=python
+fi
+JSONTOOL="$PYTHON $SCRIPTS_DIR/buildfarm/utils/jsontool.py"
+workdir=`pwd`
+
+platform=$1
+configDict=$2
+chunks=$3
+thisChunk=$4
+channel=$5
+
+if [ -n "$PROPERTIES_FILE" -a -f "$PROPERTIES_FILE" ]; then
+    # Buildbot only
+    if $JSONTOOL -k properties.NO_BBCONFIG $PROPERTIES_FILE; then
+       NO_BBCONFIG=$($JSONTOOL -k properties.NO_BBCONFIG $PROPERTIES_FILE);
+    fi
+    if $JSONTOOL -k properties.CHANNEL $PROPERTIES_FILE; then
+       CHANNEL=$($JSONTOOL -k properties.CHANNEL $PROPERTIES_FILE);
+    fi
+    if $JSONTOOL -k properties.VERIFY_CONFIG $PROPERTIES_FILE; then
+       VERIFY_CONFIG=$($JSONTOOL -k properties.VERIFY_CONFIG $PROPERTIES_FILE);
+    fi
+    if $JSONTOOL -k properties.TOTAL_CHUNKS $PROPERTIES_FILE; then
+       chunks=$($JSONTOOL -k properties.TOTAL_CHUNKS $PROPERTIES_FILE);
+    fi
+    if $JSONTOOL -k properties.THIS_CHUNK $PROPERTIES_FILE; then
+       thisChunk=$($JSONTOOL -k properties.THIS_CHUNK $PROPERTIES_FILE);
+    fi
+    if [ -z "$NO_BBCONFIG" -a -z "$BUILDBOT_CONFIGS" ]; then
+        export BUILDBOT_CONFIGS="https://hg.mozilla.org/build/buildbot-configs"
+    fi
+    # Get the assumed slavebuilddir, and read in from buildbot if this is not
+    # Release promotion
+    SLAVEBUILDDIR=$(basename $(cd "$SCRIPTS_DIR/.."; pwd))
+    if [ -z "$NO_BBCONFIG" ]; then
+        RELEASE_CONFIG=$($JSONTOOL -k properties.release_config $PROPERTIES_FILE)
+        TAG=$($JSONTOOL -k properties.release_tag $PROPERTIES_FILE)
+        SLAVEBUILDDIR=$($JSONTOOL -k properties.slavebuilddir $PROPERTIES_FILE)
+    fi
+
+    $PYTHON -u $SCRIPTS_DIR/buildfarm/maintenance/purge_builds.py \
+        -s 16 -n info -n 'rel-*' -n 'tb-rel-*' -n $SLAVEBUILDDIR
+fi
+
+if [ -n "$TASKCLUSTER_VERIFY_CONFIG" ]; then
+    wget -O "$SCRIPTS_DIR/release/updates/update-verify.cfg" "$TASKCLUSTER_VERIFY_CONFIG"
+    VERIFY_CONFIG="update-verify.cfg"
+fi
+
+if [ -z "$VERIFY_CONFIG" -a -n "$NO_BBCONFIG" ]; then
+    echo "Unable to run without VERIFY_CONFIG specified when using NO_BBCONFIG"
+    exit 1
+fi
+
+if [ -z "$NO_BBCONFIG" ]; then
+  $PYTHON $MY_DIR/chunked-verify.py -t $TAG -r $RELEASE_CONFIG \
+  -p $platform --chunks $chunks --this-chunk $thisChunk \
+  --config-dict $configDict --release-channel $channel \
+    2>&1 | tee $SCRIPTS_DIR/../verify_log.txt
+else
+  # release promotion
+  if [ -n "$CHANNEL" ]; then
+    EXTRA_PARAMS="--verify-channel $CHANNEL"
+  else
+    EXTRA_PARAMS=""
+  fi
+  $PYTHON $MY_DIR/chunked-verify.py --chunks $chunks --this-chunk $thisChunk \
+  --verify-config $VERIFY_CONFIG $EXTRA_PARAMS \
+  2>&1 | tee $SCRIPTS_DIR/../verify_log.txt
+fi
+
+print_failed_msg()
+{
+  echo "-------------------------"
+  echo "This run has failed, see the above log"
+  echo
+  return 1
+}
+
+print_warning_msg()
+{
+  echo "-------------------------"
+  echo "This run has warnings, see the above log"
+  echo
+  return 2
+}
+
+set +x
+
+echo "Scanning log for failures and warnings"
+echo "--------------------------------------"
+
+# Test for a failure; note we are running with set -e.
+# Grep returns 0 on a match and 1 on no match.
+# Testing for failures first is important because it's OK to mark the run as
+# failed when there are failures+warnings, but not OK to mark it as warnings
+# in the same situation.
+( ! grep 'FAIL:' $SCRIPTS_DIR/../verify_log.txt ) || print_failed_msg
+( ! grep 'WARN:' $SCRIPTS_DIR/../verify_log.txt ) || print_warning_msg
+
+echo "-------------------------"
+echo "All is well"