Bug 1247428 - move release promotion Dockerfiles in tree r=rail DONTBUILD
author: Kim Moir <kmoir@mozilla.com>
date: Fri, 04 Mar 2016 09:35:46 -0500
changeset: 323125:cb95e5ff55faf8d7ee19b80ab63d2d2946ae43fd
parent: 323124:77192f60c4a5992651c9a9c2550adf6dd3ece42f
child: 323126:6189f9cbefee6e44902b038ec7134d641f110e85
push id: 5913
push user: jlund@mozilla.com
push date: Mon, 25 Apr 2016 16:57:49 +0000
treeherder: mozilla-beta@dcaf0a6fa115
reviewers: rail
bugs: 1247428
milestone: 47.0a1
release/docker/funsize-balrog-submitter/Dockerfile
release/docker/funsize-balrog-submitter/Makefile
release/docker/funsize-balrog-submitter/dep.pubkey
release/docker/funsize-balrog-submitter/nightly.pubkey
release/docker/funsize-balrog-submitter/release.pubkey
release/docker/funsize-balrog-submitter/requirements.txt
release/docker/funsize-balrog-submitter/runme.sh
release/docker/funsize-balrog-submitter/scripts/funsize-balrog-submitter.py
release/docker/funsize-update-generator/Dockerfile
release/docker/funsize-update-generator/Makefile
release/docker/funsize-update-generator/dep.pubkey
release/docker/funsize-update-generator/nightly.pubkey
release/docker/funsize-update-generator/release.pubkey
release/docker/funsize-update-generator/requirements.txt
release/docker/funsize-update-generator/runme.sh
release/docker/funsize-update-generator/scripts/funsize.py
release/docker/funsize-update-generator/scripts/mbsdiff_hook.sh
new file mode 100644
--- /dev/null
+++ b/release/docker/funsize-balrog-submitter/Dockerfile
@@ -0,0 +1,33 @@
+FROM ubuntu:vivid
+MAINTAINER Rail Aliiev <rail@mozilla.com>
+
+# Required software
+ENV DEBIAN_FRONTEND noninteractive
+# Chain apt-get commands with apt-get clean in a single docker RUN
+# to make sure that files are removed within a single docker layer
+RUN apt-get update -q && \
+    apt-get install -yyq --no-install-recommends \
+    python mercurial curl python-boto python-setuptools python-cryptography && \
+    apt-get clean
+
+COPY requirements.txt /tmp/
+# python-pip installs a lot of dependencies, increasing the size of the image
+# drastically. Using easy_install instead saves us almost 200M.
+RUN easy_install pip
+RUN pip install -r /tmp/requirements.txt
+
+RUN hg clone https://hg.mozilla.org/build/tools /home/worker/tools
+
+RUN useradd -d /home/worker -s /bin/bash -m worker
+
+RUN mkdir /home/worker/bin
+COPY scripts/* /home/worker/bin/
+RUN mkdir /home/worker/keys
+COPY *.pubkey /home/worker/keys/
+COPY runme.sh /runme.sh
+RUN chmod 755 /home/worker/bin/* /runme.sh
+
+ENV           HOME          /home/worker
+ENV           SHELL         /bin/bash
+ENV           USER          worker
+ENV           LOGNAME       worker
new file mode 100644
--- /dev/null
+++ b/release/docker/funsize-balrog-submitter/Makefile
@@ -0,0 +1,17 @@
+DOCKERIO_USERNAME =$(error DOCKERIO_USERNAME should be set)
+IMAGE_NAME = funsize-balrog-submitter
+FULL_IMAGE_NAME = $(DOCKERIO_USERNAME)/$(IMAGE_NAME)
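+# Usage: make build DOCKERIO_USERNAME=<your Docker Hub username>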
+
+build:
+	docker build -t $(FULL_IMAGE_NAME) --no-cache --rm .
+
+push:
+	docker push $(FULL_IMAGE_NAME):latest
+
+pull:
+	docker pull $(FULL_IMAGE_NAME):latest
+
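+# Regenerate the *.pubkey files from the MAR signing certificates in
+# mozilla-central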
+update_pubkeys:
+	curl https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/nightly_aurora_level3_primary.der | openssl x509 -inform DER -pubkey -noout > nightly.pubkey
+	curl https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/dep1.der | openssl x509 -inform DER -pubkey -noout > dep.pubkey
+	curl https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/release_primary.der | openssl x509 -inform DER -pubkey -noout > release.pubkey
new file mode 100644
--- /dev/null
+++ b/release/docker/funsize-balrog-submitter/dep.pubkey
@@ -0,0 +1,9 @@
+-----BEGIN PUBLIC KEY-----
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzjHSobdeiQ3JHP/cCIOp
+WaX9y12rL5mIo9OR9bpqEZdD0yXJJJeZA887Mv8slqsM+qObMUpKvfEE6zyYPIZJ
+ANib31neI5BBYHhfhf2f5EnkilSYlmU3Gx+uRsmsdt58PpYe124tOAGgca/8bUy3
+eb6kUUTwvMI0oWQuPkGUaoHVQyj/bBMTrIkyF3UbfFtiX/SfOPvIoabNUe+pQHUe
+pqC2+RxzDGj+shTq/hYhtXlptFzsEEb2+0foLy0MY8C30dP2QqbM2iavvr/P8OcS
+Gm3H0TQcRzIEBzvPcIjiZi1nQj/r/3TlYRNCjuYT/HsNLXrB/U5Tc990jjAUJxdH
+0wIDAQAB
+-----END PUBLIC KEY-----
new file mode 100644
--- /dev/null
+++ b/release/docker/funsize-balrog-submitter/nightly.pubkey
@@ -0,0 +1,9 @@
+-----BEGIN PUBLIC KEY-----
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4j/IS3gWbyVVnWn4ZRnC
+Fuzb6VAaHa0I+4E504ekhVAhbKlSfBstkLbXajdjUVAJpn02zWnOaTl5KAdpDpIp
+SkdA4mK20ej3/Ij7gIt8IwaX+ArXL8mP84pxDn5BgaNADm3206Z6YQzc/TDYu529
+qkDFmLqNUVRJAhPO+qqhKHIcVGh8HUHXN6XV1qOFip+UU0M474jAGgurVmAv8Rh7
+VvM0v5KmB6V6WHwM5gwjg2yRY/o+xYIsNeSes9rpp+MOs/RnUA6LI4WZGY4YahvX
+VclIXBDgbWPYtojexIJkmYj8JIIRsh3eCsrRRe14fq7cBurp3CxBYMlDHf0RUoaq
+hQIDAQAB
+-----END PUBLIC KEY-----
new file mode 100644
--- /dev/null
+++ b/release/docker/funsize-balrog-submitter/release.pubkey
@@ -0,0 +1,9 @@
+-----BEGIN PUBLIC KEY-----
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvH4r94FpQ0gvr1hhTfV9
+NUeWPJ5CN6TZRq7v/Dc4nkJ1J4IP1B3UEii34tcNKpy1nKupiZuTT6T1zQYT+z5x
+3UkDF9qQboQ8RNb/BEz/cN3on/LTEnZ7YSraRL11M6cEB8mvmJxddCEquwqccRbs
+Usp8WUB7uRv1w6Anley7N9F/LE1iLPwJasZypRnzWb3aYsJy0cMFOYy+OXVdpktn
+qYqlNIjnt84u4Nil6UXnBbIJNUVOCY8wOFClNvVpubjPkWK1gtdWy3x/hJU5RpAO
+K9cnHxq4M/I4SUWTWO3r7yweQiHG4Jyoc7sP1jkwjBkSG93sDEycfwOdOoZft3wN
+sQIDAQAB
+-----END PUBLIC KEY-----
new file mode 100644
--- /dev/null
+++ b/release/docker/funsize-balrog-submitter/requirements.txt
@@ -0,0 +1,1 @@
+mar==1.2
new file mode 100644
--- /dev/null
+++ b/release/docker/funsize-balrog-submitter/runme.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+
+set -xe
+
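+# abort early (set -e above) when a required variable is unset;
+# SIGNING_CERT selects one of the bundled keys: dep, nightly or release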
+test $PARENT_TASK_ARTIFACTS_URL_PREFIX
+test $BALROG_API_ROOT
+test $SIGNING_CERT
+
+ARTIFACTS_DIR="/home/worker/artifacts"
+mkdir -p "$ARTIFACTS_DIR"
+
+curl --location --retry 10 --retry-delay 10 -o "$ARTIFACTS_DIR/manifest.json" \
+    "$PARENT_TASK_ARTIFACTS_URL_PREFIX/manifest.json"
+
+cat "$ARTIFACTS_DIR/manifest.json"
+python /home/worker/bin/funsize-balrog-submitter.py \
+    --artifacts-url-prefix "$PARENT_TASK_ARTIFACTS_URL_PREFIX" \
+    --manifest "$ARTIFACTS_DIR/manifest.json" \
+    -a "$BALROG_API_ROOT" \
+    --signing-cert "/home/worker/keys/${SIGNING_CERT}.pubkey" \
+    --verbose \
+    $EXTRA_BALROG_SUBMITTER_PARAMS
new file mode 100644
--- /dev/null
+++ b/release/docker/funsize-balrog-submitter/scripts/funsize-balrog-submitter.py
@@ -0,0 +1,207 @@
+#!/usr/bin/env python
+import os
+import logging
+import argparse
+import json
+import sys
+import hashlib
+import requests
+import tempfile
+from boto.s3.connection import S3Connection
+from mardor.marfile import MarFile
+
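+# balrog.submitter.cli and util.retry live in the build/tools clone created
+# in the Dockerfile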
+sys.path.insert(0, "/home/worker/tools/lib/python")
+
+from balrog.submitter.cli import NightlySubmitterV4, ReleaseSubmitterV4
+from util.retry import retry, retriable
+
+log = logging.getLogger(__name__)
+
+
+def get_hash(content, hash_type="md5"):
+    h = hashlib.new(hash_type)
+    h.update(content)
+    return h.hexdigest()
+
+
+@retriable()
+def download(url, dest, mode=None):
+    log.debug("Downloading %s to %s", url, dest)
+    r = requests.get(url)
+    r.raise_for_status()
+
+    bytes_downloaded = 0
+    with open(dest, 'wb') as fd:
+        for chunk in r.iter_content(4096):
+            fd.write(chunk)
+            bytes_downloaded += len(chunk)
+
+    log.debug('Downloaded %s bytes', bytes_downloaded)
+    if 'content-length' in r.headers:
+        log.debug('Content-Length: %s bytes', r.headers['content-length'])
+        if bytes_downloaded != int(r.headers['content-length']):
+            raise IOError('Unexpected number of bytes downloaded')
+
+    if mode:
+        log.debug("chmod %o %s", mode, dest)
+        os.chmod(dest, mode)
+
+
+def verify_signature(mar, signature):
+    log.info("Checking %s signature", mar)
+    m = MarFile(mar, signature_versions=[(1, signature)])
+    m.verify_signatures()
+
+
+def verify_copy_to_s3(bucket_name, aws_access_key_id, aws_secret_access_key,
+                      mar_url, mar_dest, signing_cert):
+    conn = S3Connection(aws_access_key_id, aws_secret_access_key)
+    bucket = conn.get_bucket(bucket_name)
+    _, dest = tempfile.mkstemp()
+    log.info("Downloading %s to %s...", mar_url, dest)
+    download(mar_url, dest)
+    log.info("Verifying the signature...")
+    if not os.getenv("MOZ_DISABLE_MAR_CERT_VERIFICATION"):
+        verify_signature(dest, signing_cert)
+    for name in possible_names(mar_dest, 10):
+        log.info("Checking if %s already exists", name)
+        key = bucket.get_key(name)
+        if not key:
+            log.info("Uploading to %s...", name)
+            key = bucket.new_key(name)
+            # There is a chance for race condition here. To avoid it we check
+            # the return value with replace=False. It should be not None.
+            length = key.set_contents_from_filename(dest, replace=False)
+            if length is None:
+                log.warn("Name race condition using %s, trying again...", name)
+                continue
+            else:
+                # key.make_public() may lead to race conditions, because
+                # it doesn't pass version_id, so it may not set permissions
+                bucket.set_canned_acl(acl_str='public-read', key_name=name,
+                                      version_id=key.version_id)
+                # Use explicit version_id to avoid using "latest" version
+                return key.generate_url(expires_in=0, query_auth=False,
+                                        version_id=key.version_id)
+        else:
+            if get_hash(key.get_contents_as_string()) == \
+                    get_hash(open(dest).read()):
+                log.info("%s has the same MD5 checksum, not uploading...",
+                         name)
+                return key.generate_url(expires_in=0, query_auth=False,
+                                        version_id=key.version_id)
+            log.info("%s already exists with different checksum, "
+                     "trying another one...", name)
+
+    raise RuntimeError("Cannot generate a unique name for %s", mar_dest)
+
+
+def possible_names(initial_name, amount):
+    """Generate names appending counter before extension"""
+    prefix, ext = os.path.splitext(initial_name)
+    return [initial_name] + ["{}-{}{}".format(prefix, n, ext) for n in
+                             range(1, amount + 1)]
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--artifacts-url-prefix", required=True,
+                        help="URL prefix for MAR")
+    parser.add_argument("--manifest", required=True)
+    parser.add_argument("-a", "--api-root", required=True,
+                        help="Balrog API root")
+    parser.add_argument("-d", "--dummy", action="store_true",
+                        help="Add '-dummy' suffix to branch name")
+    parser.add_argument("--signing-cert", required=True)
+    parser.add_argument("-v", "--verbose", action="store_const",
+                        dest="loglevel", const=logging.DEBUG,
+                        default=logging.INFO)
+    args = parser.parse_args()
+    logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s",
+                        level=args.loglevel)
+    logging.getLogger("requests").setLevel(logging.WARNING)
+    logging.getLogger("boto").setLevel(logging.WARNING)
+
+    balrog_username = os.environ.get("BALROG_USERNAME")
+    balrog_password = os.environ.get("BALROG_PASSWORD")
+    if not balrog_username or not balrog_password:
+        raise RuntimeError("BALROG_USERNAME and BALROG_PASSWORD environment "
+                           "variables should be set")
+
+    s3_bucket = os.environ.get("S3_BUCKET")
+    aws_access_key_id = os.environ.get("AWS_ACCESS_KEY_ID")
+    aws_secret_access_key = os.environ.get("AWS_SECRET_ACCESS_KEY")
+    if not (s3_bucket and aws_access_key_id and aws_secret_access_key):
+        log.warn("Skipping S3 uploads...")
+        uploads_enabled = False
+    else:
+        uploads_enabled = True
+
+    manifest = json.load(open(args.manifest))
+    auth = (balrog_username, balrog_password)
+
+    for e in manifest:
+        complete_info = [{
+            "hash": e["to_hash"],
+            "size": e["to_size"],
+        }]
+        partial_info = [{
+            "hash": e["hash"],
+            "size": e["size"],
+        }]
+
+        if "previousVersion" in e and "previousBuildNumber" in e:
+            log.info("Release style balrog submission")
+            partial_info[0]["previousVersion"] = e["previousVersion"]
+            partial_info[0]["previousBuildNumber"] = e["previousBuildNumber"]
+            submitter = ReleaseSubmitterV4(api_root=args.api_root, auth=auth,
+                                           dummy=args.dummy)
+            retry(lambda: submitter.run(
+                platform=e["platform"], productName=e["appName"],
+                version=e["toVersion"],
+                build_number=e["toBuildNumber"],
+                appVersion=e["version"], extVersion=e["version"],
+                buildID=e["to_buildid"], locale=e["locale"],
+                hashFunction='sha512',
+                partialInfo=partial_info, completeInfo=complete_info,
+            ))
+        elif "from_buildid" in e and uploads_enabled:
+            log.info("Nightly style balrog submission")
+            partial_mar_url = "{}/{}".format(args.artifacts_url_prefix,
+                                             e["mar"])
+            complete_mar_url = e["to_mar"]
+            dest_prefix = "{branch}/{buildid}".format(
+                branch=e["branch"], buildid=e["to_buildid"])
+            partial_mar_dest = "{}/{}".format(dest_prefix, e["mar"])
+            complete_mar_filename = "{appName}-{branch}-{version}-" \
+                                    "{platform}-{locale}.complete.mar"
+            complete_mar_filename = complete_mar_filename.format(
+                appName=e["appName"], branch=e["branch"],
+                version=e["version"], platform=e["platform"],
+                locale=e["locale"]
+            )
+            complete_mar_dest = "{}/{}".format(dest_prefix,
+                                               complete_mar_filename)
+            partial_info[0]["url"] = verify_copy_to_s3(
+                s3_bucket, aws_access_key_id, aws_secret_access_key,
+                partial_mar_url, partial_mar_dest, args.signing_cert)
+            complete_info[0]["url"] = verify_copy_to_s3(
+                s3_bucket, aws_access_key_id, aws_secret_access_key,
+                complete_mar_url, complete_mar_dest, args.signing_cert)
+            partial_info[0]["from_buildid"] = e["from_buildid"]
+            submitter = NightlySubmitterV4(api_root=args.api_root, auth=auth,
+                                           dummy=args.dummy)
+            retry(lambda: submitter.run(
+                platform=e["platform"], buildID=e["to_buildid"],
+                productName=e["appName"], branch=e["branch"],
+                appVersion=e["version"], locale=e["locale"],
+                hashFunction='sha512', extVersion=e["version"],
+                partialInfo=partial_info, completeInfo=complete_info),
+                attempts=30, sleeptime=10, max_sleeptime=60,
+            )
+        else:
+            raise RuntimeError("Cannot determine Balrog submission style")
+
+if __name__ == '__main__':
+    main()
new file mode 100644
--- /dev/null
+++ b/release/docker/funsize-update-generator/Dockerfile
@@ -0,0 +1,32 @@
+FROM ubuntu:vivid
+MAINTAINER Rail Aliiev <rail@mozilla.com>
+
+# Required software
+ENV DEBIAN_FRONTEND noninteractive
+# Chain apt-get commands with apt-get clean in a single docker RUN
+# to make sure that files are removed within a single docker layer
+RUN apt-get update -q && \
+    apt-get install -yyq --no-install-recommends \
+    python python-setuptools python-cryptography libgetopt-simple-perl \
+    bzip2 clamav clamav-freshclam python-requests python-sh curl && \
+    apt-get clean
+RUN useradd -d /home/worker -s /bin/bash -m worker
+COPY requirements.txt /tmp/
+# python-pip installs a lot of dependencies increasing the size of an image
+# drastically. Using easy_install saves us almost 200M.
+RUN easy_install pip
+RUN pip install -r /tmp/requirements.txt
+
+# scripts
+RUN mkdir /home/worker/bin
+COPY scripts/* /home/worker/bin/
+COPY runme.sh /runme.sh
+RUN chmod 755 /home/worker/bin/* /runme.sh
+RUN mkdir /home/worker/keys
+COPY *.pubkey /home/worker/keys/
+RUN freshclam --verbose
+
+ENV           HOME          /home/worker
+ENV           SHELL         /bin/bash
+ENV           USER          worker
+ENV           LOGNAME       worker
new file mode 100644
--- /dev/null
+++ b/release/docker/funsize-update-generator/Makefile
@@ -0,0 +1,17 @@
+DOCKERIO_USERNAME =$(error DOCKERIO_USERNAME should be set)
+IMAGE_NAME = funsize-update-generator
+FULL_IMAGE_NAME = $(DOCKERIO_USERNAME)/$(IMAGE_NAME)
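+# Usage: make build DOCKERIO_USERNAME=<your Docker Hub username>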
+
+build:
+	docker build -t $(FULL_IMAGE_NAME) --no-cache --rm .
+
+push:
+	docker push $(FULL_IMAGE_NAME):latest
+
+pull:
+	docker pull $(FULL_IMAGE_NAME):latest
+
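+# Regenerate the *.pubkey files from the MAR signing certificates in
+# mozilla-central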
+update_pubkeys:
+	curl https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/nightly_aurora_level3_primary.der | openssl x509 -inform DER -pubkey -noout > nightly.pubkey
+	curl https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/dep1.der | openssl x509 -inform DER -pubkey -noout > dep.pubkey
+	curl https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/release_primary.der | openssl x509 -inform DER -pubkey -noout > release.pubkey
new file mode 100644
--- /dev/null
+++ b/release/docker/funsize-update-generator/dep.pubkey
@@ -0,0 +1,9 @@
+-----BEGIN PUBLIC KEY-----
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzjHSobdeiQ3JHP/cCIOp
+WaX9y12rL5mIo9OR9bpqEZdD0yXJJJeZA887Mv8slqsM+qObMUpKvfEE6zyYPIZJ
+ANib31neI5BBYHhfhf2f5EnkilSYlmU3Gx+uRsmsdt58PpYe124tOAGgca/8bUy3
+eb6kUUTwvMI0oWQuPkGUaoHVQyj/bBMTrIkyF3UbfFtiX/SfOPvIoabNUe+pQHUe
+pqC2+RxzDGj+shTq/hYhtXlptFzsEEb2+0foLy0MY8C30dP2QqbM2iavvr/P8OcS
+Gm3H0TQcRzIEBzvPcIjiZi1nQj/r/3TlYRNCjuYT/HsNLXrB/U5Tc990jjAUJxdH
+0wIDAQAB
+-----END PUBLIC KEY-----
new file mode 100644
--- /dev/null
+++ b/release/docker/funsize-update-generator/nightly.pubkey
@@ -0,0 +1,9 @@
+-----BEGIN PUBLIC KEY-----
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4j/IS3gWbyVVnWn4ZRnC
+Fuzb6VAaHa0I+4E504ekhVAhbKlSfBstkLbXajdjUVAJpn02zWnOaTl5KAdpDpIp
+SkdA4mK20ej3/Ij7gIt8IwaX+ArXL8mP84pxDn5BgaNADm3206Z6YQzc/TDYu529
+qkDFmLqNUVRJAhPO+qqhKHIcVGh8HUHXN6XV1qOFip+UU0M474jAGgurVmAv8Rh7
+VvM0v5KmB6V6WHwM5gwjg2yRY/o+xYIsNeSes9rpp+MOs/RnUA6LI4WZGY4YahvX
+VclIXBDgbWPYtojexIJkmYj8JIIRsh3eCsrRRe14fq7cBurp3CxBYMlDHf0RUoaq
+hQIDAQAB
+-----END PUBLIC KEY-----
new file mode 100644
--- /dev/null
+++ b/release/docker/funsize-update-generator/release.pubkey
@@ -0,0 +1,9 @@
+-----BEGIN PUBLIC KEY-----
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvH4r94FpQ0gvr1hhTfV9
+NUeWPJ5CN6TZRq7v/Dc4nkJ1J4IP1B3UEii34tcNKpy1nKupiZuTT6T1zQYT+z5x
+3UkDF9qQboQ8RNb/BEz/cN3on/LTEnZ7YSraRL11M6cEB8mvmJxddCEquwqccRbs
+Usp8WUB7uRv1w6Anley7N9F/LE1iLPwJasZypRnzWb3aYsJy0cMFOYy+OXVdpktn
+qYqlNIjnt84u4Nil6UXnBbIJNUVOCY8wOFClNvVpubjPkWK1gtdWy3x/hJU5RpAO
+K9cnHxq4M/I4SUWTWO3r7yweQiHG4Jyoc7sP1jkwjBkSG93sDEycfwOdOoZft3wN
+sQIDAQAB
+-----END PUBLIC KEY-----
new file mode 100644
--- /dev/null
+++ b/release/docker/funsize-update-generator/requirements.txt
@@ -0,0 +1,2 @@
+mar==1.2
+redo
new file mode 100644
--- /dev/null
+++ b/release/docker/funsize-update-generator/runme.sh
@@ -0,0 +1,26 @@
+#!/bin/sh
+
+set -xe
+
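+# abort early (set -e above) when a required variable is unset;
+# SIGNING_CERT selects one of the bundled keys: dep, nightly or release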
+test $TASK_ID
+test $SIGNING_CERT
+
+ARTIFACTS_DIR="/home/worker/artifacts"
+mkdir -p "$ARTIFACTS_DIR"
+
+curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json \
+    "https://queue.taskcluster.net/v1/task/$TASK_ID"
+
+# enable local cache
+export MBSDIFF_HOOK="/home/worker/bin/mbsdiff_hook.sh -c /tmp/fs-cache"
+
+EXTRA_PARAMS=
+if [ -n "$FILENAME_TEMPLATE" ]; then
+    EXTRA_PARAMS="--filename-template $FILENAME_TEMPLATE"
+fi
+
+/home/worker/bin/funsize.py \
+    --artifacts-dir "$ARTIFACTS_DIR" \
+    --task-definition /home/worker/task.json \
+    --signing-cert "/home/worker/keys/${SIGNING_CERT}.pubkey" \
+    $EXTRA_PARAMS
new file mode 100755
--- /dev/null
+++ b/release/docker/funsize-update-generator/scripts/funsize.py
@@ -0,0 +1,263 @@
+#!/usr/bin/env python
+
+import ConfigParser
+import argparse
+import functools
+import hashlib
+import json
+import logging
+import os
+import shutil
+import tempfile
+import requests
+import sh
+
+import redo
+from mardor.marfile import MarFile
+
+log = logging.getLogger(__name__)
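+# MARs are only downloaded from known Mozilla hosts (see verify_allowed_url)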
+ALLOWED_URL_PREFIXES = [
+    "http://download.cdn.mozilla.net/pub/mozilla.org/firefox/nightly/",
+    "http://download.cdn.mozilla.net/pub/firefox/nightly/",
+    "https://mozilla-nightly-updates.s3.amazonaws.com",
+    "https://queue.taskcluster.net/",
+    "http://ftp.mozilla.org/",
+    "http://download.mozilla.org/",
+]
+
+DEFAULT_FILENAME_TEMPLATE = "{appName}-{branch}-{version}-{platform}-" \
+                            "{locale}-{from_buildid}-{to_buildid}.partial.mar"
+
+
+def verify_signature(mar, signature):
+    log.info("Checking %s signature", mar)
+    m = MarFile(mar, signature_versions=[(1, signature)])
+    m.verify_signatures()
+
+
+@redo.retriable()
+def download(url, dest, mode=None):
+    log.debug("Downloading %s to %s", url, dest)
+    r = requests.get(url)
+    r.raise_for_status()
+
+    bytes_downloaded = 0
+    with open(dest, 'wb') as fd:
+        for chunk in r.iter_content(4096):
+            fd.write(chunk)
+            bytes_downloaded += len(chunk)
+
+    log.debug('Downloaded %s bytes', bytes_downloaded)
+    if 'content-length' in r.headers:
+        log.debug('Content-Length: %s bytes', r.headers['content-length'])
+        if bytes_downloaded != int(r.headers['content-length']):
+            raise IOError('Unexpected number of bytes downloaded')
+
+    if mode:
+        log.debug("chmod %o %s", mode, dest)
+        os.chmod(dest, mode)
+
+
+def unpack(work_env, mar, dest_dir):
+    os.mkdir(dest_dir)
+    unwrap_cmd = sh.Command(os.path.join(work_env.workdir,
+                                         "unwrap_full_update.pl"))
+    log.debug("Unwrapping %s", mar)
+    out = unwrap_cmd(mar, _cwd=dest_dir, _env=work_env.env, _timeout=240,
+                     _err_to_out=True)
+    if out:
+        log.debug(out)
+
+
+def find_file(directory, filename):
+    log.debug("Searching for %s in %s", filename, directory)
+    for root, dirs, files in os.walk(directory):
+        if filename in files:
+            f = os.path.join(root, filename)
+            log.debug("Found %s", f)
+            return f
+
+
+def get_option(directory, filename, section, option):
+    log.debug("Exctracting [%s]: %s from %s/**/%s", section, option, directory,
+              filename)
+    f = find_file(directory, filename)
+    config = ConfigParser.ConfigParser()
+    config.read(f)
+    rv = config.get(section, option)
+    log.debug("Found %s", rv)
+    return rv
+
+
+def generate_partial(work_env, from_dir, to_dir, dest_mar, channel_ids,
+                     version):
+    log.debug("Generating partial %s", dest_mar)
+    env = work_env.env
+    env["MOZ_PRODUCT_VERSION"] = version
+    env["MOZ_CHANNEL_ID"] = channel_ids
+    make_incremental_update = os.path.join(work_env.workdir,
+                                           "make_incremental_update.sh")
+    out = sh.bash(make_incremental_update, dest_mar, from_dir, to_dir,
+                  _cwd=work_env.workdir, _env=env, _timeout=900,
+                  _err_to_out=True)
+    if out:
+        log.debug(out)
+
+
+def get_hash(path, hash_type="sha512"):
+    h = hashlib.new(hash_type)
+    with open(path, "rb") as f:
+        for chunk in iter(functools.partial(f.read, 4096), ''):
+            h.update(chunk)
+    return h.hexdigest()
+
+
+class WorkEnv(object):
+
+    def __init__(self):
+        self.workdir = tempfile.mkdtemp()
+
+    def setup(self):
+        self.download_unwrap()
+        self.download_martools()
+
+    def download_unwrap(self):
+        # unwrap_full_update.pl is not too sensitive to the revision
+        url = "https://hg.mozilla.org/mozilla-central/raw-file/default/" \
+            "tools/update-packaging/unwrap_full_update.pl"
+        download(url, dest=os.path.join(self.workdir, "unwrap_full_update.pl"),
+                 mode=0o755)
+
+    def download_buildsystem_bits(self, repo, revision):
+        prefix = "{repo}/raw-file/{revision}/tools/update-packaging"
+        prefix = prefix.format(repo=repo, revision=revision)
+        for f in ("make_incremental_update.sh", "common.sh"):
+            url = "{prefix}/{f}".format(prefix=prefix, f=f)
+            download(url, dest=os.path.join(self.workdir, f), mode=0o755)
+
+    def download_martools(self):
+        # TODO: check if the tools have to be branch specific
+        prefix = "https://ftp.mozilla.org/pub/mozilla.org/firefox/nightly/" \
+            "latest-mozilla-central/mar-tools/linux64"
+        for f in ("mar", "mbsdiff"):
+            url = "{prefix}/{f}".format(prefix=prefix, f=f)
+            download(url, dest=os.path.join(self.workdir, f), mode=0o755)
+
+    def cleanup(self):
+        shutil.rmtree(self.workdir)
+
+    @property
+    def env(self):
+        my_env = os.environ.copy()
+        my_env['LC_ALL'] = 'C'
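+        # the update-packaging scripts locate the downloaded mar/mbsdiff
+        # binaries through the MAR and MBSDIFF environment variables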
+        my_env['MAR'] = os.path.join(self.workdir, "mar")
+        my_env['MBSDIFF'] = os.path.join(self.workdir, "mbsdiff")
+        return my_env
+
+
+def verify_allowed_url(mar):
+    if not any(mar.startswith(prefix) for prefix in ALLOWED_URL_PREFIXES):
+        raise ValueError("{mar} is not in allowed URL prefixes: {p}".format(
+            mar=mar, p=ALLOWED_URL_PREFIXES
+        ))
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--artifacts-dir", required=True)
+    parser.add_argument("--signing-cert", required=True)
+    parser.add_argument("--task-definition", required=True,
+                        type=argparse.FileType('r'))
+    parser.add_argument("--filename-template",
+                        default=DEFAULT_FILENAME_TEMPLATE)
+    parser.add_argument("-q", "--quiet", dest="log_level",
+                        action="store_const", const=logging.WARNING,
+                        default=logging.DEBUG)
+    args = parser.parse_args()
+
+    logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s",
+                        level=args.log_level)
+    task = json.load(args.task_definition)
+    # TODO: verify task["extra"]["funsize"]["partials"] with jsonschema
+
+    log.info("Refreshing clamav db...")
+    redo.retry(lambda:
+               sh.freshclam("--stdout", "--verbose", _timeout=300, _err_to_out=True))
+    log.info("Done.")
+    manifest = []
+    for e in task["extra"]["funsize"]["partials"]:
+        for mar in (e["from_mar"], e["to_mar"]):
+            verify_allowed_url(mar)
+
+        work_env = WorkEnv()
+        # TODO: run setup once
+        work_env.setup()
+        complete_mars = {}
+        for mar_type, f in (("from", e["from_mar"]), ("to", e["to_mar"])):
+            dest = os.path.join(work_env.workdir, "{}.mar".format(mar_type))
+            unpack_dir = os.path.join(work_env.workdir, mar_type)
+            download(f, dest)
+            if not os.getenv("MOZ_DISABLE_MAR_CERT_VERIFICATION"):
+                verify_signature(dest, args.signing_cert)
+            complete_mars["%s_size" % mar_type] = os.path.getsize(dest)
+            complete_mars["%s_hash" % mar_type] = get_hash(dest)
+            unpack(work_env, dest, unpack_dir)
+            log.info("AV-scanning %s ...", unpack_dir)
+            sh.clamscan("-r", unpack_dir, _timeout=600, _err_to_out=True)
+            log.info("Done.")
+
+        path = os.path.join(work_env.workdir, "to")
+        from_path = os.path.join(work_env.workdir, "from")
+        mar_data = {
+            "ACCEPTED_MAR_CHANNEL_IDS": get_option(
+                path, filename="update-settings.ini", section="Settings",
+                option="ACCEPTED_MAR_CHANNEL_IDS"),
+            "version": get_option(path, filename="application.ini",
+                                  section="App", option="Version"),
+            "to_buildid": get_option(path, filename="application.ini",
+                                     section="App", option="BuildID"),
+            "from_buildid": get_option(from_path, filename="application.ini",
+                                       section="App", option="BuildID"),
+            "appName": get_option(from_path, filename="application.ini",
+                                  section="App", option="Name"),
+            # Use Gecko repo and rev from platform.ini, not application.ini
+            "repo": get_option(path, filename="platform.ini", section="Build",
+                               option="SourceRepository"),
+            "revision": get_option(path, filename="platform.ini",
+                                   section="Build", option="SourceStamp"),
+            "from_mar": e["from_mar"],
+            "to_mar": e["to_mar"],
+            "platform": e["platform"],
+            "locale": e["locale"],
+        }
+        for field in ("update_number", "previousVersion",
+                      "previousBuildNumber", "toVersion",
+                      "toBuildNumber"):
+            if field in e:
+                mar_data[field] = e[field]
+        mar_data.update(complete_mars)
+        # if branch not set explicitly use repo-name
+        mar_data["branch"] = e.get("branch",
+                                   mar_data["repo"].rstrip("/").split("/")[-1])
+        mar_name = args.filename_template.format(**mar_data)
+        mar_data["mar"] = mar_name
+        dest_mar = os.path.join(work_env.workdir, mar_name)
+        # TODO: download these once
+        work_env.download_buildsystem_bits(repo=mar_data["repo"],
+                                           revision=mar_data["revision"])
+        generate_partial(work_env, from_path, path, dest_mar,
+                         mar_data["ACCEPTED_MAR_CHANNEL_IDS"],
+                         mar_data["version"])
+        mar_data["size"] = os.path.getsize(dest_mar)
+        mar_data["hash"] = get_hash(dest_mar)
+
+        shutil.copy(dest_mar, args.artifacts_dir)
+        work_env.cleanup()
+        manifest.append(mar_data)
+    manifest_file = os.path.join(args.artifacts_dir, "manifest.json")
+    with open(manifest_file, "w") as fp:
+        json.dump(manifest, fp, indent=2, sort_keys=True)
+
+
+if __name__ == '__main__':
+    main()
new file mode 100755
--- /dev/null
+++ b/release/docker/funsize-update-generator/scripts/mbsdiff_hook.sh
@@ -0,0 +1,135 @@
+#!/bin/bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#
+# This tool contains functions that are to be used to handle/enable funsize
+# Author: Mihai Tabara
+#
+
+HOOK=
+SERVER_URL=
+LOCAL_CACHE_DIR=
+
+getsha512(){
+    echo "$(openssl sha512 "${1}" | awk '{print $2}')"
+}
+
+print_usage(){
+    echo "$(basename $0) -A SERVER-URL [-c LOCAL-CACHE-DIR-PATH] [-g] [-u] PATH-FROM-URL PATH-TO-URL PATH-PATCH"
+    echo "Script that saves/retrieves from cache presumptive patches as args"
+    echo ""
+    echo "-A SERVER-URL - host where to send the files"
+    echo "-c LOCAL-CACHE-DIR-PATH local path to which patches are cached"
+    echo "-g pre hook - tests whether patch already in cache"
+    echo "-u post hook - upload patch to cache for future use"
+    echo ""
+    echo "PATH-FROM-URL     : path on disk for source file"
+    echo "PATH-TO-URL       : path on disk for destination file"
+    echo "PATH-PATCH        : path on disk for patch between source and destination"
+}
+
+upload_patch(){
+    sha_from=`getsha512 "$1"`
+    sha_to=`getsha512 "$2"`
+    patch_path="$3"
+
+    # save to local cache first
+    if [ -n "$LOCAL_CACHE_DIR" ]; then
+        local_cmd="mkdir -p "$LOCAL_CACHE_DIR/$sha_from""
+        if `$local_cmd` >&2; then
+            cp -avf "$patch_path" "$LOCAL_CACHE_DIR/$sha_from/$sha_to"
+            echo "$patch_path saved on local cache!"
+        fi
+    fi
+    # The remote cache implementation is not used. The code is for usage
+    # reference only.
+    return 0
+
+    # send it over to funsize
+    cmd="curl -sSw %{http_code} -o /dev/null -X POST $SERVER_URL -F sha_from="$sha_from" -F sha_to="$sha_to" -F patch_file="@$patch_path""
+    ret_code=`$cmd`
+
+    if [ $ret_code -eq 200 ]; then
+        echo "$patch_path Successful uploaded to funsize!"
+        return 0
+    fi
+
+    echo "$patch_path Failed to be uploaded to funsize!"
+    return 1
+}
+
+get_patch(){
+    sha_from=`getsha512 "$1"`
+    sha_to=`getsha512 "$2"`
+    destination_file="$3"
+    tmp_file="$destination_file.tmp"
+
+    # try to retrieve from local cache first
+    if [ -r "$LOCAL_CACHE_DIR/$sha_from/$sha_to" ]; then
+        cp -avf "$LOCAL_CACHE_DIR/$sha_from/$sha_to" "$destination_file"
+        echo "Successful retrieved $destination_file from local cache!"
+        return 0
+    else
+        echo "File is not in the locale cache"
+        return 1
+    fi
+    # The remote cache implementation is not used. The code is for usage
+    # reference only.
+
+    # if unsuccessful, try to retrieve from funsize
+    cmd="curl -LsSGw %{http_code} $SERVER_URL/$sha_from/$sha_to -o $tmp_file"
+    ret_code=`$cmd`
+
+    if [ $ret_code -eq 200 ]; then
+        mv "$tmp_file" "$destination_file"
+        echo "Successful retrieved $destination_file from funsize!"
+        return 0
+    fi
+
+    rm -f "$tmp_file"
+    echo "Failed to retrieve $destination_file from funsize!"
+    return 1
+}
+
+OPTIND=1
+
+while getopts ":A:c:gu" option; do
+    case $option in
+        A)
+            SERVER_URL="$OPTARG"
+            ;;
+        c)
+            LOCAL_CACHE_DIR="$OPTARG"
+            ;;
+        g)
+            HOOK="PRE"
+            ;;
+        u)
+            HOOK="POST"
+            ;;
+        \?)
+            echo "Invalid option: -$OPTARG" >&2
+            print_usage
+            exit 1
+            ;;
+        :)
+            echo "Option -$OPTARG requires an argument." >&2
+            print_usage
+            exit 1
+            ;;
+        *)
+            echo "Unimplemented option: -$OPTARG" >&2
+            print_usage
+            exit 1
+            ;;
+    esac
+done
+shift $((OPTIND-1))
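+# the three remaining positional arguments are PATH-FROM-URL, PATH-TO-URL
+# and PATH-PATCH (see print_usage above)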
+
+if [ "$HOOK" == "PRE" ]; then
+    get_patch "$1" "$2" "$3"
+elif [ "$HOOK" == "POST" ]; then
+    upload_patch "$1" "$2" "$3"
+fi