bug 1385946 Final update to funsize docker images r=catlee
author Simon Fraser <sfraser@mozilla.com>
Tue, 01 Aug 2017 22:23:50 +0100
changeset 372331 e1b93d2abdb2968279ea2512aded7bd77c53a1c7
parent 372330 469a6a99593ace50cead3a5927142582825f024e
child 372332 d5210a2cb19a4c33200ba2219457226cc9cbd5b2
push id 32273
push user kwierso@gmail.com
push date Wed, 02 Aug 2017 22:48:18 +0000
treeherder mozilla-central@4c7317211990
reviewers catlee
bugs 1385946
milestone 56.0a1
bug 1385946 Final update to funsize docker images r=catlee MozReview-Commit-ID: 39geOuOeco3
taskcluster/docker/funsize-balrog-submitter/Dockerfile
taskcluster/docker/funsize-balrog-submitter/requirements.txt
taskcluster/docker/funsize-balrog-submitter/scripts/funsize-balrog-submitter.py
taskcluster/docker/funsize-update-generator/Dockerfile
taskcluster/docker/funsize-update-generator/README
taskcluster/docker/funsize-update-generator/requirements.txt
taskcluster/docker/funsize-update-generator/scripts/funsize.py
--- a/taskcluster/docker/funsize-balrog-submitter/Dockerfile
+++ b/taskcluster/docker/funsize-balrog-submitter/Dockerfile
@@ -2,17 +2,18 @@ FROM ubuntu:vivid
 MAINTAINER Rail Aliiev <rail@mozilla.com>
 
 # Required software
 ENV DEBIAN_FRONTEND noninteractive
 # Chain apt-get commands with apt-get clean in a single docker RUN
 # to make sure that files are removed within a single docker layer
 RUN apt-get update -q && \
     apt-get install -yyq --no-install-recommends \
-    python mercurial curl python-boto python-setuptools python-cryptography && \
+    python mercurial curl python-boto python-setuptools python-cryptography \
+    python-dev gcc liblzma-dev && \
     apt-get clean
 
 COPY requirements.txt /tmp/
 # python-pip installs a lot of dependencies increasing the size of an image
 # drastically.
 RUN easy_install pip
 RUN pip install -r /tmp/requirements.txt
 
--- a/taskcluster/docker/funsize-balrog-submitter/requirements.txt
+++ b/taskcluster/docker/funsize-balrog-submitter/requirements.txt
@@ -1,1 +1,1 @@
-mar==2.1.1
+mar==2.1.2
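
The bump from mar 2.1.1 to 2.1.2 is what drives the script changes below: signature_type is read as an attribute rather than called, and verify()/get_keysize() are given certificate bytes, so the signing_certs dict now holds key material instead of file paths. A minimal standalone sketch of that usage pattern (the mardor import path and the helper name are assumptions for illustration, not part of this changeset):

    # Illustrative sketch only; assumes the "mar" package (mardor) at 2.1.2.
    from mardor.reader import MarReader

    def verify_mar(mar_path, cert_paths):
        # Read each certificate once; the dict stores raw key bytes, mirroring
        # the signing_certs change in the scripts below.
        certs = {name: open(path, 'rb').read()
                 for name, path in cert_paths.items()}
        with open(mar_path, 'rb') as mar_fh:
            m = MarReader(mar_fh)
            # signature_type is an attribute in mar 2.1.2, not a method call
            m.verify(verify_key=certs.get(m.signature_type))
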
--- a/taskcluster/docker/funsize-balrog-submitter/scripts/funsize-balrog-submitter.py
+++ b/taskcluster/docker/funsize-balrog-submitter/scripts/funsize-balrog-submitter.py
@@ -47,17 +47,17 @@ def download(url, dest, mode=None):
         log.debug("chmod %o %s", mode, dest)
         os.chmod(dest, mode)
 
 
 def verify_signature(mar, certs):
     log.info("Checking %s signature", mar)
     with open(mar, 'rb') as mar_fh:
         m = MarReader(mar_fh)
-        m.verify(verify_key=certs.get(m.signature_type()))
+        m.verify(verify_key=certs.get(m.signature_type))
 
 
 def verify_copy_to_s3(bucket_name, aws_access_key_id, aws_secret_access_key,
                       mar_url, mar_dest, signing_certs):
     conn = S3Connection(aws_access_key_id, aws_secret_access_key)
     bucket = conn.get_bucket(bucket_name)
     _, dest = tempfile.mkstemp()
     log.info("Downloading %s to %s...", mar_url, dest)
@@ -140,22 +140,22 @@ def main():
         uploads_enabled = False
     else:
         uploads_enabled = True
 
     manifest = json.load(open(args.manifest))
     auth = (balrog_username, balrog_password)
 
     signing_certs = {
-        'sha1': args.sha1_signing_cert,
-        'sha384': args.sha384_signing_cert,
+        'sha1': open(args.sha1_signing_cert, 'rb').read(),
+        'sha384': open(args.sha384_signing_cert, 'rb').read(),
     }
 
-    assert(get_keysize(open(signing_certs['sha1'], 'rb').read()) == 2048)
-    assert(get_keysize(open(signing_certs['sha384'], 'rb').read()) == 4096)
+    assert(get_keysize(signing_certs['sha1']) == 2048)
+    assert(get_keysize(signing_certs['sha384']) == 4096)
 
     for e in manifest:
         complete_info = [{
             "hash": e["to_hash"],
             "size": e["to_size"],
         }]
         partial_info = [{
             "hash": e["hash"],
--- a/taskcluster/docker/funsize-update-generator/Dockerfile
+++ b/taskcluster/docker/funsize-update-generator/Dockerfile
@@ -3,31 +3,34 @@ MAINTAINER Rail Aliiev <rail@mozilla.com
 
 # Required software
 ENV DEBIAN_FRONTEND noninteractive
 # Chain apt-get commands with apt-get clean in a single docker RUN
 # to make sure that files are removed within a single docker layer
 RUN apt-get update -q && \
     apt-get install -yyq --no-install-recommends \
     python python-setuptools python-cryptography  libgetopt-simple-perl \
-    bzip2 clamav clamav-freshclam python-requests python-sh curl && \
+    bzip2 clamav clamav-freshclam python-requests python-sh curl \
+    python-dev gcc liblzma-dev xz-utils && \
     apt-get clean
 RUN useradd -d /home/worker -s /bin/bash -m worker
 COPY requirements.txt /tmp/
+
+# Freshclam may be flaky, retry if it fails
+RUN for i in 1 2 3 4 5; do freshclam --verbose && break || sleep 15; done
+
 # python-pip installs a lot of dependencies increasing the size of an image
 # drastically. Using easy_install saves us almost 200M.
 RUN easy_install pip
 RUN pip install -r /tmp/requirements.txt
 
 # scripts
 RUN mkdir /home/worker/bin
 COPY scripts/* /home/worker/bin/
 COPY runme.sh /runme.sh
 RUN chmod 755 /home/worker/bin/* /runme.sh
 RUN mkdir /home/worker/keys
 COPY *.pubkey /home/worker/keys/
-# Freshclam may be flaky, retry if it fails
-RUN for i in 1 2 3 4 5; do freshclam --verbose && break || sleep 15; done
 
 ENV           HOME          /home/worker
 ENV           SHELL         /bin/bash
 ENV           USER          worker
 ENV           LOGNAME       worker
new file mode 100644
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/README
@@ -0,0 +1,7 @@
+
+To run this locally for testing/development purposes:
+
+1. Find a funsize generating task ID
+2. make pull DOCKERIO_USERNAME=mozillareleases
+3. docker run -t -e SHA1_SIGNING_CERT='nightly_sha1' -e SHA384_SIGNING_CERT='nightly_sha384' -e TASK_ID=LD5HUGP5QNeQdFKNTTuyCg mozillareleases/funsize-update-generator /runme.sh
+
--- a/taskcluster/docker/funsize-update-generator/requirements.txt
+++ b/taskcluster/docker/funsize-update-generator/requirements.txt
@@ -1,2 +1,2 @@
-mar==2.1.1
+mar==2.1.2
 redo
--- a/taskcluster/docker/funsize-update-generator/scripts/funsize.py
+++ b/taskcluster/docker/funsize-update-generator/scripts/funsize.py
@@ -31,22 +31,27 @@ ALLOWED_URL_PREFIXES = [
 DEFAULT_FILENAME_TEMPLATE = "{appName}-{branch}-{version}-{platform}-" \
                             "{locale}-{from_buildid}-{to_buildid}.partial.mar"
 
 
 def verify_signature(mar, certs):
     log.info("Checking %s signature", mar)
     with open(mar, 'rb') as mar_fh:
         m = MarReader(mar_fh)
-        m.verify(verify_key=certs.get(m.signature_type()))
+        m.verify(verify_key=certs.get(m.signature_type))
 
 
 def is_lzma_compressed_mar(mar):
     log.info("Checking %s for lzma compression", mar)
-    return MarReader(open(mar, 'rb')).compression_type() == 'xz'
+    result = MarReader(open(mar, 'rb')).compression_type == 'xz'
+    if result:
+        log.info("%s is lzma compressed", mar)
+    else:
+        log.info("%s is not lzma compressed", mar)
+    return result
 
 
 @redo.retriable()
 def download(url, dest, mode=None):
     log.debug("Downloading %s to %s", url, dest)
     r = requests.get(url)
     r.raise_for_status()
 
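
The is_lzma_compressed_mar() change above follows the same mar 2.1.2 API shift: compression_type is read as an attribute, and the patch adds logging of the result. A hedged standalone equivalent, written with a context manager so the file handle is closed (the import path is again an assumption):

    # Illustrative only; 'xz' marks the newer lzma-compressed MAR layout.
    from mardor.reader import MarReader

    def is_xz_mar(path):
        with open(path, 'rb') as fh:
            return MarReader(fh).compression_type == 'xz'
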
@@ -69,17 +74,17 @@ def download(url, dest, mode=None):
 
 def unpack(work_env, mar, dest_dir):
     os.mkdir(dest_dir)
     unwrap_cmd = sh.Command(os.path.join(work_env.workdir,
                                          "unwrap_full_update.pl"))
     log.debug("Unwrapping %s", mar)
     env = work_env.env
     if not is_lzma_compressed_mar(mar):
-        env['MAR_OLD_FORMAT'] = 1
+        env['MAR_OLD_FORMAT'] = '1'
     elif 'MAR_OLD_FORMAT' in env:
         del env['MAR_OLD_FORMAT']
     out = unwrap_cmd(mar, _cwd=dest_dir, _env=env, _timeout=240,
                      _err_to_out=True)
     if out:
         log.debug(out)
 
 
@@ -99,23 +104,23 @@ def get_option(directory, filename, sect
     config = ConfigParser.ConfigParser()
     config.read(f)
     rv = config.get(section, option)
     log.debug("Found %s", rv)
     return rv
 
 
 def generate_partial(work_env, from_dir, to_dir, dest_mar, channel_ids,
-                     version):
+                     version, use_old_format):
     log.debug("Generating partial %s", dest_mar)
     env = work_env.env
     env["MOZ_PRODUCT_VERSION"] = version
     env["MOZ_CHANNEL_ID"] = channel_ids
-    if not is_lzma_compressed_mar(dest_mar):
-        env['MAR_OLD_FORMAT'] = 1
+    if use_old_format:
+        env['MAR_OLD_FORMAT'] = '1'
     elif 'MAR_OLD_FORMAT' in env:
         del env['MAR_OLD_FORMAT']
     make_incremental_update = os.path.join(work_env.workdir,
                                            "make_incremental_update.sh")
     out = sh.bash(make_incremental_update, dest_mar, from_dir, to_dir,
                   _cwd=work_env.workdir, _env=env, _timeout=900,
                   _err_to_out=True)
     if out:
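
MAR_OLD_FORMAT switches from the int 1 to the string '1' because environment mappings handed to a child process must map strings to strings; a non-string value fails when the process is spawned. A self-contained illustration, separate from funsize.py:

    # Standalone illustration: child-process environments must be str -> str.
    import os
    import subprocess

    env = dict(os.environ)
    env['MAR_OLD_FORMAT'] = '1'   # the int 1 here would raise a TypeError
    subprocess.check_call(['sh', '-c', 'echo MAR_OLD_FORMAT=$MAR_OLD_FORMAT'],
                          env=env)

The patch also threads a use_old_format flag into generate_partial(), computed once from the compression of the "to" MAR, instead of re-checking the destination MAR, which does not exist yet at that point.
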
@@ -197,22 +202,22 @@ def main():
     args = parser.parse_args()
 
     logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s",
                         level=args.log_level)
     task = json.load(args.task_definition)
     # TODO: verify task["extra"]["funsize"]["partials"] with jsonschema
 
     signing_certs = {
-        'sha1': args.sha1_signing_cert,
-        'sha384': args.sha384_signing_cert,
+        'sha1': open(args.sha1_signing_cert, 'rb').read(),
+        'sha384': open(args.sha384_signing_cert, 'rb').read(),
     }
 
-    assert(get_keysize(open(signing_certs['sha1'], 'rb').read()) == 2048)
-    assert(get_keysize(open(signing_certs['sha384'], 'rb').read()) == 4096)
+    assert(get_keysize(signing_certs['sha1']) == 2048)
+    assert(get_keysize(signing_certs['sha384']) == 4096)
 
     if args.no_freshclam:
         log.info("Skipping freshclam")
     else:
         log.info("Refreshing clamav db...")
         try:
             redo.retry(lambda: sh.freshclam("--stdout", "--verbose",
                                             _timeout=300, _err_to_out=True))
@@ -223,24 +228,27 @@ def main():
     for e in task["extra"]["funsize"]["partials"]:
         for mar in (e["from_mar"], e["to_mar"]):
             verify_allowed_url(mar)
 
         work_env = WorkEnv()
         # TODO: run setup once
         work_env.setup()
         complete_mars = {}
+        use_old_format = False
         for mar_type, f in (("from", e["from_mar"]), ("to", e["to_mar"])):
             dest = os.path.join(work_env.workdir, "{}.mar".format(mar_type))
             unpack_dir = os.path.join(work_env.workdir, mar_type)
             download(f, dest)
             if not os.getenv("MOZ_DISABLE_MAR_CERT_VERIFICATION"):
                 verify_signature(dest, signing_certs)
             complete_mars["%s_size" % mar_type] = os.path.getsize(dest)
             complete_mars["%s_hash" % mar_type] = get_hash(dest)
+            if mar_type == 'to' and not is_lzma_compressed_mar(dest):
+                use_old_format = True
             unpack(work_env, dest, unpack_dir)
             log.info("AV-scanning %s ...", unpack_dir)
             sh.clamscan("-r", unpack_dir, _timeout=600, _err_to_out=True)
             log.info("Done.")
 
         path = os.path.join(work_env.workdir, "to")
         from_path = os.path.join(work_env.workdir, "from")
         mar_data = {
@@ -280,17 +288,18 @@ def main():
         mar_name = args.filename_template.format(**mar_data)
         mar_data["mar"] = mar_name
         dest_mar = os.path.join(work_env.workdir, mar_name)
         # TODO: download these once
         work_env.download_buildsystem_bits(repo=mar_data["repo"],
                                            revision=mar_data["revision"])
         generate_partial(work_env, from_path, path, dest_mar,
                          mar_data["ACCEPTED_MAR_CHANNEL_IDS"],
-                         mar_data["version"])
+                         mar_data["version"],
+                         use_old_format)
         mar_data["size"] = os.path.getsize(dest_mar)
         mar_data["hash"] = get_hash(dest_mar)
 
         shutil.copy(dest_mar, args.artifacts_dir)
         work_env.cleanup()
         manifest.append(mar_data)
     manifest_file = os.path.join(args.artifacts_dir, "manifest.json")
     with open(manifest_file, "w") as fp: