bug 1338150 update to release-runner.py to support Fennec in a coming patch.
author Simon Fraser <sfraser@mozilla.com>
Mon, 13 Feb 2017 14:35:28 +0000
changeset 7308 c85a80e0c3e4bf070828e50eb57abb983ae7b695
parent 7298 df2f943710552f2379a0afc8af853d937e6ed7ce
child 7309 7adbdc3a85be868b35f4d15f8237d0227d0188fc
push id 5442
push user sfraser@mozilla.com
push date Mon, 13 Feb 2017 14:53:43 +0000
bugs 1338150
bug 1338150 update to release-runner.py to support Fennec in a coming patch.
buildfarm/release/release-runner.py
buildfarm/release/release-runner.yml.example
lib/python/kickoff/__init__.py
--- a/buildfarm/release/release-runner.py
+++ b/buildfarm/release/release-runner.py
@@ -29,17 +29,17 @@ from kickoff.sanity.partials import Part
 from kickoff.build_status import are_en_us_builds_completed
 from release.info import readBranchConfig
 from release.l10n import parsePlainL10nChangesets
 from release.versions import getAppVersion
 from taskcluster import Scheduler, Index, Queue
 from taskcluster.utils import slugId
 from util.hg import mercurial
 from util.retry import retry
-from util.file import load_config, get_config
+import yaml
 
 log = logging.getLogger(__name__)
 
 
 # both CHECKSUMS and ALL_FILES have been defined to improve the release sanity
 # en-US binaries timing by whitelisting artifacts of interest - bug 1251761
 CHECKSUMS = set([
     '.checksums',
@@ -54,53 +54,75 @@ ALL_FILES = set([
     '.exe',
     '.dmg',
     'i686.tar.bz2',
     'x86_64.tar.bz2',
 ])
 
 CONFIGS_WORKDIR = 'buildbot-configs'
 
-def run_prebuild_sanity_checks(release_runner):
+
+def check_and_assign_long_revision(release_runner, release):
+    # Revisions must be checked before trying to get the long one.
+    RevisionsSanitizer(**release).run()
+    release['mozillaRevision'] = long_revision(
+        release['branch'], release['mozillaRevision'])
+
+
+def assign_and_check_l10n_changesets(release_runner, release):
+    release['l10n_changesets'] = parsePlainL10nChangesets(
+        release_runner.get_release_l10n(release['name']))
+    L10nSanitizer(**release).run()
+
+
+def assign_and_check_partial_updates(release_runner, release):
+    release['partial_updates'] = get_partials(
+        release_runner, release['partials'], release['product'])
+    branchConfig = get_branch_config(release)
+    release['release_channels'] = update_channels(
+        release['version'], branchConfig['release_channel_mappings'])
+    PartialsSanitizer(**release).run()
+
+
+# So people can't run arbitrary functions
+CHECKS_MAPPING = {
+    'long_revision': check_and_assign_long_revision,
+    'l10n_changesets': assign_and_check_l10n_changesets,
+    'partial_updates': assign_and_check_partial_updates,
+}
+
+
+def run_prebuild_sanity_checks(release_runner, releases_config):
     new_valid_releases = []
+
+    # results in:
+    # { 'firefox': ['long_revision', 'l10n_changesets', 'partial_updates']}
+    checks = {r['product'].lower(): r['checks'] for r in releases_config}
+
     for release in release_runner.new_releases:
         log.info('Got a new release request: %s' % release)
         try:
             # TODO: this won't work for Thunderbird...do we care?
             release['branchShortName'] = release['branch'].split("/")[-1]
 
-            check_and_assign_long_revision(release)
-            assign_and_check_l10n_changesets(release_runner, release)
-            assign_and_check_partial_updates(release_runner, release)
+            for check in checks[release['product']]:
+                if check not in CHECKS_MAPPING:
+                    log.error("Check %s not found", check)
+                    continue
+                CHECKS_MAPPING[check](release_runner, release)
 
             new_valid_releases.append(release)
         except Exception as e:
-            release_runner.mark_as_failed(release, 'Sanity checks failed. Errors: %s' % e)
-            log.exception('Sanity checks failed. Errors: %s. Release: %s', e, release)
+            release_runner.mark_as_failed(
+                release, 'Sanity checks failed. Errors: %s' % e)
+            log.exception(
+                'Sanity checks failed. Errors: %s. Release: %s', e, release)
     return new_valid_releases
 
 
-def check_and_assign_long_revision(release):
-    # Revisions must be checked before trying to get the long one.
-    RevisionsSanitizer(**release).run()
-    release['mozillaRevision'] = long_revision(release['branch'], release['mozillaRevision'])
-
-
-def assign_and_check_l10n_changesets(release_runner, release):
-    release['l10n_changesets'] = parsePlainL10nChangesets(release_runner.get_release_l10n(release['name']))
-    L10nSanitizer(**release).run()
-
-
-def assign_and_check_partial_updates(release_runner, release):
-    release['partial_updates'] = get_partials(release_runner, release['partials'], release['product'])
-    branchConfig = get_branch_config(release)
-    release['release_channels'] = update_channels(release['version'], branchConfig['release_channel_mappings'])
-    PartialsSanitizer(**release).run()
-
-
 def get_branch_config(release):
     return readBranchConfig(path.join(CONFIGS_WORKDIR, "mozilla"), branch=release['branchShortName'])
 
 
 def update_channels(version, mappings):
     """Return a list of update channels for a version using version mapping
 
     >>> update_channels("40.0", [(r"^\d+\.0$", ["beta", "release"]), (r"^\d+\.\d+\.\d+$", ["release"])])
@@ -169,35 +191,39 @@ def download_all_artifacts(queue, artifa
 
 
 def validate_checksums(_dict, dir_path):
     for name in _dict.keys():
         filepath = os.path.join(dir_path, name)
         computed_hash = get_hash(filepath)
         correct_hash = _dict[name]
         if computed_hash != correct_hash:
-            log.error("failed to validate checksum for %s", name, exc_info=True)
+            log.error("failed to validate checksum for %s",
+                      name, exc_info=True)
             raise SanityException("Failed to check digest for %s" % name)
 
 
 def file_in_whitelist(artifact, whitelist):
     return any([artifact.endswith(x) for x in whitelist])
 
 
 def sanitize_en_US_binary(queue, task_id, gpg_key_path):
     # each platform en-US gets its own tempdir workground
     tempdir = tempfile.mkdtemp()
     log.debug('Temporary playground is %s', tempdir)
 
     # get all artifacts and trim but 'name' field from the json entries
-    all_artifacts = [k['name'] for k in queue.listLatestArtifacts(task_id)['artifacts']]
+    all_artifacts = [k['name']
+                     for k in queue.listLatestArtifacts(task_id)['artifacts']]
     # filter files to hold the whitelist-related only
-    artifacts = filter(lambda k: file_in_whitelist(k, ALL_FILES), all_artifacts)
+    artifacts = filter(lambda k: file_in_whitelist(
+        k, ALL_FILES), all_artifacts)
     # filter out everything but the checkums artifacts
-    checksums_artifacts = filter(lambda k: file_in_whitelist(k, CHECKSUMS), all_artifacts)
+    checksums_artifacts = filter(
+        lambda k: file_in_whitelist(k, CHECKSUMS), all_artifacts)
     other_artifacts = list(set(artifacts) - set(checksums_artifacts))
     # iterate in artifacts and grab checksums and its signature only
     log.info("Retrieve the checksums file and its signature ...")
     for artifact in checksums_artifacts:
         name = os.path.basename(artifact)
         build_url = queue.buildSignedUrl(
             'getLatestArtifact',
             task_id,
@@ -258,91 +284,95 @@ def validate_graph_kwargs(queue, gpg_key
         log.info('Performing release sanity for %s en-US binary', platform)
         sanitize_en_US_binary(queue, task_id, gpg_key_path)
 
     log.info("Release sanity for all en-US is now completed!")
 
 
 def main(options):
     log.info('Loading config from %s' % options.config)
-    config = load_config(options.config)
 
-    if config.getboolean('release-runner', 'verbose'):
+    with open(options.config, 'r') as config_file:
+        config = yaml.load(config_file)
+
+    if config['release-runner'].get('verbose', False):
         log_level = logging.DEBUG
     else:
         log_level = logging.INFO
     logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s",
                         level=log_level)
     # Suppress logging of retry(), see bug 925321 for the details
     logging.getLogger("util.retry").setLevel(logging.WARN)
 
-    # Shorthand
-    api_root = config.get('api', 'api_root')
-    username = config.get('api', 'username')
-    password = config.get('api', 'password')
-    buildbot_configs = config.get('release-runner', 'buildbot_configs')
-    buildbot_configs_branch = config.get('release-runner',
-                                         'buildbot_configs_branch')
-    sleeptime = config.getint('release-runner', 'sleeptime')
-    notify_from = get_config(config, 'release-runner', 'notify_from', None)
-    notify_to = get_config(config, 'release-runner', 'notify_to_announce', None)
-    docker_worker_key = get_config(config, 'release-runner',
-                                   'docker_worker_key', None)
-    signing_pvt_key = get_config(config, 'signing', 'pvt_key', None)
+    api_root = config['api']['api_root']
+    username = config['api']['username']
+    password = config['api']['password']
+
+    rr_config = config['release-runner']
+
+    buildbot_configs = rr_config['buildbot_configs']
+    buildbot_configs_branch = rr_config['buildbot_configs_branch']
+    sleeptime = rr_config['sleeptime']
+    notify_from = rr_config['notify_from']
+    notify_to = rr_config['notify_to_announce']
+    docker_worker_key = rr_config['docker_worker_key']
+    signing_pvt_key = config['signing']['pvt_key']
     if isinstance(notify_to, basestring):
         notify_to = [x.strip() for x in notify_to.split(',')]
-    smtp_server = get_config(config, 'release-runner', 'smtp_server',
-                             'localhost')
+    smtp_server = rr_config.get('smtp_server', 'localhost')
     tc_config = {
         "credentials": {
-            "clientId": get_config(config, "taskcluster", "client_id", None),
-            "accessToken": get_config(config, "taskcluster", "access_token", None),
+            "clientId": config['taskcluster'].get('client_id'),
+            "accessToken": config['taskcluster'].get('access_token'),
         }
     }
     # Extend tc_config for retries, see Bug 1293744
     # https://github.com/taskcluster/taskcluster-client.py/blob/0.0.24/taskcluster/client.py#L30
     # This is a stopgap until Bug 1259627 is fixed.
     retrying_tc_config = tc_config.copy()
     retrying_tc_config.update({"maxRetries": 12})
-    balrog_username = get_config(config, "balrog", "username", None)
-    balrog_password = get_config(config, "balrog", "password", None)
-    extra_balrog_submitter_params = get_config(config, "balrog", "extra_balrog_submitter_params", None)
-    beetmover_aws_access_key_id = get_config(config, "beetmover", "aws_access_key_id", None)
-    beetmover_aws_secret_access_key = get_config(config, "beetmover", "aws_secret_access_key", None)
-    gpg_key_path = get_config(config, "signing", "gpg_key_path", None)
+    balrog_username = config['balrog']["username"]
+    balrog_password = config["balrog"]["password"]
+    extra_balrog_submitter_params = config["balrog"]["extra_balrog_submitter_params"]
+    beetmover_aws_access_key_id = config["beetmover"]["aws_access_key_id"]
+    beetmover_aws_secret_access_key = config["beetmover"]["aws_secret_access_key"]
+    gpg_key_path = config["signing"]["gpg_key_path"]
 
-    # TODO: replace release sanity with direct checks of en-US and l10n revisions (and other things if needed)
+    # TODO: replace release sanity with direct checks of en-US and l10n
+    # revisions (and other things if needed)
 
     rr = ReleaseRunner(api_root=api_root, username=username, password=password)
     scheduler = Scheduler(retrying_tc_config)
     index = Index(tc_config)
     queue = Queue(tc_config)
 
     # Main loop waits for new releases, processes them and exits.
     while True:
         try:
             log.debug('Fetching release requests')
-            rr.get_release_requests()
+            rr.get_release_requests([r['pattern'] for r in config['releases']])
             if rr.new_releases:
-                new_releases = run_prebuild_sanity_checks(rr)
+                new_releases = run_prebuild_sanity_checks(
+                    rr, config['releases'])
                 break
             else:
                 log.debug('Sleeping for %d seconds before polling again' %
                           sleeptime)
                 time.sleep(sleeptime)
         except:
             log.error("Caught exception when polling:", exc_info=True)
             sys.exit(5)
 
-    retry(mercurial, args=(buildbot_configs, CONFIGS_WORKDIR), kwargs=dict(branch=buildbot_configs_branch))
+    retry(mercurial, args=(buildbot_configs, CONFIGS_WORKDIR),
+          kwargs=dict(branch=buildbot_configs_branch))
 
-    if 'symlinks' in config.sections():
+    if 'symlinks' in config:
         format_dict = dict(buildbot_configs=CONFIGS_WORKDIR)
-        for target in config.options('symlinks'):
-            symlink = config.get('symlinks', target).format(**format_dict)
+        for target in config['symlinks']:
+            symlink = config['symlinks'].get(target).format(**format_dict)
             if path.exists(symlink):
                 log.warning("Skipping %s -> %s symlink" % (symlink, target))
             else:
                 log.info("Adding %s -> %s symlink" % (symlink, target))
                 os.symlink(target, symlink)
     rc = 0
     for release in new_releases:
         branchConfig = get_branch_config(release)
@@ -359,37 +389,43 @@ def main(options):
                 c for c in release_channels if c not in branchConfig.get('mirror_requiring_channels', [])
             ]
             publish_to_balrog_channels = [
                 c for c in release_channels if c not in branchConfig.get('mirror_requiring_channels', [])
             ]
             push_to_releases_enabled = False
             postrelease_mark_as_shipped_enabled = False
         else:
-            postrelease_enabled = branchConfig['postrelease_version_bump_enabled']
-            postrelease_bouncer_aliases_enabled = branchConfig['postrelease_bouncer_aliases_enabled']
-            postrelease_mark_as_shipped_enabled = branchConfig['postrelease_mark_as_shipped_enabled']
+            postrelease_enabled = branchConfig[
+                'postrelease_version_bump_enabled']
+            postrelease_bouncer_aliases_enabled = branchConfig[
+                'postrelease_bouncer_aliases_enabled']
+            postrelease_mark_as_shipped_enabled = branchConfig[
+                'postrelease_mark_as_shipped_enabled']
             final_verify_channels = release_channels
             publish_to_balrog_channels = release_channels
             push_to_releases_enabled = True
 
         ship_it_product_name = release['product']
         tc_product_name = branchConfig['stage_product'][ship_it_product_name]
         # XXX: Doesn't work with neither Fennec nor Thunderbird
         platforms = branchConfig['release_platforms']
 
         try:
             if not are_en_us_builds_completed(index, release_name=release['name'], submitted_at=release['submittedAt'],
-                                              branch=release['branchShortName'], revision=release['mozillaRevision'],
+                                              branch=release['branchShortName'], revision=release[
+                                                  'mozillaRevision'],
                                               tc_product_name=tc_product_name, platforms=platforms, queue=queue):
-                log.info('Builds are not completed yet, skipping release "%s" for now', release['name'])
+                log.info(
+                    'Builds are not completed yet, skipping release "%s" for now', release['name'])
                 rr.update_status(release, 'Waiting for builds to be completed')
                 continue
 
-            log.info('Every build is completed for release: %s', release['name'])
+            log.info('Every build is completed for release: %s',
+                     release['name'])
             graph_id = slugId()
 
             rr.update_status(release, 'Generating task graph')
 
             kwargs = {
                 "public_key": docker_worker_key,
                 "version": release["version"],
                 # ESR should not use "esr" suffix here:
@@ -398,28 +434,30 @@ def main(options):
                 "buildNumber": release["buildNumber"],
                 "source_enabled": True,
                 "checksums_enabled": True,
                 "repo_path": release["branch"],
                 "revision": release["mozillaRevision"],
                 "product": release["product"],
                 # if mozharness_revision is not passed, use 'revision'
                 "mozharness_changeset": release.get('mh_changeset') or release['mozillaRevision'],
-                "partial_updates": release['partial_updates'],
+                "partial_updates": release.get('partial_updates', list()),
                 "branch": release['branchShortName'],
                 "updates_enabled": bool(release["partials"]),
                 "l10n_config": get_l10n_config(
-                    index=index, product=release["product"], branch=release['branchShortName'],
+                    index=index, product=release[
+                        "product"], branch=release['branchShortName'],
                     revision=release['mozillaRevision'],
                     platforms=branchConfig['platforms'],
                     l10n_platforms=branchConfig['l10n_release_platforms'],
                     l10n_changesets=release['l10n_changesets']
                 ),
                 "en_US_config": get_en_US_config(
-                    index=index, product=release["product"], branch=release['branchShortName'],
+                    index=index, product=release[
+                        "product"], branch=release['branchShortName'],
                     revision=release['mozillaRevision'],
                     platforms=branchConfig['release_platforms']
                 ),
                 "verifyConfigs": {},
                 "balrog_api_root": branchConfig["balrog_api_root"],
                 "funsize_balrog_api_root": branchConfig["funsize_balrog_api_root"],
                 "balrog_username": balrog_username,
                 "balrog_password": balrog_password,
@@ -470,17 +508,18 @@ def main(options):
         except Exception as exception:
             # We explicitly do not raise an error here because there's no
             # reason not to start other releases if creating the Task Graph
             # fails for another one. We _do_ need to set this in order to exit
             # with the right code, though.
             rc = 2
             rr.mark_as_failed(
                 release,
-                'Failed to start release promotion (graph ID: %s). Error(s): %s' % (graph_id, exception)
+                'Failed to start release promotion (graph ID: %s). Error(s): %s' % (
+                    graph_id, exception)
             )
             log.exception('Failed to start release "%s" promotion for graph %s. Error(s): %s',
                           release['name'], graph_id, exception)
             log.debug('Release failed: %s', release)
 
     if rc != 0:
         sys.exit(rc)
 
new file mode 100644
--- /dev/null
+++ b/buildfarm/release/release-runner.yml.example
@@ -0,0 +1,43 @@
+api:
+  api_root: http://dev-master1.srv.releng.scl3.mozilla.com:5000
+  username: user
+  password: password
+
+release-runner:
+  notify_from: Release Eng <DONOTreleaseME@mozilla.com>
+  notify_to: Release Duty <you@example.com>
+  notify_to_announce: Release Drivers <the-mailing-list@mozilla.org>
+  smtp_server: localhost
+  hg_host: hg.mozilla.org
+  hg_username: ffxbld
+  hg_ssh_key: ~/.ssh/ffxbld_rsa
+  buildbot_configs: https://hg.mozilla.org/build/buildbot-configs
+  buildbot_configs_branch: production
+  buildbotcustom: https://hg.mozilla.org/build/buildbotcustom
+  buildbotcustom_branch: production-0.8
+  tools: https://hg.mozilla.org/build/tools
+  tools_branch: default
+  masters_json: https://hg.mozilla.org/build/tools/raw-file/default/buildfarm/maintenance/production-masters.json
+  sendchange_master: localhost:9001
+  staging: false
+  verbose: true
+  sleeptime: 30
+  ssh_username: cltbld
+  ssh_key: /home/cltbld/.ssh/release-runner
+  docker_worker_key: /home/cltbld/docker-worker-public.pem
+
+symlinks:
+  production_config.py: "{buildbot_configs}/mozilla/localconfig.py"
+  thunderbird_production_config.py: "{buildbot_configs}/mozilla/thunderbird_localconfig.py"
+
+releases:
+  - product: firefox
+    pattern: Firefox-.*
+    checks:
+      - long_revision
+      - l10n_changesets
+      - partial_updates
+  - product: fennec
+    pattern: Fennec-.*
+    checks:
+      - long_revision
--- a/lib/python/kickoff/__init__.py
+++ b/lib/python/kickoff/__init__.py
@@ -10,21 +10,22 @@ from util.retry import retry
 
 log = logging.getLogger(__name__)
 
 # temporary regex to filter out anything but mozilla-beta and mozilla-release
 # within release promotion. Once migration to release promotion is completed
 # for all types of releases, we will backout this filtering
 # regex beta tracking bug is 1252333,
 # regex release tracking bug is 1263976
+# This is now a default
 RELEASE_PATTERNS = [
-    r"Firefox-.*"
+    r"Firefox-.*",
+#     r"Fennec-.*",
 ]
 
-
 def matches(name, patterns):
     return any([re.search(p, name) for p in patterns])
 
 
 def long_revision(repo, revision):
     """Convert short revision to long using JSON API
 
     >>> long_revision("releases/mozilla-beta", "59f372c35b24")
@@ -49,23 +50,23 @@ class ReleaseRunner(object):
         self.new_releases = []
         self.releases_api = Releases((username, password), api_root=api_root,
                                      timeout=timeout)
         self.release_api = Release((username, password), api_root=api_root,
                                    timeout=timeout)
         self.release_l10n_api = ReleaseL10n((username, password),
                                             api_root=api_root, timeout=timeout)
 
-    def get_release_requests(self):
+    def get_release_requests(self, release_patterns=RELEASE_PATTERNS):
         new_releases = self.releases_api.getReleases()
         if new_releases['releases']:
             new_releases = [self.release_api.getRelease(name) for name in
                             new_releases['releases']]
             our_releases = [r for r in new_releases if
-                            matches(r['name'], RELEASE_PATTERNS)]
+                            matches(r['name'], release_patterns)]
             if our_releases:
                 self.new_releases = our_releases
                 log.info("Releases to handle are %s", self.new_releases)
                 return True
             else:
                 log.info("No releases to handle in %s", new_releases)
                 return False
         else: