Remove code moved to balrogscript; r=mtabara
author      Tom Prince <mozilla@hocat.ca>
date        Thu, 22 Nov 2018 18:27:30 +0000
changeset   8466:9559bd23773b
parent      8465:0db2c26309fb
child       8467:4c437eb11fd1
push id     6189
push user   mozilla@hocat.ca
push date   Fri, 23 Nov 2018 16:10:30 +0000
reviewers   mtabara
This removes the code that was moved to balrogscript in
https://github.com/mozilla-releng/balrogscript/pull/40, as well as some dead
code that called that code.

Differential Revision: https://phabricator.services.mozilla.com/D12690
lib/python/balrog/__init__.py
lib/python/balrog/submitter/__init__.py
lib/python/balrog/submitter/api.py
lib/python/balrog/submitter/cli.py
lib/python/balrog/submitter/updates.py
lib/python/build/l10n.py
lib/python/build/versions.py
lib/python/mozilla_buildtools/test/test_balrog_submitter_cli.py
lib/python/mozilla_buildtools/test/test_build_versions.py
lib/python/mozilla_buildtools/test/test_release_paths.py
lib/python/release/info.py
lib/python/release/l10n.py
lib/python/release/paths.py
lib/python/release/platforms.py
lib/python/release/versions.py
lib/python/util/algorithms.py
lib/python/vendor/balrogclient-0.0.4/balrogclient/__init__.py
lib/python/vendor/balrogclient-0.0.4/balrogclient/api.py
lib/python/vendor/balrogclient-0.0.4/balrogclient/test/__init__.py
lib/python/vendor/balrogclient-0.0.4/balrogclient/test/test_balrog_api.py
lib/python/vendor/balrogclient-0.0.4/run-tests.sh
lib/python/vendor/balrogclient-0.0.4/setup.py
lib/python/vendor/balrogclient-0.0.4/tox.ini
lib/python/vendorlibs.pth
deleted file mode 100644
--- a/lib/python/balrog/__init__.py
+++ /dev/null
deleted file mode 100644
--- a/lib/python/balrog/submitter/__init__.py
+++ /dev/null
deleted file mode 100644
--- a/lib/python/balrog/submitter/api.py
+++ /dev/null
@@ -1,8 +0,0 @@
-import os
-import site
-
-site.addsitedir(os.path.join(os.path.dirname(__file__), "../.."))
-
-from balrogclient import is_csrf_token_expired, SingleLocale, Release, Rule, ScheduledRuleChange
-
-__all__ = [ 'is_csrf_token_expired', 'SingleLocale', 'Release', 'Rule', 'ScheduledRuleChange' ]
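
Note: the deleted api.py was purely a re-export shim. It extended sys.path to
reach the vendored balrogclient and forwarded its public names, so
`from balrog.submitter.api import Release` and `from balrogclient import Release`
were interchangeable. A minimal sketch of the same pattern (the relative path
and the pair of re-exported names here are illustrative, not the full list
above; it assumes a balrogclient distribution is reachable from that path):

    # shim.py -- re-export a vendored package under a legacy import path.
    import os
    import site

    # Make the vendored copy importable without installing it.
    site.addsitedir(os.path.join(os.path.dirname(__file__), "..", ".."))

    from balrogclient import Release, Rule  # noqa: E402 (import after path setup)

    __all__ = ["Release", "Rule"]
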
deleted file mode 100644
--- a/lib/python/balrog/submitter/cli.py
+++ /dev/null
@@ -1,696 +0,0 @@
-import arrow
-try:
-    import simplejson as json
-except ImportError:
-    import json
-
-from release.info import getProductDetails
-from release.paths import makeCandidatesDir
-from release.platforms import buildbot2updatePlatforms, buildbot2bouncer, \
-  buildbot2ftp
-from release.versions import getPrettyVersion
-from balrog.submitter.api import Release, SingleLocale, Rule, ScheduledRuleChange
-from balrog.submitter.updates import merge_partial_updates
-from util.algorithms import recursive_update
-from util.retry import retry
-import logging
-from requests.exceptions import HTTPError
-
-log = logging.getLogger(__name__)
-
-
-def get_nightly_blob_name(productName, branch, build_type, suffix, dummy=False):
-    if dummy:
-        branch = '%s-dummy' % branch
-    return '%s-%s-%s-%s' % (productName, branch, build_type, suffix)
-
-
-def get_release_blob_name(productName, version, build_number, suffix=None):
-    if suffix is None:
-        suffix = ""
-    return '%s-%s-build%s%s' % (productName, version, build_number, suffix)
-
-
-class ReleaseCreatorBase(object):
-    def __init__(self, api_root, auth, dummy=False, suffix="",
-                 from_suffix="",
-                 complete_mar_filename_pattern=None,
-                 complete_mar_bouncer_product_pattern=None):
-        self.api_root = api_root
-        self.auth = auth
-        self.suffix = suffix
-        self.from_suffix = from_suffix
-        if dummy:
-            self.suffix += "-dummy"
-        self.complete_mar_filename_pattern = complete_mar_filename_pattern or '%s-%s.complete.mar'
-        self.complete_mar_bouncer_product_pattern = complete_mar_bouncer_product_pattern or '%s-%s-complete'
-
-    def generate_data(self, appVersion, productName, version, buildNumber,
-                      updateChannels, ftpServer, bouncerServer,
-                      enUSPlatforms, schemaVersion, openURL=None,
-                      **updateKwargs):
-        assert schemaVersion in (3, 4), 'Unhandled schema version %s' % schemaVersion
-        details_product = productName.lower()
-        if details_product == "devedition":
-            details_product = "firefox"
-
-        data = {
-            'detailsUrl': getProductDetails(details_product, appVersion),
-            'platforms': {},
-            'fileUrls': {},
-            'appVersion': appVersion,
-            'platformVersion': appVersion,
-            'displayVersion': getPrettyVersion(version)
-        }
-
-        actions = []
-        if openURL:
-            actions.append("showURL")
-            data["openURL"] = openURL
-
-        if actions:
-            data["actions"] = " ".join(actions)
-
-        fileUrls = self._getFileUrls(productName, version, buildNumber,
-                                     updateChannels, ftpServer,
-                                     bouncerServer, **updateKwargs)
-        if fileUrls:
-            data.update(fileUrls)
-
-        updateData = self._get_update_data(productName, version, **updateKwargs)
-        if updateData:
-            data.update(updateData)
-
-        for platform in enUSPlatforms:
-            updatePlatforms = buildbot2updatePlatforms(platform)
-            bouncerPlatform = buildbot2bouncer(platform)
-            ftpPlatform = buildbot2ftp(platform)
-            data['platforms'][updatePlatforms[0]] = {
-                'OS_BOUNCER': bouncerPlatform,
-                'OS_FTP': ftpPlatform
-            }
-            for aliasedPlatform in updatePlatforms[1:]:
-                data['platforms'][aliasedPlatform] = {
-                    'alias': updatePlatforms[0]
-                }
-
-        return data
-
-    def run(self, appVersion, productName, version, buildNumber,
-            updateChannels, ftpServer, bouncerServer,
-            enUSPlatforms, hashFunction, schemaVersion, openURL=None,
-            **updateKwargs):
-        data = self.generate_data(appVersion, productName, version,
-                                  buildNumber, updateChannels,
-                                  ftpServer, bouncerServer, enUSPlatforms,
-                                  schemaVersion, openURL, **updateKwargs)
-        name = get_release_blob_name(productName, version, buildNumber,
-                                     self.suffix)
-        api = Release(name=name, auth=self.auth, api_root=self.api_root)
-        try:
-            current_data, data_version = api.get_data()
-        except HTTPError, e:
-            if e.response.status_code == 404:
-                log.warning("Release blob doesn't exist, using empty data...")
-                current_data, data_version = {}, None
-            else:
-                raise
-
-        data = recursive_update(current_data, data)
-        api.update_release(product=productName,
-                           hashFunction=hashFunction,
-                           releaseData=json.dumps(data),
-                           schemaVersion=schemaVersion,
-                           data_version=data_version)
-
-
-class ReleaseCreatorV3(ReleaseCreatorBase):
-    def run(self, *args, **kwargs):
-        return ReleaseCreatorBase.run(self, *args, schemaVersion=3, **kwargs)
-
-    def _getFileUrls(self, productName, version, buildNumber, updateChannels,
-                     ftpServer, bouncerServer, partialUpdates):
-        data = {"fileUrls": {}}
-
-        for channel in updateChannels:
-            if channel in ('betatest', 'esrtest') or "localtest" in channel:
-                dir_ = makeCandidatesDir(productName.lower(), version,
-                                         buildNumber, server=ftpServer, protocol='http')
-                data["fileUrls"][channel] = '%supdate/%%OS_FTP%%/%%LOCALE%%/%%FILENAME%%' % dir_
-            else:
-                url = 'http://%s/?product=%%PRODUCT%%&os=%%OS_BOUNCER%%&lang=%%LOCALE%%' % bouncerServer
-                data["fileUrls"][channel] = url
-
-        return data
-
-    def _get_update_data(self, productName, version, partialUpdates):
-        file_prefix = productName.lower()
-        if file_prefix == "devedition":
-            file_prefix = "firefox"
-
-        data = {
-            "ftpFilenames": {
-                "completes": {
-                    "*": self.complete_mar_filename_pattern % (file_prefix, version),
-                }
-            },
-            "bouncerProducts": {
-                "completes": {
-                    "*": "%s-%s-complete" % (file_prefix, version),
-                }
-            }
-        }
-
-        if partialUpdates:
-            data["ftpFilenames"]["partials"] = {}
-            data["bouncerProducts"]["partials"] = {}
-            for previousVersion, previousInfo in partialUpdates.iteritems():
-                from_ = get_release_blob_name(productName, previousVersion,
-                                              previousInfo["buildNumber"],
-                                              self.from_suffix)
-                filename = "%s-%s-%s.partial.mar" % (file_prefix, previousVersion, version)
-                bouncerProduct = "%s-%s-partial-%s" % (productName.lower(), version, previousVersion)
-                data["ftpFilenames"]["partials"][from_] = filename
-                data["bouncerProducts"]["partials"][from_] = bouncerProduct
-
-        return data
-
-
-class ReleaseCreatorFileUrlsMixin(object):
-    def _getFileUrls(self, productName, version, buildNumber, updateChannels,
-                     ftpServer, bouncerServer, partialUpdates,
-                     requiresMirrors=True):
-        data = {"fileUrls": {}}
-        file_prefix = productName.lower()
-        if file_prefix == "devedition":
-            file_prefix = "firefox"
-
-        # "*" is for the default set of fileUrls, which generally points at
-        # bouncer. It's helpful to have this to reduce duplication between
-        # the live channel and the cdntest channel (which eliminates the
-        # possibility that those two channels serve different contents).
-        uniqueChannels = ["*"]
-        for c in updateChannels:
-            # localtest channels are different than the default because they
-            # point directly at FTP rather than Bouncer.
-            if "localtest" in c:
-                uniqueChannels.append(c)
-            # beta and beta-cdntest are special, but only if requiresMirrors is
-            # set to False. This is typically used when generating beta channel
-            # updates as part of RC builds, which get shipped prior to the
-            # release being pushed to mirrors. This is a bit of a hack.
-            if not requiresMirrors and c in ("beta", "beta-cdntest"):
-                uniqueChannels.append(c)
-
-        for channel in uniqueChannels:
-            data["fileUrls"][channel] = {
-                "completes": {}
-            }
-            if "localtest" in channel:
-                dir_ = makeCandidatesDir(productName.lower(), version,
-                                         buildNumber, server=ftpServer,
-                                         protocol='http')
-                filename = self.complete_mar_filename_pattern % (file_prefix, version)
-                data["fileUrls"][channel]["completes"]["*"] = "%supdate/%%OS_FTP%%/%%LOCALE%%/%s" % (dir_, filename)
-            else:
-                # See comment above about these channels for explanation.
-                if not requiresMirrors and channel in ("beta", "beta-cdntest"):
-                    bouncerProduct = "%s-%sbuild%s-complete" % (productName.lower(), version, buildNumber)
-                else:
-                    if productName.lower() == "fennec":
-                        bouncerProduct = "%s-%s" % (productName.lower(), version)
-                    else:
-                        bouncerProduct = self.complete_mar_bouncer_product_pattern % (productName.lower(), version)
-                url = 'http://%s/?product=%s&os=%%OS_BOUNCER%%&lang=%%LOCALE%%' % (bouncerServer, bouncerProduct)
-                data["fileUrls"][channel]["completes"]["*"] = url
-
-        if not partialUpdates:
-            return data
-
-        for channel in uniqueChannels:
-            data["fileUrls"][channel]["partials"] = {}
-            for previousVersion, previousInfo in partialUpdates.iteritems():
-                from_ = get_release_blob_name(productName, previousVersion,
-                                              previousInfo["buildNumber"],
-                                              self.from_suffix)
-                if "localtest" in channel:
-                    dir_ = makeCandidatesDir(productName.lower(), version,
-                                            buildNumber, server=ftpServer,
-                                            protocol='http')
-                    filename = "%s-%s-%s.partial.mar" % (file_prefix, previousVersion, version)
-                    data["fileUrls"][channel]["partials"][from_] = "%supdate/%%OS_FTP%%/%%LOCALE%%/%s" % (dir_, filename)
-                else:
-                    # See comment above about these channels for explanation.
-                    if not requiresMirrors and channel in ("beta", "beta-cdntest"):
-                        bouncerProduct = "%s-%sbuild%s-partial-%sbuild%s" % (productName.lower(), version, buildNumber, previousVersion, previousInfo["buildNumber"])
-                    else:
-                        bouncerProduct = "%s-%s-partial-%s" % (productName.lower(), version, previousVersion)
-                    url = 'http://%s/?product=%s&os=%%OS_BOUNCER%%&lang=%%LOCALE%%' % (bouncerServer, bouncerProduct)
-                    data["fileUrls"][channel]["partials"][from_] = url
-
-        return data
-
-
-class ReleaseCreatorV4(ReleaseCreatorBase, ReleaseCreatorFileUrlsMixin):
-    def run(self, *args, **kwargs):
-        return ReleaseCreatorBase.run(self, *args, schemaVersion=4, **kwargs)
-
-    # Replaced by _getFileUrls from ReleaseCreatorFileUrlsMixin
-    def _get_update_data(self, *args, **kwargs):
-        return None
-
-
-class ReleaseCreatorV9(ReleaseCreatorFileUrlsMixin):
-    schemaVersion=9
-
-    def __init__(self, api_root, auth, dummy=False, suffix="",
-                 from_suffix="",
-                 complete_mar_filename_pattern=None,
-                 complete_mar_bouncer_product_pattern=None):
-        self.api_root = api_root
-        self.auth = auth
-        self.suffix = suffix
-        self.from_suffix = from_suffix
-        if dummy:
-            self.suffix += "-dummy"
-        self.complete_mar_filename_pattern = complete_mar_filename_pattern or '%s-%s.complete.mar'
-        self.complete_mar_bouncer_product_pattern = complete_mar_bouncer_product_pattern or '%s-%s-complete'
-
-    def generate_data(self, appVersion, productName, version, buildNumber,
-                      updateChannels, ftpServer, bouncerServer,
-                      enUSPlatforms, **updateKwargs):
-        details_product = productName.lower()
-        if details_product == "devedition":
-            details_product = "firefox"
-
-        data = {
-            'platforms': {},
-            'fileUrls': {},
-            'appVersion': appVersion,
-            'displayVersion': getPrettyVersion(version),
-            'updateLine': [
-                {
-                    'for': {},
-                    'fields': {
-                        'detailsURL': getProductDetails(details_product, appVersion),
-                        'type': 'minor',
-                    },
-                },
-            ]
-        }
-
-        actions = []
-
-        fileUrls = self._getFileUrls(productName, version, buildNumber,
-                                     updateChannels, ftpServer,
-                                     bouncerServer, **updateKwargs)
-        if fileUrls:
-            data.update(fileUrls)
-
-        for platform in enUSPlatforms:
-            updatePlatforms = buildbot2updatePlatforms(platform)
-            bouncerPlatform = buildbot2bouncer(platform)
-            ftpPlatform = buildbot2ftp(platform)
-            data['platforms'][updatePlatforms[0]] = {
-                'OS_BOUNCER': bouncerPlatform,
-                'OS_FTP': ftpPlatform
-            }
-            for aliasedPlatform in updatePlatforms[1:]:
-                data['platforms'][aliasedPlatform] = {
-                    'alias': updatePlatforms[0]
-                }
-
-        return data
-
-    def run(self, appVersion, productName, version, buildNumber,
-            updateChannels, ftpServer, bouncerServer,
-            enUSPlatforms, hashFunction, **updateKwargs):
-        data = self.generate_data(appVersion, productName, version,
-                                  buildNumber, updateChannels,
-                                  ftpServer, bouncerServer, enUSPlatforms,
-                                  **updateKwargs)
-        name = get_release_blob_name(productName, version, buildNumber,
-                                     self.suffix)
-        api = Release(name=name, auth=self.auth, api_root=self.api_root)
-        try:
-            current_data, data_version = api.get_data()
-        except HTTPError, e:
-            if e.response.status_code == 404:
-                log.warning("Release blob doesn't exist, using empty data...")
-                current_data, data_version = {}, None
-            else:
-                raise
-
-        data = recursive_update(current_data, data)
-        api.update_release(product=productName,
-                           hashFunction=hashFunction,
-                           releaseData=json.dumps(data),
-                           schemaVersion=self.schemaVersion,
-                           data_version=data_version)
-
-
-class NightlySubmitterBase(object):
-    build_type = 'nightly'
-
-    def __init__(self, api_root, auth, dummy=False, url_replacements=None):
-        self.api_root = api_root
-        self.auth = auth
-        self.dummy = dummy
-        self.url_replacements = url_replacements
-
-    def _replace_canocical_url(self, url):
-        if self.url_replacements:
-            for string_from, string_to in self.url_replacements:
-                if string_from in url:
-                    new_url = url.replace(string_from, string_to)
-                    log.warning("Replacing %s with %s", url, new_url)
-                    return new_url
-
-        return url
-
-    def run(self, platform, buildID, productName, branch, appVersion, locale,
-            hashFunction, extVersion, schemaVersion, isOSUpdate=None, **updateKwargs):
-        assert schemaVersion in (3,4), 'Unhandled schema version %s' % schemaVersion
-        targets = buildbot2updatePlatforms(platform)
-        build_target = targets[0]
-        alias = None
-        if len(targets) > 1:
-            alias = targets[1:]
-        data = {
-            'buildID': buildID,
-            'appVersion': appVersion,
-            'platformVersion': extVersion,
-            'displayVersion': appVersion,
-        }
-        if isOSUpdate:
-            data['isOSUpdate'] = isOSUpdate
-
-        data.update(self._get_update_data(productName, branch, **updateKwargs))
-
-        if 'old-id' in platform:
-            # bug 1366034: support old-id builds
-            # Like bug 1055305, this is a hack to support two builds with the
-            # same build target that require different release blobs and rules.
-            build_type = 'old-id-%s' % self.build_type
-        else:
-            build_type = self.build_type
-
-        name = get_nightly_blob_name(productName, branch, build_type, buildID,
-                                     self.dummy)
-        api = SingleLocale(name=name, build_target=build_target, locale=locale,
-                           auth=self.auth, api_root=self.api_root)
-
-        # wrap operations into "atomic" functions that can be retried
-        def update_dated():
-            current_data, data_version = api.get_data()
-            # If the partials are already a subset of the blob and the
-            # complete MAR is the same, skip the submission
-            skip_submission = bool(
-                current_data and
-                current_data.get("completes") == data.get("completes") and
-                all(p in current_data.get("partials", [])
-                    for p in data.get("partials", [])))
-            if skip_submission:
-                log.warn("Dated data didn't change, skipping update")
-                return
-            # explicitly pass data version
-            api.update_build(
-                product=productName,
-                hashFunction=hashFunction,
-                buildData=json.dumps(merge_partial_updates(current_data,
-                                                           data)),
-                alias=json.dumps(alias),
-                schemaVersion=schemaVersion, data_version=data_version)
-
-        # Most retries are caused by losing a data race. In these cases,
-        # there's no point in waiting a long time to retry, so we reduce
-        # sleeptime and increase the number of attempts instead.
-        retry(update_dated, sleeptime=2, max_sleeptime=2, attempts=10)
-
-        latest = SingleLocale(
-            api_root=self.api_root, auth=self.auth,
-            name=get_nightly_blob_name(productName, branch, build_type,
-                                       'latest', self.dummy),
-            build_target=build_target, locale=locale)
-
-        def update_latest():
-            # copy everything over using target release's data version
-            latest_data, latest_data_version = latest.get_data()
-            source_data, _ = api.get_data()
-            if source_data == latest_data:
-                log.warn("Latest data didn't change, skipping update")
-                return
-            latest.update_build(
-                product=productName,
-                hashFunction=hashFunction, buildData=json.dumps(source_data),
-                alias=json.dumps(alias), schemaVersion=schemaVersion,
-                data_version=latest_data_version)
-
-        retry(update_latest, sleeptime=2, max_sleeptime=2, attempts=10)
-
-
-class MultipleUpdatesNightlyMixin(object):
-
-    def _get_update_data(self, productName, branch, completeInfo=None,
-                         partialInfo=None):
-        data = {}
-
-        if completeInfo:
-            data["completes"] = []
-            for info in completeInfo:
-                if "from_buildid" in info:
-                    from_ = get_nightly_blob_name(productName, branch,
-                                                  self.build_type,
-                                                  info["from_buildid"],
-                                                  self.dummy)
-                else:
-                    from_ = "*"
-                data["completes"].append({
-                    "from": from_,
-                    "filesize": info["size"],
-                    "hashValue": info["hash"],
-                    "fileUrl": self._replace_canocical_url(info["url"]),
-                })
-        if partialInfo:
-            data["partials"] = []
-            for info in partialInfo:
-                data["partials"].append({
-                    "from": get_nightly_blob_name(productName, branch,
-                                                  self.build_type,
-                                                  info["from_buildid"],
-                                                  self.dummy),
-                    "filesize": info["size"],
-                    "hashValue": info["hash"],
-                    "fileUrl": self._replace_canocical_url(info["url"]),
-                })
-
-        return data
-
-
-class NightlySubmitterV3(NightlySubmitterBase, MultipleUpdatesNightlyMixin):
-    def run(self, *args, **kwargs):
-        return NightlySubmitterBase.run(self, *args, schemaVersion=3, **kwargs)
-
-
-class NightlySubmitterV4(NightlySubmitterBase, MultipleUpdatesNightlyMixin):
-    def run(self, *args, **kwargs):
-        return NightlySubmitterBase.run(self, *args, schemaVersion=4, **kwargs)
-
-
-class ReleaseSubmitterBase(object):
-    def __init__(self, api_root, auth, dummy=False, suffix="", from_suffix=""):
-        self.api_root = api_root
-        self.auth = auth
-        self.suffix = suffix
-        if dummy:
-            self.suffix += "-dummy"
-        self.from_suffix = from_suffix
-
-    def run(self, platform, productName, appVersion, version, build_number, locale,
-            hashFunction, extVersion, buildID, schemaVersion, **updateKwargs):
-        assert schemaVersion in (3, 4), 'Unhandled schema version %s' % schemaVersion
-        targets = buildbot2updatePlatforms(platform)
-        # Some platforms may have aliases, but those are set up elsewhere
-        # for release blobs.
-        build_target = targets[0]
-
-        name = get_release_blob_name(productName, version, build_number,
-                                     self.suffix)
-        data = {
-            'buildID': buildID,
-            'appVersion': appVersion,
-            'platformVersion': extVersion,
-            'displayVersion': getPrettyVersion(version)
-        }
-
-        data.update(self._get_update_data(productName, version, build_number,
-                                          **updateKwargs))
-
-        api = SingleLocale(name=name, build_target=build_target, locale=locale,
-                           auth=self.auth, api_root=self.api_root)
-        current_data, data_version = api.get_data()
-        api.update_build(
-            data_version=data_version,
-            product=productName, hashFunction=hashFunction,
-            buildData=json.dumps(merge_partial_updates(current_data, data)),
-            schemaVersion=schemaVersion)
-
-
-class MultipleUpdatesReleaseMixin(object):
-    def _get_update_data(self, productName, version, build_number,
-                         completeInfo=None, partialInfo=None):
-        data = {}
-
-        if completeInfo:
-            data["completes"] = []
-            for info in completeInfo:
-                if "previousVersion" in info:
-                    from_ = get_release_blob_name(productName, version,
-                                                  build_number, self.from_suffix)
-                else:
-                    from_ = "*"
-                data["completes"].append({
-                    "from": from_,
-                    "filesize": info["size"],
-                    "hashValue": info["hash"],
-                })
-        if partialInfo:
-            data["partials"] = []
-            for info in partialInfo:
-                data["partials"].append({
-                    "from": get_release_blob_name(productName,
-                                                  info["previousVersion"],
-                                                  info["previousBuildNumber"],
-                                                  self.from_suffix),
-                    "filesize": info["size"],
-                    "hashValue": info["hash"],
-                })
-
-        return data
-
-
-class ReleaseSubmitterV3(ReleaseSubmitterBase, MultipleUpdatesReleaseMixin):
-    def run(self, *args, **kwargs):
-        return ReleaseSubmitterBase.run(self, *args, schemaVersion=3, **kwargs)
-
-
-class ReleaseSubmitterV4(ReleaseSubmitterBase, MultipleUpdatesReleaseMixin):
-    def run(self, *args, **kwargs):
-        return ReleaseSubmitterBase.run(self, *args, schemaVersion=4, **kwargs)
-
-
-class ReleaseSubmitterV9(MultipleUpdatesReleaseMixin):
-    def __init__(self, api_root, auth, dummy=False, suffix="", from_suffix=""):
-        self.api_root = api_root
-        self.auth = auth
-        self.suffix = suffix
-        if dummy:
-            self.suffix += "-dummy"
-        self.from_suffix = from_suffix
-
-    def run(self, platform, productName, appVersion, version, build_number, locale,
-            hashFunction, extVersion, buildID, **updateKwargs):
-        targets = buildbot2updatePlatforms(platform)
-        # Some platforms may have aliases, but those are set up elsewhere
-        # for release blobs.
-        build_target = targets[0]
-
-        name = get_release_blob_name(productName, version, build_number,
-                                     self.suffix)
-        data = {
-            'buildID': buildID,
-            'appVersion': appVersion,
-            'displayVersion': getPrettyVersion(version)
-        }
-
-        data.update(self._get_update_data(productName, version, build_number,
-                                          **updateKwargs))
-
-        api = SingleLocale(name=name, build_target=build_target, locale=locale,
-                           auth=self.auth, api_root=self.api_root)
-        current_data, data_version = api.get_data()
-        api.update_build(
-            data_version=data_version,
-            product=productName, hashFunction=hashFunction,
-            buildData=json.dumps(merge_partial_updates(current_data, data)),
-            schemaVersion=9)
-
-
-class ReleasePusher(object):
-    def __init__(self, api_root, auth, dummy=False, suffix=""):
-        self.api_root = api_root
-        self.auth = auth
-        self.suffix = suffix
-        if dummy:
-            self.suffix += "-dummy"
-
-    def run(self, productName, version, build_number, rule_ids, backgroundRate=None):
-        name = get_release_blob_name(productName, version, build_number,
-                                     self.suffix)
-        for rule_id in rule_ids:
-            data = {"mapping": name}
-            if backgroundRate:
-                data["backgroundRate"] = backgroundRate
-            Rule(api_root=self.api_root, auth=self.auth, rule_id=rule_id
-                 ).update_rule(**data)
-
-
-class ReleaseScheduler(object):
-    def __init__(self, api_root, auth, dummy=False, suffix=""):
-        self.api_root = api_root
-        self.auth = auth
-        self.suffix = suffix
-        if dummy:
-            self.suffix = "-dummy"
-
-    def run(self, productName, version, build_number, rule_ids, when=None, backgroundRate=None):
-        name = get_release_blob_name(productName, version, build_number,
-                                     self.suffix)
-
-        if when is not None:
-            when = arrow.get(when)
-
-        soon = arrow.now().shift(minutes=5)
-        if when is None or when < soon:
-            when = soon
-
-        for rule_id in rule_ids:
-            data, data_version = Rule(api_root=self.api_root, auth=self.auth, rule_id=rule_id).get_data()
-            # If the _currently_ shipped release is at a background rate of
-            # 100%, it's safe to set it as the fallback mapping. (Everyone
-            # was getting it anyways, so it's OK for them to fall back to
-            # it if they don't get the even newer one.)
-            # If it was _not_ shipped at 100%, we can't set it as the fallback.
-            # If we did, it would mean users on the wrong side of the die roll
-            # would either get the even newer release, or the release that
-            # previously wasn't shipped to everyone - which we can't assume is
-            # safe.
-            if data["backgroundRate"] == 100:
-                data["fallbackMapping"] = data["mapping"]
-            data["mapping"] = name
-            data["data_verison"] = data_version
-            data["rule_id"] = rule_id
-            data["change_type"] = "update"
-            # We receive an iso8601 datetime, but what Balrog needs is a to-the-millisecond epoch timestamp
-            data["when"] = when.timestamp * 1000
-            if backgroundRate:
-                data["backgroundRate"] = backgroundRate
-
-            ScheduledRuleChange(api_root=self.api_root, auth=self.auth, rule_id=rule_id
-                               ).add_scheduled_rule_change(**data)
-
-
-class BlobTweaker(object):
-    def __init__(self, api_root, auth):
-        self.api_root = api_root
-        self.auth = auth
-
-    def run(self, name, data):
-        api = Release(name=name, auth=self.auth, api_root=self.api_root)
-        current_data, data_version = api.get_data()
-        data = recursive_update(current_data, data)
-        api.update_release(
-            product=name.split('-')[0],
-            hashFunction=data['hashFunction'], releaseData=json.dumps(data),
-            data_version=data_version,
-            schemaVersion=current_data['schema_version'])
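
For reference, the two naming helpers at the top of the deleted cli.py are
self-contained; a standalone sketch shows the blob names they produce (the
product, branch, and version arguments are made up for illustration):

    def get_nightly_blob_name(productName, branch, build_type, suffix, dummy=False):
        # Copied from the deleted cli.py above.
        if dummy:
            branch = '%s-dummy' % branch
        return '%s-%s-%s-%s' % (productName, branch, build_type, suffix)

    def get_release_blob_name(productName, version, build_number, suffix=None):
        # Copied from the deleted cli.py above.
        if suffix is None:
            suffix = ""
        return '%s-%s-build%s%s' % (productName, version, build_number, suffix)

    print(get_nightly_blob_name("Firefox", "mozilla-central", "nightly", "latest"))
    # Firefox-mozilla-central-nightly-latest
    print(get_release_blob_name("Firefox", "64.0", 3))
    # Firefox-64.0-build3
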
deleted file mode 100644
--- a/lib/python/balrog/submitter/updates.py
+++ /dev/null
@@ -1,26 +0,0 @@
-import site
-import os
-
-site.addsitedir(os.path.join(os.path.dirname(__file__), "..", ".."))
-import jsonmerge
-
-
-def merge_partial_updates(base_obj, new_obj):
-    """Merges 2 update objects, merging partials and replacing completes"""
-    schema = {
-        "properties": {
-            # Merge partials using "from" as an identifier field
-            "partials": {
-                "mergeStrategy": "arrayMergeById",
-                "mergeOptions": {
-                    "idRef": "from"
-                }
-            },
-            # Replace completes - we don't usually have more than one
-            "completes": {
-                "mergeStrategy": "overwrite"
-            }
-        }
-    }
-    merger = jsonmerge.Merger(schema=schema)
-    return merger.merge(base_obj, new_obj)
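
The deleted merge_partial_updates() is a thin wrapper over the jsonmerge
package: partials are merged by their "from" key, while completes are replaced
wholesale. A standalone sketch using the same schema (requires jsonmerge; the
sample blobs are illustrative):

    import jsonmerge

    schema = {
        "properties": {
            "partials": {
                "mergeStrategy": "arrayMergeById",
                "mergeOptions": {"idRef": "from"},
            },
            "completes": {"mergeStrategy": "overwrite"},
        }
    }
    merger = jsonmerge.Merger(schema=schema)

    base = {"partials": [{"from": "build1", "filesize": 1}],
            "completes": [{"from": "*", "filesize": 10}]}
    head = {"partials": [{"from": "build2", "filesize": 2}],
            "completes": [{"from": "*", "filesize": 20}]}

    merged = merger.merge(base, head)
    # merged["partials"] keeps both entries (matched by "from");
    # merged["completes"] contains only the new list.
    print(merged)
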
deleted file mode 100644
--- a/lib/python/build/l10n.py
+++ /dev/null
@@ -1,269 +0,0 @@
-from __future__ import with_statement
-
-import os
-from os import path
-import shutil
-import sys
-from urllib import urlretrieve
-from urllib2 import urlopen
-from urlparse import urljoin
-
-from release.platforms import getPlatformLocales, buildbot2ftp
-from release.paths import makeCandidatesDir
-from util.commands import get_output, run_cmd
-from util.hg import mercurial, update
-from util.paths import windows2msys, msys2windows
-from util.retry import retry
-
-import logging
-log = logging.getLogger(__name__)
-
-
-def getMakeCommand(usePymake, absSourceRepoPath):
-    if usePymake:
-        return [sys.executable, "%s/build/pymake/make.py" % absSourceRepoPath]
-    return ["make"]
-
-
-def getAllLocales(appName, sourceRepo, rev="default",
-                  hg="https://hg.mozilla.org"):
-    localeFile = "%s/raw-file/%s/%s/locales/all-locales" % \
-        (sourceRepo, rev, appName)
-    url = urljoin(hg, localeFile)
-    try:
-        sl = urlopen(url).read()
-    except:
-        log.error("Failed to retrieve %s", url)
-        raise
-    return sl
-
-
-def compareLocales(repo, locale, l10nRepoDir, localeSrcDir, l10nIni,
-                   revision="default", merge=True):
-    mercurial(repo, "compare-locales")
-    update("compare-locales", revision=revision)
-    mergeDir = path.join(localeSrcDir, "merged")
-    if path.exists(mergeDir):
-        log.info("Deleting %s" % mergeDir)
-        shutil.rmtree(mergeDir)
-    run_cmd(["python", path.join("compare-locales", "scripts",
-                                 "compare-locales"),
-             "-m", mergeDir,
-             l10nIni,
-             l10nRepoDir, locale],
-            env={"PYTHONPATH": path.join("compare-locales", "lib")})
-
-
-def l10nRepackPrep(sourceRepoName, objdir, mozconfigPath, srcMozconfigPath,
-                   l10nBaseRepoName, makeDirs, env,
-                   tooltoolManifest=None, tooltool_script=None,
-                   tooltool_urls=None):
-    if not path.exists(l10nBaseRepoName):
-        os.mkdir(l10nBaseRepoName)
-
-    if srcMozconfigPath:
-        shutil.copy(path.join(sourceRepoName, srcMozconfigPath),
-                    path.join(sourceRepoName, ".mozconfig"))
-    else:
-        shutil.copy(mozconfigPath, path.join(sourceRepoName, ".mozconfig"))
-    with open(path.join(sourceRepoName, ".mozconfig"), "a") as mozconfig:
-        mozconfig.write("ac_add_options --enable-official-branding")
-
-    run_cmd(["mkdir", "-p", "l10n"])
-
-    if tooltoolManifest:
-        cmd = ['sh', '../scripts/scripts/tooltool/tooltool_wrapper.sh',
-               tooltoolManifest,
-               tooltool_urls[0],  # TODO: pass all urls when tooltool ready
-               'setup.sh']
-        cmd.extend(tooltool_script)
-        run_cmd(cmd, cwd=sourceRepoName)
-
-    absSourceRepoPath = os.path.join(os.getcwd(), sourceRepoName)
-    make = getMakeCommand(env.get("USE_PYMAKE"), absSourceRepoPath)
-    run_cmd(make + ["-f", "client.mk", "configure"], cwd=sourceRepoName,
-            env=env)
-    # we'll get things like (config, tier_base) for Firefox releases
-    # and (mozilla/config, mozilla/tier_base) for Thunderbird releases
-    for dir in makeDirs:
-        if path.basename(dir).startswith("tier"):
-            run_cmd(make + [path.basename(dir)],
-                    cwd=path.join(sourceRepoName, objdir, path.dirname(dir)),
-                    env=env)
-        else:
-            target = []
-            if path.basename(dir) == 'config':
-                # context: https://bugzil.la/1169937
-                target = ['export']
-            run_cmd(make + target,
-                    cwd=path.join(sourceRepoName, objdir, dir),
-                    env=env)
-
-
-def repackLocale(locale, l10nRepoDir, l10nBaseRepo, revision, localeSrcDir,
-                 l10nIni, compareLocalesRepo, env, absObjdir, merge=True,
-                 productName=None, platform=None,
-                 version=None, partialUpdates=None,
-                 buildNumber=None, stageServer=None,
-                 mozillaDir=None, mozillaSrcDir=None,
-                 marSignatureFormat='mar'):
-    repo = "/".join([l10nBaseRepo, locale])
-    localeDir = path.join(l10nRepoDir, locale)
-    mercurial(repo, localeDir)
-    update(localeDir, revision=revision)
-
-    # It's a bad assumption to make, but the source dir is currently always
-    # one level above the objdir.
-    absSourceRepoPath = path.split(absObjdir)[0]
-    use_pymake = env.get("USE_PYMAKE", False)
-    make = getMakeCommand(use_pymake, absSourceRepoPath)
-
-    env["AB_CD"] = locale
-    env["LOCALE_MERGEDIR"] = path.abspath(path.join(localeSrcDir, "merged"))
-    if sys.platform.startswith('win'):
-        if use_pymake:
-            env["LOCALE_MERGEDIR"] = msys2windows(env["LOCALE_MERGEDIR"])
-        else:
-            env["LOCALE_MERGEDIR"] = windows2msys(env["LOCALE_MERGEDIR"])
-    if sys.platform.startswith('darwin'):
-        env["MOZ_PKG_PLATFORM"] = "mac"
-    UPLOAD_EXTRA_FILES = []
-    if mozillaDir:
-        nativeDistDir = path.normpath(path.abspath(
-            path.join(localeSrcDir, '../../%s/dist' % mozillaDir)))
-    else:
-        nativeDistDir = path.normpath(path.abspath(
-            path.join(localeSrcDir, '../../dist')))
-    posixDistDir = windows2msys(nativeDistDir)
-    mar = '%s/host/bin/mar' % posixDistDir
-    mbsdiff = '%s/host/bin/mbsdiff' % posixDistDir
-    if platform.startswith('win'):
-        mar += ".exe"
-        mbsdiff += ".exe"
-    current = '%s/current' % posixDistDir
-    previous = '%s/previous' % posixDistDir
-    updateDir = 'update/%s/%s' % (buildbot2ftp(platform), locale)
-    updateAbsDir = '%s/%s' % (posixDistDir, updateDir)
-    current_mar = '%s/%s-%s.complete.mar' % (
-        updateAbsDir, productName, version)
-    unwrap_full_update = '../../../tools/update-packaging/unwrap_full_update.pl'
-    make_incremental_update = '../../tools/update-packaging/make_incremental_update.sh'
-    prevMarDir = '../../../../'
-    if mozillaSrcDir:
-        # Compensate for having the objdir or not.
-        additionalParent = ''
-        if mozillaDir:
-            additionalParent = '../'
-
-        unwrap_full_update = '../../../%s%s/tools/update-packaging/unwrap_full_update.pl' % (additionalParent, mozillaSrcDir)
-        make_incremental_update = '../../%s%s/tools/update-packaging/make_incremental_update.sh' % (additionalParent, mozillaSrcDir)
-        prevMarDir = '../../../../%s' % additionalParent
-    env['MAR'] = mar
-    env['MBSDIFF'] = mbsdiff
-
-    log.info("Download mar tools")
-    if stageServer:
-        candidates_dir = makeCandidatesDir(productName, version, buildNumber,
-                                           protocol="http", server=stageServer)
-        if not path.isfile(msys2windows(mar)):
-            marUrl = '/'.join([p.strip('/') for p in [candidates_dir, 'mar-tools',
-                                                      platform, path.basename(mar)]])
-            run_cmd(['mkdir', '-p', path.dirname(mar)])
-            log.info("Downloading %s to %s", marUrl, mar)
-            urlretrieve(marUrl, msys2windows(mar))
-            if not sys.platform.startswith('win'):
-                run_cmd(['chmod', '755', mar])
-        if not path.isfile(msys2windows(mbsdiff)):
-            mbsdiffUrl = '/'.join([p.strip('/') for p in [candidates_dir, 'mar-tools',
-                                                          platform, path.basename(mbsdiff)]])
-            run_cmd(['mkdir', '-p', path.dirname(mbsdiff)])
-            log.info("Downloading %s to %s", mbsdiffUrl, mbsdiff)
-            urlretrieve(mbsdiffUrl, msys2windows(mbsdiff))
-            if not sys.platform.startswith('win'):
-                run_cmd(['chmod', '755', mbsdiff])
-    else:
-        log.warning('stageServer not set. mar tools will *not* be downloaded.')
-
-    compareLocales(compareLocalesRepo, locale, l10nRepoDir, localeSrcDir,
-                   l10nIni, revision=revision, merge=merge)
-
-    make_installers_env = env.copy()
-    make_installers_env['MOZ_OBJDIR'] = absObjdir
-    run_cmd(make + ["installers-%s" % locale], cwd=localeSrcDir, env=make_installers_env)
-
-    # Our Windows-native rm from bug 727551 requires Windows-style paths
-    run_cmd(['rm', '-rf', msys2windows(current)])
-    run_cmd(['mkdir', current])
-    run_cmd(['perl', unwrap_full_update, current_mar],
-            cwd=path.join(nativeDistDir, 'current'), env=env)
-    for oldVersion in partialUpdates:
-        prevMar = partialUpdates[oldVersion]['mar']
-        if prevMar:
-            partial_mar_name = '%s-%s-%s.partial.mar' % (productName, oldVersion,
-                                                         version)
-            partial_mar = '%s/%s' % (updateAbsDir, partial_mar_name)
-            UPLOAD_EXTRA_FILES.append('%s/%s' % (updateDir, partial_mar_name))
-            # Our Windows-native rm from bug 727551 requires Windows-style paths
-            run_cmd(['rm', '-rf', msys2windows(previous)])
-            run_cmd(['mkdir', previous])
-            run_cmd(
-                ['perl', unwrap_full_update, '%s/%s' % (prevMarDir, prevMar)],
-                cwd=path.join(nativeDistDir, 'previous'), env=env)
-            run_cmd(['bash', make_incremental_update, partial_mar, previous,
-                    current], cwd=nativeDistDir, env=env)
-            if os.environ.get('MOZ_SIGN_CMD'):
-                run_cmd(['bash', '-c',
-                        '%s -f %s "%s"' %
-                        (os.environ['MOZ_SIGN_CMD'], marSignatureFormat, partial_mar)],
-                        env=env)
-                UPLOAD_EXTRA_FILES.append(
-                    '%s/%s.asc' % (updateDir, partial_mar_name))
-        else:
-            log.warning(
-                "Skipping partial MAR creation for %s %s" % (oldVersion,
-                                                             locale))
-
-    env['UPLOAD_EXTRA_FILES'] = ' '.join(UPLOAD_EXTRA_FILES)
-    retry(run_cmd,
-          args=(make + ["upload", "AB_CD=%s" % locale], ),
-          kwargs={'cwd': localeSrcDir, 'env': env})
-
-    # return the location of the checksums file, because consumers may want
-    # some information about the files that were generated.
-    # Some versions of make that we use (at least pymake) imply
-    # --print-directory. We need to turn it off to avoid getting extra output
-    # that messes up our parsing of the checksum file path.
-    curdir = os.getcwd()
-    try:
-        os.chdir(localeSrcDir)
-        relative_checksums = get_output(make +
-                                        ["--no-print-directory", "echo-variable-CHECKSUM_FILE", "AB_CD=%s" % locale],
-                                        env=env, cwd=localeSrcDir).strip("\"'\n\r")
-        return path.normpath(path.join(localeSrcDir, relative_checksums))
-    finally:
-        os.chdir(curdir)
-
-
-def getLocalesForChunk(possibleLocales, chunks, thisChunk):
-    if 'en-US' in possibleLocales:
-        possibleLocales.remove('en-US')
-    possibleLocales = sorted(possibleLocales)
-    nLocales = len(possibleLocales)
-    for c in range(1, chunks + 1):
-        n = nLocales / chunks
-        # If the total number of locales isn't evenly divisible by the number
-        # of chunks we need to append one more onto some chunks
-        if c <= (nLocales % chunks):
-            n += 1
-        if c == thisChunk:
-            return possibleLocales[0:n]
-        del possibleLocales[0:n]
-
-
-def getNightlyLocalesForChunk(appName, sourceRepo, platform, chunks, thisChunk,
-                              hg="https://hg.mozilla.org"):
-    possibleLocales = getPlatformLocales(
-        getAllLocales(appName, sourceRepo, hg=hg),
-        (platform,)
-    )[platform]
-    return getLocalesForChunk(possibleLocales, chunks, thisChunk)
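
The chunking helper at the end of the deleted l10n.py hands out
floor(n / chunks) locales per chunk and gives the first (n mod chunks) chunks
one extra. A sketch mirroring its logic, minus the en-US handling and with
explicit floor division so it behaves the same on Python 2 and 3 (the locale
list is illustrative):

    def get_locales_for_chunk(possible_locales, chunks, this_chunk):
        possible_locales = sorted(possible_locales)
        n_locales = len(possible_locales)
        for c in range(1, chunks + 1):
            n = n_locales // chunks
            # Spread the remainder over the earliest chunks.
            if c <= (n_locales % chunks):
                n += 1
            if c == this_chunk:
                return possible_locales[0:n]
            del possible_locales[0:n]

    print(get_locales_for_chunk(["de", "fr", "it", "ja", "pl"], 2, 1))
    # ['de', 'fr', 'it']
    print(get_locales_for_chunk(["de", "fr", "it", "ja", "pl"], 2, 2))
    # ['ja', 'pl']
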
--- a/lib/python/build/versions.py
+++ b/lib/python/build/versions.py
@@ -1,130 +1,6 @@
-import re
-
-from release.info import isFinalRelease
-
-
-class BuildVersionsException(Exception):
-    pass
-
-# Versions that match this should not be bumped
-DO_NOT_BUMP_REGEX = '^\d\.\d(pre)?$'
-
 # Regex that matches all possible versions and milestones
 ANY_VERSION_REGEX =\
     ('(\d+\.\d[\d\.]*)'    # A version number
      '((a|b)\d+)?'        # Might be an alpha or beta
      '(esr)?'             # Might be an esr
      '(pre)?')            # Might be a 'pre' (nightly) version
-
-# NB: If a file could match more than one of the regexes below the behaviour
-# will be undefined, because the key order will differ from system to system
-# and possibly run to run. Try to avoid this.
-BUMP_FILES = {
-    r'^.*(version.*\.txt|milestone\.txt)$': '^%(version)s$',
-    r'^.*confvars\.sh$': '^MOZ_APP_VERSION=%(version)s$'
-}
-
-
-def bumpFile(filename, contents, version):
-    # First, find the right regex for this file
-    newContents = []
-    for fileRegex, versionRegex in BUMP_FILES.iteritems():
-        if re.match(fileRegex, filename):
-            # Second, find the line with the version in it
-            for line in contents.splitlines():
-                regex = versionRegex % {'version': ANY_VERSION_REGEX}
-                match = re.match(regex, line)
-                # If this is the version line, and the file doesn't have
-                # the correct version, change it.
-                if match and match.group() != version:
-                    newContents.append(
-                        re.sub(ANY_VERSION_REGEX, version, line))
-                # If it's not the version line, or the version is correct,
-                # don't do anything
-                else:
-                    newContents.append(line)
-            newContents = "\n".join(newContents)
-            # Be sure to preserve trailing newlines, if they exist
-            if contents.endswith("\n"):
-                newContents += "\n"
-            break
-    if len(newContents) == 0:
-        raise BuildVersionsException("Don't know how to bump %s" % filename)
-    return newContents
-
-
-def nextVersion(version, pre=False):
-    """Returns the version directly after `version', optionally with "pre"
-       appended to it."""
-    if re.match(DO_NOT_BUMP_REGEX, version):
-        bumped = version
-    else:
-        bumped = increment(version)
-    if pre:
-        bumped += "pre"
-    return bumped
-
-# The following function was copied from http://code.activestate.com/recipes/442460/
-# Written by Chris Olds
-lastNum = re.compile(r'(?:[^\d]*(\d+)[^\d]*)+')
-
-
-def increment(s):
-    """ look for the last sequence of number(s) in a string and increment """
-    m = lastNum.search(s)
-    if m:
-        next = str(int(m.group(1)) + 1)
-        start, end = m.span(1)
-        s = s[:max(end - len(next), start)] + next + s[end:]
-    return s
-
-
-def getPossibleNextVersions(version):
-    """Return possibly next versions for a given version.
-       There's a few distinct cases here:
-       * ESRs:  The only possible next version is the next minor version.
-                Eg: 17.0.3esr -> 17.0.4esr
-       * Betas: The next beta with the same major version and also the next
-                major version's beta 1. Eg: 18.0b4 -> 18.0b5, 19.0b1
-       * Other: The next major version's .0 release and the next minor version.
-                Eg: 15.0 -> 15.0.1, 16.0; 17.0.2 -> 17.0.3, 18.0
-
-       Versions with 'pre' are deprecated, and explicitly not supported.
-    """
-    ret = set()
-    # Get the parts we care about from the version. The last group is the 'pre'
-    # tag, which doesn't affect our work.
-    m = re.match(ANY_VERSION_REGEX, version)
-    if not m:
-        return ret
-    base, beta, _, esr = m.groups()[:4]
-    # The next major version is used in a couple of places, so we figure it out
-    # ahead of time. Eg: 17.0 -> 18.0 or 15.0.3 -> 16.0
-    nextMajorVersion = increment(base.split('.')[0]) + '.0'
-    # Modern ESRs have two possibilities:
-    # 1) Bump the second digit for a planned release and reset the third digit
-    #    to 0.
-    # 2) Bump the last digit for an unexpected release
-    #
-    # Prior to ESR 24 we did #2 for all types of releases.
-    if esr:
-        # if version is like N.0esr, add an extra 0 to make it bump properly
-        if version.count('.') < 2:
-            version = version.replace('esr', '.0esr')
-        first, second, _ = version.split('.', 2)
-        if int(first) >= 24:
-            ret.add('%s.%s.0esr' % (first, increment(second)))
-        ret.add(increment(version))
-    # Betas are similar, except we need the next major version's beta 1, too.
-    elif beta:
-        ret.add(increment(version))
-        ret.add('%sb1' % nextMajorVersion)
-    # Other releases are a bit more complicated, because we need to handle
-    # going from a x.y -> x.y.z version number.
-    else:
-        ret.add(nextMajorVersion)
-        if isFinalRelease(version):
-            ret.add('%s.1' % version)
-        else:
-            ret.add(increment(version))
-    return ret
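
The removed version-bumping helpers hinge on increment(), which bumps the last
run of digits in a string. A standalone copy with worked examples (the expected
getPossibleNextVersions() outputs are taken from its docstring and the ESR
comment above):

    import re

    # Copied from the removed build/versions.py.
    lastNum = re.compile(r'(?:[^\d]*(\d+)[^\d]*)+')

    def increment(s):
        """Look for the last sequence of number(s) in a string and increment."""
        m = lastNum.search(s)
        if m:
            nxt = str(int(m.group(1)) + 1)
            start, end = m.span(1)
            s = s[:max(end - len(nxt), start)] + nxt + s[end:]
        return s

    print(increment("17.0.3esr"))  # 17.0.4esr
    print(increment("18.0b4"))     # 18.0b5
    # getPossibleNextVersions() combines increment() with the next major version:
    #   "18.0b4"    -> {"18.0b5", "19.0b1"}
    #   "17.0.2"    -> {"17.0.3", "18.0"}
    #   "24.1.0esr" -> {"24.1.1esr", "24.2.0esr"}
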
deleted file mode 100644
--- a/lib/python/mozilla_buildtools/test/test_balrog_submitter_cli.py
+++ /dev/null
@@ -1,288 +0,0 @@
-try:
-    # Python 2.6 backport with assertDictEqual()
-    import unittest2 as unittest
-except ImportError:
-    import unittest
-from balrog.submitter.cli import NightlySubmitterBase, NightlySubmitterV4
-from balrog.submitter.updates import merge_partial_updates
-from balrog.submitter.api import SingleLocale
-from mock import patch, call
-
-
-class TestNightlySubmitterBase(unittest.TestCase):
-
-    def test_replace_canocical_url(self):
-        url_replacements = [
-            ("ftp.mozilla.org", "download.cdn.mozilla.net")
-        ]
-        submitter = NightlySubmitterBase(api_root=None, auth=None,
-                                         url_replacements=url_replacements)
-        self.assertEqual(
-            'http://download.cdn.mozilla.net/pub/mozilla.org/some/file',
-            submitter._replace_canocical_url(
-                'http://ftp.mozilla.org/pub/mozilla.org/some/file')
-        )
-
-
-class TestNightlySubmitterV4(unittest.TestCase):
-
-    def test_canonical_ur_replacement(self):
-        url_replacements = [
-            ("ftp.mozilla.org", "download.cdn.mozilla.net")
-        ]
-        submitter = NightlySubmitterV4(api_root=None, auth=None,
-                                       url_replacements=url_replacements)
-        completeInfo = [{
-            'size': 123,
-            'hash': 'abcd',
-            'url': 'http://ftp.mozilla.org/url'
-        }]
-        data = submitter._get_update_data("prod", "brnch", completeInfo)
-        self.assertDictEqual(
-            data,
-            {'completes': [{
-                'fileUrl': 'http://download.cdn.mozilla.net/url',
-                'filesize': 123,
-                'from': '*',
-                'hashValue': 'abcd'
-            }]})
-
-    def test_no_canonical_ur_replacement(self):
-        submitter = NightlySubmitterV4(api_root=None, auth=None,
-                                       url_replacements=None)
-        completeInfo = [{
-            'size': 123,
-            'hash': 'abcd',
-            'url': 'http://ftp.mozilla.org/url'
-        }]
-        data = submitter._get_update_data("prod", "brnch", completeInfo)
-        self.assertDictEqual(
-            data,
-            {'completes': [{
-                'fileUrl': 'http://ftp.mozilla.org/url',
-                'filesize': 123,
-                'from': '*',
-                'hashValue': 'abcd'
-            }]})
-
-
-class TestUpdateMerger(unittest.TestCase):
-    # print the diff between large dicts
-    maxDiff = None
-
-    def test_merge_updates(self):
-        old_data = {
-            'some_other_field': "123",
-            'some_other_field2': {"a": "b", "c": 1},
-            'some_other_list': [1, 2, 3],
-            'completes': [
-                {
-                    'fileUrl': 'https://complete1',
-                    'filesize': 123,
-                    'from': '*',
-                    'hashValue': '123abcdef'
-                },
-            ],
-            'partials': [
-                {
-                    'fileUrl': 'https://partial1',
-                    'filesize': 111,
-                    'from': '111',
-                    'hashValue': '123abc'
-                },
-                {
-                    'fileUrl': 'https://partial2',
-                    'filesize': 112,
-                    'from': '112',
-                    'hashValue': '223abc'
-                },
-            ]
-        }
-        new_data = {
-            'completes': [
-                {
-                    'fileUrl': 'https://complete2',
-                    'filesize': 122,
-                    'from': '*',
-                    'hashValue': '122abcdef'
-                },
-            ],
-            'partials': [
-                {
-                    'fileUrl': 'https://partial2/differenturl',
-                    'filesize': 112,
-                    'from': '112',
-                    'hashValue': '223abcd'
-                },
-                {
-                    'fileUrl': 'https://partial3',
-                    'filesize': 113,
-                    'from': '113',
-                    'hashValue': '323abc'
-                },
-            ]
-        }
-        merged = merge_partial_updates(old_data, new_data)
-        expected_merged = {
-            'some_other_field': "123",
-            'some_other_field2': {"a": "b", "c": 1},
-            'some_other_list': [1, 2, 3],
-            'completes': [
-                {
-                    'fileUrl': 'https://complete2',
-                    'filesize': 122,
-                    'from': '*',
-                    'hashValue': '122abcdef'
-                },
-            ],
-            'partials': [
-                {
-                    'fileUrl': 'https://partial1',
-                    'filesize': 111,
-                    'from': '111',
-                    'hashValue': '123abc'
-                },
-                {
-                    'fileUrl': 'https://partial2/differenturl',
-                    'filesize': 112,
-                    'from': '112',
-                    'hashValue': '223abcd'
-                },
-                {
-                    'fileUrl': 'https://partial3',
-                    'filesize': 113,
-                    'from': '113',
-                    'hashValue': '323abc'
-                },
-            ]
-        }
-        self.assertDictEqual(merged, expected_merged)
-
-
-class TestUpdateIdempotency(unittest.TestCase):
-
-    @patch.object(SingleLocale, 'update_build')
-    @patch.object(SingleLocale, 'get_data')
-    def test_new_data(self, get_data, update_build):
-        """SingleLocale.update_build() should be called twice when new data
-        submitted"""
-        get_data.side_effect = [
-            # first call, the dated blob, assume there is no data yet
-            ({}, None),
-            # second call, get the "latest" blob's data
-            ({}, 100),
-            # Third call, get data from the dated blob
-            ({"buildID": "b1", "appVersion": "a1", "displayVersion": "a1",
-              "partials": [{"fileUrl": "p_url1", "from": "pr1-b1-nightly-b0",
-                            "hashValue": "p_hash1", "filesize": 1}],
-              "platformVersion": "v1",
-              "completes": [{"fileUrl": "c_url1", "hashValue": "c_hash1",
-                             "from": "*", "filesize": 2}]}, 1)
-        ]
-        partial_info = [{
-            "url": "p_url1",
-            "hash": "p_hash1",
-            "size": 1,
-            "from_buildid": "b0"
-        }]
-        complete_info = [{
-            "url": "c_url1",
-            "hash": "c_hash1",
-            "size": 2,
-        }]
-        submitter = NightlySubmitterV4("api_root", auth=None)
-        submitter.run(platform="linux64", buildID="b1", productName="pr1",
-                      branch="b1", appVersion="a1", locale="l1",
-                      hashFunction='sha512', extVersion="v1",
-                      partialInfo=partial_info, completeInfo=complete_info)
-        self.assertEqual(update_build.call_count, 2)
-
-    @patch.object(SingleLocale, 'update_build')
-    @patch.object(SingleLocale, 'get_data')
-    def test_same_dated_data(self, get_data, update_build):
-        partials = [
-            {
-                "from": "pr1-b1-nightly-b0", "filesize": 1,
-                "hashValue": "p_hash1", "fileUrl": "p_url1"
-            },
-            {
-                "from": "pr1-b1-nightly-b1000", "filesize": 1000,
-                "hashValue": "p_hash1000", "fileUrl": "p_url1000"
-            },
-        ]
-        completes = [{
-            "from": "*", "filesize": 2, "hashValue": "c_hash1",
-            "fileUrl": "c_url1"
-        }]
-        partial_info = [{
-            "url": "p_url1",
-            "hash": "p_hash1",
-            "size": 1,
-            "from_buildid": "b0"
-        }]
-        complete_info = [{
-            "url": "c_url1",
-            "hash": "c_hash1",
-            "size": 2,
-            "from": "*"
-        }]
-        data = {"buildID": "b1", "appVersion": "a1", "displayVersion": "a1",
-                "platformVersion": "v1",
-                "partials": partials, "completes": completes}
-        get_data.side_effect = [
-            # first call, the dated blob, assume it contains the same data
-            (data, 1),
-            # second call, get the "latest" blob's data version, data itself is
-            # not important and discarded
-            ({}, 100),
-            # Third call, get data from the dated blob
-            (data, 1)]
-
-        submitter = NightlySubmitterV4("api_root", auth=None)
-        submitter.run(platform="linux64", buildID="b1", productName="pr1",
-                      branch="b1", appVersion="a1", locale="l1",
-                      hashFunction='sha512', extVersion="v1",
-                      partialInfo=partial_info, completeInfo=complete_info)
-        self.assertEqual(update_build.call_count, 1)
-
-    @patch.object(SingleLocale, 'update_build')
-    @patch.object(SingleLocale, 'get_data')
-    def test_same_latest_data(self, get_data, update_build):
-        partials = [{
-            "from": "pr1-b1-nightly-b0", "filesize": 1, "hashValue": "p_hash1",
-            "fileUrl": "p_url1"
-        }]
-        completes = [{
-            "from": "*", "filesize": 2, "hashValue": "c_hash1",
-            "fileUrl": "c_url1"
-        }]
-        partial_info = [{
-            "url": "p_url1",
-            "hash": "p_hash1",
-            "size": 1,
-            "from_buildid": "b0"
-        }]
-        complete_info = [{
-            "url": "c_url1",
-            "hash": "c_hash1",
-            "size": 2,
-            "from": "*"
-        }]
-        data = {"buildID": "b1", "appVersion": "a1", "displayVersion": "a1",
-                "platformVersion": "v1",
-                "partials": partials, "completes": completes}
-        get_data.side_effect = [
-            # first call, the dated blob, assume it contains the same data
-            (data, 1),
-            # second call, get the "latest" blob's data version, data itself is
-            # not important and discarded
-            (data, 100),
-            # Third call, get data from the dated blob
-            (data, 1)]
-
-        submitter = NightlySubmitterV4("api_root", auth=None)
-        submitter.run(platform="linux64", buildID="b1", productName="pr1",
-                      branch="b1", appVersion="a1", locale="l1",
-                      hashFunction='sha512', extVersion="v1",
-                      partialInfo=partial_info, completeInfo=complete_info)
-        self.assertEqual(update_build.call_count, 0)
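The TestPartialUpdates case above pins down the merge semantics that moved to
balrogscript: 'completes' are replaced wholesale, 'partials' are merged on
their "from" key with new entries winning, and unrelated top-level fields are
preserved. A minimal sketch that satisfies those expectations (not the actual
balrogscript implementation):

    def merge_partial_updates(old_data, new_data):
        merged = dict(old_data)
        # completes are replaced wholesale by the new submission
        merged['completes'] = new_data['completes']
        # partials are keyed on "from"; new entries override old ones
        partials = {p['from']: p for p in old_data.get('partials', [])}
        partials.update({p['from']: p for p in new_data.get('partials', [])})
        merged['partials'] = sorted(partials.values(), key=lambda p: p['from'])
        return merged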
--- a/lib/python/mozilla_buildtools/test/test_build_versions.py
+++ b/lib/python/mozilla_buildtools/test/test_build_versions.py
@@ -1,13 +1,12 @@
 import re
 import unittest
 
-from build.versions import bumpFile, nextVersion, BuildVersionsException, \
-    ANY_VERSION_REGEX, getPossibleNextVersions
+from build.versions import ANY_VERSION_REGEX
 
 
 class TestAnyVersionRegex(unittest.TestCase):
     avr = '^%s$' % ANY_VERSION_REGEX
 
     def testAlpha(self):
         self.assertTrue(re.match(self.avr, '3.0a1'))
 
@@ -17,233 +16,8 @@ class TestAnyVersionRegex(unittest.TestC
     def testEsr(self):
         self.assertTrue(re.match(self.avr, '10.0.4esr'))
 
     def testEsrPre(self):
         self.assertTrue(re.match(self.avr, '10.0.5esrpre'))
 
     def testBad(self):
         self.assertFalse(re.match(self.avr, '3.0c'))
-
-
-class TestBuildVersions(unittest.TestCase):
-    def _doTest(self, original, expected):
-        got = nextVersion(original)
-        self.assertEquals(got, expected)
-
-    def testNextVersionAlpha(self):
-        self._doTest("4.1a2", "4.1a3")
-
-    def testNextVersionBeta(self):
-        self._doTest("3.5b3", "3.5b4")
-
-    def testNextVersion3Part(self):
-        self._doTest("4.0.1", "4.0.2")
-
-    def testNextVersion4Part(self):
-        self._doTest("5.0.0.4", "5.0.0.5")
-
-    def testNextVersionBigNumber(self):
-        self._doTest("5.0.0.24", "5.0.0.25")
-
-    def testNextVersionFinalVersion(self):
-        self._doTest("4.0", "4.0")
-
-    def testNextVersionAlphaPre(self):
-        self._doTest("4.3a4pre", "4.3a5pre")
-
-    def testNextVersionBetaPre(self):
-        self._doTest("5.6b2pre", "5.6b3pre")
-
-    def testNextVersion3PartPre(self):
-        self._doTest("2.0.3pre", "2.0.4pre")
-
-    def testNextVersion4PartPre(self):
-        self._doTest("6.0.0.2pre", "6.0.0.3pre")
-
-    def testNextVersionBigNumberPre(self):
-        self._doTest("78.2.42.510pre", "78.2.42.511pre")
-
-    def testNextVersionFinalVersionPre(self):
-        self._doTest("4.0pre", "4.0pre")
-
-unbumpedConfVarsSh = """\
-# ***** BEGIN LICENSE BLOCK *****
-# Version: MPL 1.1/GPL 2.0/LGPL 2.1
-#
-# The contents of this file are subject to the Mozilla Public License Version
-# 1.1 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-# http://www.mozilla.org/MPL/
-#
-# Software distributed under the License is distributed on an "AS IS" basis,
-# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
-# for the specific language governing rights and limitations under the
-# License.
-#
-# The Original Code is Mozilla.
-#
-# The Initial Developer of the Original Code is
-# the Mozilla Foundation <http://www.mozilla.org/>.
-# Portions created by the Initial Developer are Copyright (C) 2007
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Finkle <mfinkle@mozilla.com>
-#
-# Alternatively, the contents of this file may be used under the terms of
-# either the GNU General Public License Version 2 or later (the "GPL"), or
-# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
-# in which case the provisions of the GPL or the LGPL are applicable instead
-# of those above. If you wish to allow use of your version of this file only
-# under the terms of either the GPL or the LGPL, and not to allow others to
-# use your version of this file under the terms of the MPL, indicate your
-# decision by deleting the provisions above and replace them with the notice
-# and other provisions required by the GPL or the LGPL. If you do not delete
-# the provisions above, a recipient may use your version of this file under
-# the terms of any one of the MPL, the GPL or the LGPL.
-#
-# ***** END LICENSE BLOCK *****
-
-MOZ_APP_NAME=fennec
-MOZ_APP_UA_NAME=Fennec
-
-MOZ_APP_VERSION=4.0b5pre
-
-MOZ_BRANDING_DIRECTORY=mobile/branding/nightly
-MOZ_OFFICIAL_BRANDING_DIRECTORY=mobile/branding/official
-# MOZ_APP_DISPLAYNAME is set by branding/configure.sh
-
-MOZ_SERVICES_SYNC=1
-
-MOZ_ENABLE_LIBXUL=1
-MOZ_DISABLE_DOMCRYPTO=1
-
-if test "$LIBXUL_SDK"; then
-MOZ_XULRUNNER=1
-else
-MOZ_XULRUNNER=
-MOZ_MORK=
-MOZ_PLACES=1
-fi"""
-
-bumpedConfVarsSh = """\
-# ***** BEGIN LICENSE BLOCK *****
-# Version: MPL 1.1/GPL 2.0/LGPL 2.1
-#
-# The contents of this file are subject to the Mozilla Public License Version
-# 1.1 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-# http://www.mozilla.org/MPL/
-#
-# Software distributed under the License is distributed on an "AS IS" basis,
-# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
-# for the specific language governing rights and limitations under the
-# License.
-#
-# The Original Code is Mozilla.
-#
-# The Initial Developer of the Original Code is
-# the Mozilla Foundation <http://www.mozilla.org/>.
-# Portions created by the Initial Developer are Copyright (C) 2007
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Finkle <mfinkle@mozilla.com>
-#
-# Alternatively, the contents of this file may be used under the terms of
-# either the GNU General Public License Version 2 or later (the "GPL"), or
-# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
-# in which case the provisions of the GPL or the LGPL are applicable instead
-# of those above. If you wish to allow use of your version of this file only
-# under the terms of either the GPL or the LGPL, and not to allow others to
-# use your version of this file under the terms of the MPL, indicate your
-# decision by deleting the provisions above and replace them with the notice
-# and other provisions required by the GPL or the LGPL. If you do not delete
-# the provisions above, a recipient may use your version of this file under
-# the terms of any one of the MPL, the GPL or the LGPL.
-#
-# ***** END LICENSE BLOCK *****
-
-MOZ_APP_NAME=fennec
-MOZ_APP_UA_NAME=Fennec
-
-MOZ_APP_VERSION=4.0b6pre
-
-MOZ_BRANDING_DIRECTORY=mobile/branding/nightly
-MOZ_OFFICIAL_BRANDING_DIRECTORY=mobile/branding/official
-# MOZ_APP_DISPLAYNAME is set by branding/configure.sh
-
-MOZ_SERVICES_SYNC=1
-
-MOZ_ENABLE_LIBXUL=1
-MOZ_DISABLE_DOMCRYPTO=1
-
-if test "$LIBXUL_SDK"; then
-MOZ_XULRUNNER=1
-else
-MOZ_XULRUNNER=
-MOZ_MORK=
-MOZ_PLACES=1
-fi"""
-
-
-class TestBumpFile(unittest.TestCase):
-    def _doTest(self, filename, oldContents, expectedContents, version):
-        newContents = bumpFile(filename, oldContents, version)
-        self.assertEquals(newContents, expectedContents)
-
-    def testBumpVersionTxtNoChange(self):
-        self._doTest("browser/config/version.txt", "3.5.4", "3.5.4", "3.5.4")
-
-    def testBumpVersionTxt(self):
-        self._doTest("browser/config/version.txt", "4.0b5", "4.0b6", "4.0b6")
-
-    def testBumpVersionTxtToRC(self):
-        self._doTest("browser/config/version.txt", "4.0b10", "4.0", "4.0")
-
-    def testBumpDifferentlyNamedVersionTxt(self):
-        self._doTest("mail/config/version-192.txt", "3.1b2", "3.1b3", "3.1b3")
-
-    def testBumpMilestoneTxt(self):
-        self._doTest("config/milestone.txt", "1.9.2.2", "1.9.2.3", "1.9.2.3")
-
-    def testBumpMilestoneTxtPreVersion(self):
-        self._doTest("js/src/config/milestone.txt",
-                     "1.9.2.4pre", "1.9.2.5pre", "1.9.2.5pre")
-
-    def testBumpConfVarsSh(self):
-        self._doTest("confvars.sh", unbumpedConfVarsSh, bumpedConfVarsSh,
-                     "4.0b6pre")
-
-    def testBumpUnknownFile(self):
-        self.assertRaises(BuildVersionsException, bumpFile, "random.txt",
-                          "blahblah", "3.4.5")
-
-
-class TestGetPossibleNextVersions(unittest.TestCase):
-    def testBeta(self):
-        got = getPossibleNextVersions('17.0b1')
-        self.assertEquals(set(['17.0b2', '18.0b1']), got)
-
-    def testFinal(self):
-        got = getPossibleNextVersions('5.0')
-        self.assertEquals(set(['5.0.1', '6.0']), got)
-
-    def testFirstEsr(self):
-        got = getPossibleNextVersions('24.0esr')
-        self.assertEquals(set(['24.1.0esr', '24.0.1esr']), got)
-
-    def testEsr(self):
-        got = getPossibleNextVersions('24.2.0esr')
-        self.assertEquals(set(['24.2.1esr', '24.3.0esr']), got)
-
-    def testEsrResetLastDigit(self):
-        got = getPossibleNextVersions('24.4.1esr')
-        self.assertEquals(set(['24.4.2esr', '24.5.0esr']), got)
-
-    def testEsrDeprecatedStyle(self):
-        got = getPossibleNextVersions('10.0.4esr')
-        self.assertEquals(set(['10.0.5esr']), got)
-
-    def testPointRelease(self):
-        got = getPossibleNextVersions('15.0.2')
-        self.assertEquals(set(['15.0.3', '16.0']), got)
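The removed nextVersion() tests encode a simple rule: bump the trailing
numeric component (the alpha/beta digit or the last version part), keep any
"pre" suffix, and leave final versions such as "4.0" untouched. A rough
reconstruction under those assumptions (not the removed implementation):

    import re

    def next_version(version):
        m = re.match(r'^(.*?)(\d+)(pre)?$', version)
        prefix, num, pre = m.group(1), m.group(2), m.group(3) or ''
        if re.match(r'^\d+\.\d+$', prefix + num):
            return version  # final versions like "4.0" are not bumped
        return '%s%d%s' % (prefix, int(num) + 1, pre)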
--- a/lib/python/mozilla_buildtools/test/test_release_paths.py
+++ b/lib/python/mozilla_buildtools/test/test_release_paths.py
@@ -16,32 +16,8 @@ class TestReleasesDir(unittest.TestCase)
     def testRemote(self):
         got = makeReleasesDir('yy', protocol='http', server='foo.bar')
         self.assertEquals('http://foo.bar/pub/yy/releases/', got)
 
     def testRemoteAndVersioned(self):
         got = makeReleasesDir('yx', '1.0', protocol='https', server='cee.dee')
         self.assertEquals(
             'https://cee.dee/pub/yx/releases/1.0/', got)
-
-
-class TestCandidatesDir(unittest.TestCase):
-
-    def test_base(self):
-        expected = "/pub/bbb/candidates/1.0-candidates/build2/"
-        got = makeCandidatesDir('bbb', '1.0', 2)
-        self.assertEquals(expected, got)
-
-    def test_fennec(self):
-        expected = "/pub/mobile/candidates/15.1-candidates/build3/"
-        got = makeCandidatesDir('fennec', '15.1', 3)
-        self.assertEquals(expected, got)
-
-    def test_remote(self):
-        expected = "http://foo.bar/pub/bbb/candidates/1.0-candidates/build5/"
-        got = makeCandidatesDir('bbb', '1.0', 5, protocol="http",
-                                server='foo.bar')
-        self.assertEquals(expected, got)
-
-    def test_ftp_root(self):
-        expected = "pub/bbb/candidates/1.0-candidates/build5/"
-        got = makeCandidatesDir('bbb', '1.0', 5, ftp_root="pub/")
-        self.assertEquals(expected, got)
--- a/lib/python/release/info.py
+++ b/lib/python/release/info.py
@@ -1,47 +1,27 @@
 from datetime import datetime
 import os
 from os import path
 import re
 import shutil
 import sys
-from urllib2 import urlopen
-
-from release.paths import makeCandidatesDir
 
 import logging
 log = logging.getLogger(__name__)
 
 # If version has two parts with no trailing specifiers like "rc", we
 # consider it a "final" release for which we only create a _RELEASE tag.
 FINAL_RELEASE_REGEX = "^\d+\.\d+$"
 
 
 class ConfigError(Exception):
     pass
 
 
-def getBuildID(platform, product, version, buildNumber, nightlyDir='nightly',
-               server='stage.mozilla.org'):
-    infoTxt = makeCandidatesDir(product, version, buildNumber, nightlyDir,
-                                protocol='http', server=server) + \
-        '%s_info.txt' % platform
-    try:
-        buildInfo = urlopen(infoTxt).read()
-    except Exception:
-        log.error("Failed to retrieve %s", infoTxt)
-        raise
-
-    for line in buildInfo.splitlines():
-        key, value = line.rstrip().split('=', 1)
-        if key == 'buildID':
-            return value
-
-
 def getReleaseConfigName(product, branch, version, staging=False):
     if product in ("firefox", "fennec") and branch == "mozilla-release" and "b" in version:
         cfg = "release-%s-mozilla-beta.py" % product
     else:
         cfg = 'release-%s-%s.py' % (product, branch)
     if staging:
         cfg = 'staging_%s' % cfg
     return cfg
@@ -186,12 +166,8 @@ def fileInfo(filepath, product):
             ret['platform'] = m.group(2)
             ret['locale'] = m.group(3)
             ret['product'] = m.group(4).lower()
             ret['version'] = m.group(5)
         else:
             raise ValueError("Unknown filetype for %s" % filepath)
 
         return ret
-
-
-def getProductDetails(product, appVersion):
-    return 'https://www.mozilla.org/%%LOCALE%%/%s/%s/releasenotes/' % (product, appVersion)
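For context, the removed getBuildID() downloaded <platform>_info.txt from the
candidates directory and scanned its key=value lines for buildID. With
hypothetical file contents, the parsing reduces to:

    info_txt = "buildID=20181122182730\nurl=https://example.invalid/build"
    build_id = dict(line.rstrip().split('=', 1)
                    for line in info_txt.splitlines())['buildID']
    # build_id == '20181122182730'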
--- a/lib/python/release/l10n.py
+++ b/lib/python/release/l10n.py
@@ -1,144 +1,6 @@
-from urllib2 import urlopen
-from urlparse import urljoin
-try:
-    import simplejson as json
-except ImportError:
-    import json
-
-from build.l10n import getLocalesForChunk
-from release.platforms import buildbot2ftp, getPlatformLocales, \
-    getPlatformLocalesFromJson
-
-import logging
-log = logging.getLogger(__name__)
-
-
-def getShippedLocales(product, appName, version, buildNumber, sourceRepo,
-                      hg='https://hg.mozilla.org', revision=None):
-    if revision is not None:
-        tag = revision
-    else:
-        tag = '%s_%s_BUILD%s' % (product.upper(), version.replace('.', '_'),
-                                 str(buildNumber))
-    file = '%s/raw-file/%s/%s/locales/shipped-locales' % \
-        (sourceRepo, tag, appName)
-    url = urljoin(hg, file)
-    try:
-        sl = urlopen(url).read()
-    except Exception:
-        log.error("Failed to retrieve %s", url)
-        raise
-    return sl
-
-
 def parsePlainL10nChangesets(changesets):
     ret = {}
     for line in changesets.splitlines():
         locale, revision = line.rstrip().split()
         ret[locale] = revision
     return ret
-
-
-def getL10nRepositories(changesets, l10nRepoPath, relbranch=None):
-    """Parses a list of locale names and revisions for their associated
-       repository from the 'changesets' string passed in."""
-    # urljoin() will strip the last part of l10nRepoPath if it doesn't end
-    # with "/"
-    if not l10nRepoPath.endswith('/'):
-        l10nRepoPath = l10nRepoPath + '/'
-    repositories = {}
-    try:
-        for locale, data in json.loads(changesets).iteritems():
-            locale = urljoin(l10nRepoPath, locale)
-            repositories[locale] = {
-                'revision': data['revision'],
-                'relbranchOverride': relbranch,
-                'bumpFiles': []
-            }
-    except (TypeError, ValueError):
-        for locale, revision in parsePlainL10nChangesets(changesets).iteritems():
-            if revision == 'FIXME':
-                raise Exception('Found FIXME in changesets for locale "%s"' % locale)
-            locale = urljoin(l10nRepoPath, locale)
-            repositories[locale] = {
-                'revision': revision,
-                'relbranchOverride': relbranch,
-                'bumpFiles': []
-            }
-
-    return repositories
-
-
-def makeReleaseRepackUrls(productName, brandName, version, platform,
-                          locale='en-US', signed=False,
-                          exclude_secondary=False):
-    longVersion = version
-    builds = {}
-    platformDir = buildbot2ftp(platform)
-    if productName not in ('fennec',):
-        if platform.startswith('linux'):
-            filename = '%s.tar.bz2' % productName
-            builds[filename] = '/'.join([p.strip('/') for p in [
-                platformDir, locale, '%s-%s.tar.bz2' % (productName, version)]])
-        elif platform.startswith('macosx'):
-            filename = '%s.dmg' % productName
-            builds[filename] = '/'.join([p.strip('/') for p in [
-                platformDir, locale, '%s %s.dmg' % (brandName, longVersion)]])
-        elif platform.startswith('win'):
-            filename = '%s.zip' % productName
-            instname = '%s.exe' % productName
-            prefix = []
-            if not signed:
-                prefix.append('unsigned')
-            prefix.extend([platformDir, locale])
-            if not exclude_secondary:
-                builds[filename] = '/'.join(
-                    [p.strip('/') for p in
-                     prefix + ['%s-%s.zip' % (productName, version)]]
-                )
-            builds[instname] = '/'.join(
-                [p.strip('/') for p in
-                 prefix + ['%s Setup %s.exe' % (brandName, longVersion)]]
-            )
-        else:
-            raise "Unsupported platform"
-    else:
-        if platform.startswith('android'):
-            filename = '%s-%s.%s.android-arm.apk' % (
-                productName, version, locale)
-            prefix = []
-            if not signed:
-                prefix.append('unsigned')
-            prefix.extend([platformDir, locale])
-            builds[filename] = '/'.join(
-                [p.strip('/') for p in
-                 prefix + [filename]]
-            )
-        elif platform == 'linux':
-            filename = '%s.tar.bz2' % productName
-            builds[filename] = '/'.join([p.strip('/') for p in [
-                platform, locale, '%s-%s.%s.linux-i686.tar.bz2' % (productName, version, locale)]])
-        elif platform == 'macosx':
-            filename = '%s.dmg' % productName
-            builds[filename] = '/'.join([p.strip('/') for p in [
-                platform, locale, '%s-%s.%s.mac.dmg' % (brandName, version, locale)]])
-        elif platform == 'win32':
-            filename = '%s.zip' % productName
-            builds[filename] = '/'.join([p.strip('/') for p in [
-                platform, locale,
-                '%s-%s.%s.win32.zip' % (productName, version, locale)]])
-        else:
-            raise "Unsupported platform"
-
-    return builds
-
-
-def getReleaseLocalesForChunk(productName, appName, version, buildNumber,
-                              sourceRepo, platform, chunks, thisChunk,
-                              hg='https://hg.mozilla.org'):
-    possibleLocales = getPlatformLocales(
-        getShippedLocales(productName, appName, version, buildNumber,
-                          sourceRepo, hg),
-        (platform,)
-    )[platform]
-    return getLocalesForChunk(possibleLocales, chunks, thisChunk)
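getL10nRepositories() accepted changesets in either of two shapes: a JSON
mapping of locale to revision data, or the plain "locale revision" lines still
handled by parsePlainL10nChangesets() above. With hypothetical revisions:

    json_changesets = '{"de": {"revision": "abcdef123456"}}'
    plain_changesets = "de abcdef123456\nfr 123456abcdef"
    assert parsePlainL10nChangesets(plain_changesets) == {
        'de': 'abcdef123456', 'fr': '123456abcdef'}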
--- a/lib/python/release/paths.py
+++ b/lib/python/release/paths.py
@@ -1,32 +1,10 @@
 from urlparse import urlunsplit
 
-product_ftp_map = {
-    'fennec': 'mobile',
-}
-
-def product2ftp(product):
-    return product_ftp_map.get(product, product)
-
-
-def makeCandidatesDir(product, version, buildNumber, nightlyDir='candidates',
-                      protocol=None, server=None, ftp_root='/pub/'):
-    if protocol:
-        assert server is not None, "server is required with protocol"
-
-    product = product2ftp(product)
-    directory = ftp_root + product + '/' + nightlyDir + '/' + \
-        str(version) + '-candidates/build' + str(buildNumber) + '/'
-
-    if protocol:
-        return urlunsplit((protocol, server, directory, None, None))
-    else:
-        return directory
-
 
 def makeReleasesDir(product, version=None, protocol=None, server=None,
                     ftp_root='/pub/'):
     if protocol:
         assert server is not None, "server is required with protocol"
 
     directory = '%s%s/releases/' % (ftp_root, product)
     if version:
deleted file mode 100644
--- a/lib/python/release/platforms.py
+++ /dev/null
@@ -1,162 +0,0 @@
-# buildbot -> bouncer platform mapping
-# 64-bit windows
-try:
-    import simplejson as json
-except ImportError:
-    import json
-bouncer_platform_map = {'win32': 'win', 'win64': 'win64', 'macosx': 'osx',
-                        'linux': 'linux', 'linux64': 'linux64',
-                        'macosx64': 'osx'}
-# buildbot -> ftp platform mapping
-ftp_platform_map = {'win32': 'win32', 'win64': 'win64', 'macosx': 'mac',
-                    'linux': 'linux-i686', 'linux64': 'linux-x86_64',
-                    'macosx64': 'mac', 'linux-android': 'android',
-                    'linux-mobile': 'linux', 'macosx-mobile': 'macosx',
-                    'win32-mobile': 'win32', 'android': 'android',
-                    'android-xul': 'android-xul'}
-# buildbot -> shipped-locales platform mapping
-# TODO: make sure 'win64' is correct when shipped-locales becomes aware of it
-sl_platform_map = {'win32': 'win32', 'win64': 'win32', 'macosx': 'osx',
-                   'linux': 'linux', 'linux64': 'linux', 'macosx64': 'osx'}
-# buildbot -> update platform mapping
-update_platform_map = {
-    'android': ['Android_arm-eabi-gcc3'],
-    'android-api-11': ['Android_arm-eabi-gcc3'],
-    'android-api-15': ['Android_arm-eabi-gcc3'],
-    'android-api-15-old-id': ['Android_arm-eabi-gcc3'],
-    'android-api-16': ['Android_arm-eabi-gcc3'],
-    'android-api-16-old-id': ['Android_arm-eabi-gcc3'],
-    'android-x86': ['Android_x86-gcc3'],
-    'android-x86-old-id': ['Android_x86-gcc3'],
-    'android-aarch64': ['Android_aarch64-gcc3'],
-    'linux': ['Linux_x86-gcc3'],
-    'linux64': ['Linux_x86_64-gcc3'],
-    'linux64-asan-reporter': ['Linux_x86_64-gcc3-asan'],
-    'macosx64': ['Darwin_x86_64-gcc3-u-i386-x86_64',  # The main platform
-                 'Darwin_x86-gcc3-u-i386-x86_64',
-                 # We don't ship builds with these build targets, but some
-                 # users modify their builds in a way that has them report
-                 # like these. See bug 1071576 for details.
-                 'Darwin_x86-gcc3', 'Darwin_x86_64-gcc3'],
-    'win32': ['WINNT_x86-msvc', 'WINNT_x86-msvc-x86', 'WINNT_x86-msvc-x64'],
-    'win64': ['WINNT_x86_64-msvc', 'WINNT_x86_64-msvc-x64'],
-    'win64-asan-reporter': ['WINNT_x86_64-msvc-x64-asan'],
-}
-
-# These FTP -> other mappings are provided so that things interpreting patcher
-# configs can figure out update/bouncer platforms without using the Buildbot
-# platform as an intermediary. (It's difficult to do that, because ftp:buildbot
-# is a many:1 mapping, so some guesswork ends up being involved.) In the future
-# when we redesign the patcher configs we can use the Buildbot platform in them
-# and rid ourselves of these mappings. (bug 778125)
-ftp_update_platform_map = {
-    'linux-i686': update_platform_map['linux'],
-    'linux-x86_64': update_platform_map['linux64'],
-    'mac': update_platform_map['macosx64'],
-    'win32': update_platform_map['win32'],
-    'win64': update_platform_map['win64'],
-}
-
-ftp_bouncer_platform_map = {
-    'linux-i686': 'linux',
-    'linux-x86_64': 'linux64',
-    'mac': 'osx',
-    'win32': 'win',
-    'win64': 'win64',
-}
-
-
-def buildbot2bouncer(platform):
-    return bouncer_platform_map.get(platform, platform)
-
-
-def buildbot2ftp(platform):
-    return ftp_platform_map.get(platform, platform)
-
-
-def buildbot2shippedlocales(platform):
-    return sl_platform_map.get(platform, platform)
-
-
-def shippedlocales2buildbot(platform):
-    # Reverse lookup: collect every buildbot platform that maps to the
-    # given shipped-locales platform.
-    matches = [k for k, v in sl_platform_map.iteritems() if v == platform]
-    if matches:
-        return matches
-    # No match: assume the name is already a buildbot platform.
-    return [platform]
-
-
-def buildbot2updatePlatforms(platform):
-    return update_platform_map.get(platform, [platform])
-
-
-def ftp2updatePlatforms(platform):
-    return ftp_update_platform_map.get(platform, platform)
-
-
-def ftp2bouncer(platform):
-    return ftp_bouncer_platform_map.get(platform, platform)
-
-
-def getPlatformLocales(shipped_locales, platforms):
-    platform_locales = {}
-    for platform in platforms:
-        platform_locales[platform] = []
-    for line in shipped_locales.splitlines():
-        entry = line.strip().split()
-        locale = entry[0]
-        if len(entry) > 1:
-            for platform in entry[1:]:
-                for bb_platform in shippedlocales2buildbot(platform):
-                    if bb_platform in platforms:
-                        platform_locales[bb_platform].append(locale)
-        else:
-            for platform in platforms:
-                platform_locales[platform].append(locale)
-    return platform_locales
-
-
-def getLocaleListFromShippedLocales(shipped_locales):
-    """ return the list of locales in shipped_locales, without platform specific info """
-    shipped_locales_list = []
-    for line in shipped_locales.splitlines():
-        entry = line.strip().split()
-        shipped_locales_list.append(entry[0])
-    return shipped_locales_list
-
-
-def getPlatformLocalesFromJson(json_file, platforms):
-    platform_locales = {}
-    for platform in platforms:
-        platform_locales[platform] = []
-    fh = open(json_file)
-    json_contents = json.load(fh)
-    fh.close()
-    for locale in json_contents.keys():
-        for platform in json_contents[locale]["platforms"]:
-            if platform not in platform_locales:
-                platform_locales[platform] = []
-            platform_locales[platform].append(locale)
-    return platform_locales
-
-
-def getAllLocales(shipped_locales):
-    locales = []
-    f = open(shipped_locales)
-    for line in f.readlines():
-        entry = line.split()
-        # skip blank lines, which would otherwise raise IndexError
-        if entry:
-            locales.append(entry[0])
-    f.close()
-    return locales
-
-
-def getPlatforms():
-    return bouncer_platform_map.keys()
-
-
-def getSupportedPlatforms():
-    return ('linux', 'linux64', 'win32', 'win64', 'macosx', 'macosx64')
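All of the lookup helpers above fall back to their input when a platform is
missing from their map, and buildbot2updatePlatforms() fans a single buildbot
platform out to a list of update targets:

    assert buildbot2updatePlatforms('linux64') == ['Linux_x86_64-gcc3']
    assert buildbot2updatePlatforms('win64') == ['WINNT_x86_64-msvc',
                                                 'WINNT_x86_64-msvc-x64']
    assert buildbot2updatePlatforms('new-platform') == ['new-platform']
    assert buildbot2bouncer('macosx64') == 'osx'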
--- a/lib/python/release/versions.py
+++ b/lib/python/release/versions.py
@@ -1,36 +1,7 @@
 import re
 
 from build.versions import ANY_VERSION_REGEX
 
 
 def getAppVersion(version):
     return re.match(ANY_VERSION_REGEX, version).group(1)
-
-
-def getPrettyVersion(version):
-    version = re.sub(r'a([0-9]+)$', r' Alpha \1', version)
-    version = re.sub(r'b([0-9]+)$', r' Beta \1', version)
-    version = re.sub(r'rc([0-9]+)$', r' RC \1', version)
-    return version
-
-
-def getL10nDashboardVersion(version, product, parse_version=True):
-    if product == 'firefox':
-        ret = 'fx'
-    elif product == 'fennec':
-        ret = 'fennec'
-    elif product == 'thunderbird':
-        ret = 'tb'
-    elif product == 'seamonkey':
-        ret = 'sea'
-    else:
-        # avoid a NameError further down for unknown products
-        raise ValueError("unknown product %s" % product)
-
-    if not parse_version:
-        ret += version
-    else:
-        parsed = re.match(ANY_VERSION_REGEX, version)
-        if parsed.group(2) and parsed.group(2).startswith('b'):
-            ret = '%s%s_beta_%s' % (
-                ret, version.split(".")[0], parsed.group(2))
-        else:
-            ret += version
-    return ret
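getPrettyVersion() only rewrites a trailing alpha/beta/rc marker; anything
else passes through unchanged:

    assert getPrettyVersion('57.0b3') == '57.0 Beta 3'
    assert getPrettyVersion('4.0rc1') == '4.0 RC 1'
    assert getPrettyVersion('4.0.1') == '4.0.1'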
--- a/lib/python/util/algorithms.py
+++ b/lib/python/util/algorithms.py
@@ -16,19 +16,8 @@ def getChunk(things, chunks, thisChunk):
         n = nThings / chunks
         # If our things aren't evenly divisible by the number of chunks
         # we need to append one more onto some of them
         if c <= (nThings % chunks):
             n += 1
         if c == thisChunk:
             return possibleThings[0:n]
         del possibleThings[0:n]
-
-
-# From https://stackoverflow.com/questions/3232943/update-value-of-a-nested-dictionary-of-varying-depth
-def recursive_update(d, u):
-    for k, v in u.iteritems():
-        if isinstance(v, collections.Mapping):
-            r = recursive_update(d.get(k, {}), v)
-            d[k] = r
-        else:
-            d[k] = u[k]
-    return d
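Unlike dict.update(), recursive_update() descends into nested mappings instead
of replacing them, mutating and returning its first argument:

    d = {'partials': {'111': 'old'}, 'other': 1}
    recursive_update(d, {'partials': {'112': 'new'}})
    # d == {'partials': {'111': 'old', '112': 'new'}, 'other': 1}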
deleted file mode 100644
--- a/lib/python/vendor/balrogclient-0.0.4/balrogclient/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from balrogclient.api import is_csrf_token_expired, SingleLocale, Release, Rule, ScheduledRuleChange
-
-__all__ = [ 'is_csrf_token_expired', 'SingleLocale', 'Release', 'Rule', 'ScheduledRuleChange' ]
deleted file mode 100644
--- a/lib/python/vendor/balrogclient-0.0.4/balrogclient/api.py
+++ /dev/null
@@ -1,254 +0,0 @@
-"""
-Balrog API wrapper
-"""
-
-import json
-import logging
-import time
-import requests
-
-
-def is_csrf_token_expired(token):
-    """Checks whether a CSRF token is still valid
-
-    Expects a token of the form "YYYYMMDDHHMMSS##..."
-
-    Returns:
-        True if the token has expired
-        False if the token is still valid
-    """
-    from datetime import datetime
-    expiry = token.split('##')[0]
-    if expiry <= datetime.now().strftime('%Y%m%d%H%M%S'):
-        return True
-    return False
-
-
-class API(object):
-    """A class that knows how to make requests to a Balrog server, including
-       pre-retrieving CSRF tokens and data versions.
-
-       url_template: The URL to submit to when request() is called. Standard
-                     Python string interpolation can be used here in
-                     combination with url_template_vars.
-       prerequest_url_template: Before submitting the real request, a HEAD
-                                operation will be done on this URL. If the
-                                HEAD request succeeds, it is expected that
-                                there will be X-CSRF-Token and X-Data-Version
-                                headers in the response. If the HEAD request
-                                results in a 404, another HEAD request to
-                                /csrf_token will be made in an attempt to get a
-                                CSRF Token. This URL can use string
-                                interpolation the same way url_template can.
-                                In some cases this may be the same as the
-                                url_template.
-    """
-    verify = False
-    auth = None
-    url_template = None
-    prerequest_url_template = None
-    url_template_vars = None
-
-    def __init__(self, api_root='https://aus4-admin-dev.allizom.org/api',
-                 auth=None, ca_certs=True, timeout=60,
-                 raise_exceptions=True):
-        """ Creates an API object which wraps REST API of Balrog server.
-
-        api_root: API root URL of balrog server
-        auth    : a tuple of (username, password) or None
-        ca_certs: CA bundle. It follows python-requests `verify' usage.
-                  If set to False, no SSL verification is done.
-                  If set to True, it tries to load a CA bundle from certifi
-                  module.
-                  If set to a string, python-requests uses it as a path to a
-                  CA bundle.
-        timeout : request timeout
-        raise_exceptions: controls exception handling of python-requests.
-        """
-        self.api_root = api_root.rstrip('/')
-        self.verify = ca_certs
-        assert isinstance(auth, tuple) or auth is None, \
-            "auth should be set to tuple or None"
-        self.auth = auth
-        self.timeout = timeout
-        self.raise_exceptions = raise_exceptions
-        self.session = requests.session()
-        self.csrf_token = None
-
-    def request(self, data=None, method='GET'):
-        url = self.api_root + self.url_template % self.url_template_vars
-        prerequest_url = self.api_root + \
-            self.prerequest_url_template % self.url_template_vars
-        # If we'll be modifying things, do a GET first to get a CSRF token
-        # and possibly a data_version.
-        if method != 'GET' and method != 'HEAD':
-            # Use the URL of the resource we're going to modify first,
-            # because we'll need a CSRF token, and maybe its data version.
-            try:
-                res = self.do_request(prerequest_url, None, 'HEAD')
-                # If a data_version was specified we shouldn't overwrite it
-                # because the caller may be acting on a modified version of
-                # a specific older version of the data.
-                if 'data_version' not in data:
-                    data['data_version'] = int(res.headers['X-Data-Version'])
-                # We may already have a non-expired CSRF token, but it's
-                # faster/easier just to set it again even if we do, since
-                # we've already made the request.
-                data['csrf_token'] = self.csrf_token = res.headers[
-                    'X-CSRF-Token']
-            except requests.HTTPError as excp:
-                # However, if the resource doesn't exist yet we may as well
-                # not bother doing another request solely for a token unless
-                # we don't have a valid one already.
-                if excp.response.status_code != 404:
-                    raise
-                if not self.csrf_token or is_csrf_token_expired(self.csrf_token):
-                    res = self.do_request(
-                        self.api_root + '/csrf_token', None, 'HEAD')
-                    data['csrf_token'] = self.csrf_token = res.headers[
-                        'X-CSRF-Token']
-
-        return self.do_request(url, data, method)
-
-    def do_request(self, url, data, method):
-        logging.debug('Balrog request to %s', url)
-        if data is not None and 'csrf_token' in data:
-            sanitised_data = data.copy()
-            del sanitised_data['csrf_token']
-            logging.debug('Data sent: %s', sanitised_data)
-        else:
-            logging.debug('Data sent: %s', data)
-        headers = {'Accept-Encoding': 'application/json',
-                   'Accept': 'application/json',
-                   'Content-Type': 'application/json'}
-        before = time.time()
-        req = self.session.request(
-            method=method, url=url, data=json.dumps(data), timeout=self.timeout,
-            verify=self.verify, auth=self.auth, headers=headers)
-        try:
-            if self.raise_exceptions:
-                req.raise_for_status()
-            return req
-        except requests.HTTPError as excp:
-            logging.error('Caught HTTPError: %s', excp.response.content)
-            raise
-        finally:
-            stats = {
-                "timestamp": time.time(),
-                "method": method,
-                "url": url,
-                "status_code": req.status_code,
-                "elapsed_secs": time.time() - before,
-            }
-            logging.debug('REQUEST STATS: %s', json.dumps(stats))
-
-    def get_data(self):
-        resp = self.request()
-        return (json.loads(resp.content), int(resp.headers['X-Data-Version']))
-
-
-class Release(API):
-    url_template = '/releases/%(name)s'
-    prerequest_url_template = '/releases/%(name)s'
-
-    def __init__(self, name, **kwargs):
-        super(Release, self).__init__(**kwargs)
-        self.name = name
-        self.url_template_vars = dict(name=name)
-
-    def update_release(self, product, hashFunction, releaseData,
-                       data_version=None, schemaVersion=None):
-        data = dict(name=self.name, product=product,
-                    hashFunction=hashFunction, data=releaseData)
-        if data_version:
-            data['data_version'] = data_version
-        if schemaVersion:
-            data['schema_version'] = schemaVersion
-        return self.request(method='POST', data=data)
-
-
-class SingleLocale(API):
-    url_template = '/releases/%(name)s/builds/%(build_target)s/%(locale)s'
-    prerequest_url_template = '/releases/%(name)s'
-
-    def __init__(self, name, build_target, locale, **kwargs):
-        super(SingleLocale, self).__init__(**kwargs)
-        self.name = name
-        self.build_target = build_target
-        self.locale = locale
-        self.url_template_vars = dict(name=name, build_target=build_target,
-                                      locale=locale)
-        # keep a copy to be used in get_data()
-        self.release_kwargs = kwargs
-
-    def get_data(self):
-        data, data_version = {}, None
-        # If the locale-specific API endpoint returns 404, we have to use the
-        # top level blob to get the data version. Because this requires two
-        # non-atomic HTTP requests, we start with the top level blob and use its
-        # data version.
-        top_level = Release(name=self.name, **self.release_kwargs)
-        # Use data version from the top level blob
-        try:
-            _, data_version = top_level.get_data()
-        except requests.HTTPError as excp:
-            if excp.response.status_code == 404:
-                # top level blob doesn't exist, assume there is no data
-                return data, data_version
-            else:
-                raise
-        # Got data version. Try to get data from the locale specific blob.
-        # Using data version from the top level blob prevents possible race
-        # conditions if another client updates the locale blob between the
-        # first request and the call below.
-        try:
-            data, _ = super(SingleLocale, self).get_data()
-            return data, data_version
-        except requests.HTTPError as excp:
-            if excp.response.status_code == 404:
-                # locale blob doesn't exist, no data
-                return data, data_version
-            else:
-                raise
-
-    def update_build(self, product, hashFunction, buildData,
-                     alias=None, schemaVersion=None, data_version=None):
-        data = dict(product=product, data=buildData, hashFunction=hashFunction)
-        if alias:
-            data['alias'] = alias
-        if data_version:
-            data['data_version'] = data_version
-        if schemaVersion:
-            data['schema_version'] = schemaVersion
-
-        return self.request(method='PUT', data=data)
-
-
-class Rule(API):
-    """Update Balrog rules"""
-    url_template = '/rules/%(rule_id)s'
-    prerequest_url_template = '/rules/%(rule_id)s'
-
-    def __init__(self, rule_id, **kwargs):
-        super(Rule, self).__init__(**kwargs)
-        self.rule_id = rule_id
-        self.url_template_vars = dict(rule_id=rule_id)
-
-    def update_rule(self, **rule_data):
-        """wrapper for self.request"""
-        return self.request(method='POST', data=rule_data)
-
-
-class ScheduledRuleChange(API):
-    """Update Balrog rules"""
-    url_template = '/scheduled_changes/rules'
-    prerequest_url_template = '/rules/%(rule_id)s'
-
-    def __init__(self, rule_id, **kwargs):
-        super(ScheduledRuleChange, self).__init__(**kwargs)
-        self.rule_id = rule_id
-        self.url_template_vars = dict(rule_id=rule_id)
-
-    def add_scheduled_rule_change(self, **rule_data):
-        """wrapper for self.request"""
-        return self.request(method='POST', data=rule_data)
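For reference, a minimal usage sketch of the client above; the api_root,
credentials, and the backgroundRate field are placeholders rather than values
taken from this repository:

    rule = Rule(rule_id=42, api_root='https://balrog.example/api',
                auth=('user', 'secret'))
    data, data_version = rule.get_data()
    rule.update_rule(backgroundRate=25, data_version=data_version)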
deleted file mode 100644
deleted file mode 100644
--- a/lib/python/vendor/balrogclient-0.0.4/balrogclient/test/test_balrog_api.py
+++ /dev/null
@@ -1,30 +0,0 @@
-
-try:
-    # Python 2.6 backport with assertDictEqual()
-    import unittest2 as unittest
-except ImportError:
-    import unittest
-
-from balrogclient import is_csrf_token_expired
-
-
-class TestCsrfTokenExpiry(unittest.TestCase):
-    """
-    is_csrf_token_expired expects a token
-    of the form %Y%m%d%H%M%S##foo
-    """
-
-    def _generate_date_string(self, days_offset=0):
-        from datetime import datetime, timedelta
-        return (datetime.now() + timedelta(days=days_offset)).strftime('%Y%m%d%H%M%S')
-
-    def test_valid_csrf_token_has_not_expired(self):
-        tomorrow = self._generate_date_string(days_offset=1)
-        self.assertFalse(is_csrf_token_expired(tomorrow))
-
-    def test_valid_csrf_token_has_expired(self):
-        yesterday = self._generate_date_string(days_offset=-1)
-        self.assertTrue(is_csrf_token_expired(yesterday))
-
-    def test_invalid_csrf_token(self):
-        pass
deleted file mode 100644
--- a/lib/python/vendor/balrogclient-0.0.4/run-tests.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-
-IMAGE='balrogclient-test'
-
-docker build -t ${IMAGE} -f Dockerfile.dev .
-
-if [ -n "${NO_VOLUME_MOUNT}" ]; then
-    echo "Running tests without volume mount"
-    docker run --rm ${IMAGE} tox -c /app/tox.ini "$@"
-else
-    echo "Running tests with volume mount"
-    docker run --rm -v "$(pwd)":/app ${IMAGE} tox -c /app/tox.ini "$@"
-fi
-
deleted file mode 100644
--- a/lib/python/vendor/balrogclient-0.0.4/setup.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#! /usr/bin/env python
-
-from setuptools import setup, find_packages
-
-setup(
-    name="balrogclient",
-    version="0.0.4",
-    description="Balrog Admin API Client",
-    author="Release Engineers",
-    author_email="release@mozilla.com",
-
-    packages=['balrogclient'],
-
-    test_suite='balrogclient.test',
-    install_requires=[
-        'requests',
-    ],
-    include_package_data=True,
-)
deleted file mode 100644
--- a/lib/python/vendor/balrogclient-0.0.4/tox.ini
+++ /dev/null
@@ -1,50 +0,0 @@
-[tox]
-envlist = py27,py35
-
-[base]
-deps =
-    nose==1.3.0
-    rednose==0.4.1
-    coverage==3.6
-    pep8==1.4.3
-    jinja2==2.6
-    mock==1.0.1
-    requests==2.11.1
-
-[testenv]
-
-commands =
-    coverage erase
-    coverage run --branch --source {toxinidir}/balrogclient {envbindir}/nosetests -v --with-xunit --rednose --force-color {toxinidir}/balrogclient {posargs}
-
-
-[testenv:py27]
-basepython = python2.7
-deps =
-    {[base]deps}
-    
-
-[testenv:py35]
-basepython = python3.5
-deps =
-    {[base]deps}
-
-
-[testenv:py27-coveralls]
-basepython = python2.7
-deps=
-    python-coveralls==2.4.3
-commands=
-    coveralls
-
-[testenv:py35-coveralls]
-basepython = python3.5
-deps=
-    python-coveralls==2.4.3
-commands=
-    coveralls
-
-[pep8]
-max-line-length = 159
-exclude = vendor,.tox,
-
--- a/lib/python/vendorlibs.pth
+++ b/lib/python/vendorlibs.pth
@@ -5,13 +5,12 @@ vendor/requests-2.7.0/
 vendor/mozpoolclient-0.1.6/
 vendor/Jinja2-2.7.3/
 vendor/MarkupSafe-0.23/
 vendor/redo-1.4.1/
 vendor/jsonmerge-1.1.0/
 vendor/jsonschema-2.5.1/
 vendor/functools32-3.2.3-2/
 vendor/repoze.lru-ef418de/
-vendor/balrogclient-0.0.4/
 vendor/certifi-2016.9.26/
 vendor/arrow-0.10.0/
 vendor/python-dateutil-2.6.0/
 vendor/six-1.10.0/