Bug 1541332 - Remove unused setup feature for file records. r=glandium
authorEmilio Cobos Álvarez <emilio@crisal.io>
Wed, 03 Apr 2019 07:31:05 +0000
changeset 467739 3cbe8df933a271205e2de3a2f253db94e0d52773
parent 467738 3134740d831cc24b5b931a8512584100fcc10471
child 467740 2d7fcd115d9fd9e5cf19b40dcf634d885dc3d2df
push id 35810
push user aciure@mozilla.com
push date Thu, 04 Apr 2019 04:33:36 +0000
treeherder mozilla-central@b72c02e34261 [default view] [failures only]
perfherder [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers glandium
bugs 1541332, 1426785
milestone 68.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1541332 - Remove unused setup feature for file records. r=glandium This feature appears to have been unused since bug 1426785. Differential Revision: https://phabricator.services.mozilla.com/D25904
python/mozbuild/mozbuild/action/tooltool.py
python/mozbuild/mozbuild/mach_commands.py
--- a/python/mozbuild/mozbuild/action/tooltool.py
+++ b/python/mozbuild/mozbuild/action/tooltool.py
@@ -75,30 +75,29 @@ class DigestMismatchException(ExceptionW
 
 class MissingFileException(ExceptionWithFilename):
     pass
 
 
 class FileRecord(object):
 
     def __init__(self, filename, size, digest, algorithm, unpack=False,
-                 version=None, visibility=None, setup=None):
+                 version=None, visibility=None):
         object.__init__(self)
         if '/' in filename or '\\' in filename:
             log.error(
                 "The filename provided contains path information and is, therefore, invalid.")
             raise BadFilenameException(filename=filename)
         self.filename = filename
         self.size = size
         self.digest = digest
         self.algorithm = algorithm
         self.unpack = unpack
         self.version = version
         self.visibility = visibility
-        self.setup = setup
 
     def __eq__(self, other):
         if self is other:
             return True
         if self.filename == other.filename and \
            self.size == other.size and \
            self.digest == other.digest and \
            self.algorithm == other.algorithm and \
@@ -180,18 +179,16 @@ class FileRecordJSONEncoder(json.JSONEnc
                 'digest': obj.digest,
             }
             if obj.unpack:
                 rv['unpack'] = True
             if obj.version:
                 rv['version'] = obj.version
             if obj.visibility is not None:
                 rv['visibility'] = obj.visibility
-            if obj.setup:
-                rv['setup'] = obj.setup
             return rv
 
     def default(self, f):
         if issubclass(type(f), list):
             record_list = []
             for i in f:
                 record_list.append(self.encode_file_record(i))
             return record_list
@@ -227,20 +224,19 @@ class FileRecordJSONDecoder(json.JSONDec
                 if req not in obj:
                     missing = True
                     break
 
             if not missing:
                 unpack = obj.get('unpack', False)
                 version = obj.get('version', None)
                 visibility = obj.get('visibility', None)
-                setup = obj.get('setup')
                 rv = FileRecord(
                     obj['filename'], obj['size'], obj['digest'], obj['algorithm'],
-                    unpack, version, visibility, setup)
+                    unpack, version, visibility)
                 log.debug("materialized %s" % rv)
                 return rv
         return obj
 
     def decode(self, s):
         decoded = json.JSONDecoder.decode(self, s)
         rv = self.process_file_records(decoded)
         return rv
@@ -534,17 +530,17 @@ def _cache_checksum_matches(base_file, c
         return False
 
 
 def _compute_cache_checksum(filename):
     with open(filename, "rb") as f:
         return digest_file(f, "sha256")
 
 
-def unpack_file(filename, setup=None):
+def unpack_file(filename):
     """Untar `filename`, assuming it is uncompressed or compressed with bzip2,
     xz, gzip, or unzip a zip file. The file is assumed to contain a single
     directory with a name matching the base of the given filename.
     Xz support is handled by shelling out to 'tar'."""
 
     checksum = _compute_cache_checksum(filename)
 
     if tarfile.is_tarfile(filename):
@@ -576,18 +572,16 @@ def unpack_file(filename, setup=None):
         z.close()
     else:
         log.error("Unknown archive extension for filename '%s'" % filename)
         return False
 
     with open(base_file + CHECKSUM_SUFFIX, "wb") as f:
         f.write(checksum)
 
-    if setup and not execute(os.path.join(base_file, setup)):
-        return False
     return True
 
 
 def fetch_files(manifest_file, base_urls, filenames=[], cache_folder=None,
                 auth_file=None, region=None):
     # Lets load the manifest file
     try:
         manifest = open_manifest(manifest_file)
@@ -605,19 +599,16 @@ def fetch_files(manifest_file, base_urls
     # We want to track files that fail to be fetched as well as
     # files that are fetched
     failed_files = []
     fetched_files = []
 
     # Files that we want to unpack.
     unpack_files = []
 
-    # Setup for unpacked files.
-    setup_files = {}
-
     # Lets go through the manifest and fetch the files that we want
     for f in manifest.file_records:
         # case 1: files are already present
         if f.present():
             if f.validate():
                 present_files.append(f.filename)
                 if f.unpack:
                     unpack_files.append(f.filename)
@@ -667,23 +658,16 @@ def fetch_files(manifest_file, base_urls
             temp_file_name = fetch_file(base_urls, f, auth_file=auth_file, region=region)
             if temp_file_name:
                 fetched_files.append((f, temp_file_name))
             else:
                 failed_files.append(f.filename)
         else:
             log.debug("skipping %s" % f.filename)
 
-        if f.setup:
-            if f.unpack:
-                setup_files[f.filename] = f.setup
-            else:
-                log.error("'setup' requires 'unpack' being set for %s" % f.filename)
-                failed_files.append(f.filename)
-
     # lets ensure that fetched files match what the manifest specified
     for localfile, temp_file_name in fetched_files:
         # since I downloaded to a temp file, I need to perform all validations on the temp file
         # this is why filerecord_for_validation is created
 
         filerecord_for_validation = FileRecord(
             temp_file_name, localfile.size, localfile.digest, localfile.algorithm)
 
@@ -716,17 +700,17 @@ def fetch_files(manifest_file, base_urls
                                 (localfile.filename, cache_folder), exc_info=True)
         else:
             failed_files.append(localfile.filename)
             log.error("'%s'" % filerecord_for_validation.describe())
             os.remove(temp_file_name)
 
     # Unpack files that need to be unpacked.
     for filename in unpack_files:
-        if not unpack_file(filename, setup_files.get(filename)):
+        if not unpack_file(filename):
             failed_files.append(filename)
 
     # If we failed to fetch or validate a file, we need to fail
     if len(failed_files) > 0:
         log.error("The following files failed: '%s'" %
                   "', ".join(failed_files))
         return False
     return True
--- a/python/mozbuild/mozbuild/mach_commands.py
+++ b/python/mozbuild/mozbuild/mach_commands.py
@@ -1426,18 +1426,17 @@ class PackageFrontend(MachCommandBase):
         if tooltool_manifest:
             manifest = open_manifest(tooltool_manifest)
             for record in manifest.file_records:
                 url = '{}/{}/{}'.format(tooltool_url, record.algorithm,
                                         record.digest)
                 records[record.filename] = DownloadRecord(
                     url, record.filename, record.size, record.digest,
                     record.algorithm, unpack=record.unpack,
-                    version=record.version, visibility=record.visibility,
-                    setup=record.setup)
+                    version=record.version, visibility=record.visibility)
 
         if from_build:
             if 'MOZ_AUTOMATION' in os.environ:
                 self.log(logging.ERROR, 'artifact', {},
                          'Do not use --from-build in automation; all dependencies '
                          'should be determined in the decision task.')
                 return 1
             from taskgraph.optimize import IndexSearch
@@ -1572,17 +1571,17 @@ class PackageFrontend(MachCommandBase):
                         data = fh.read(1024 * 1024)
                         if not data:
                             break
                         h.update(data)
                 artifacts[record.url] = {
                     'sha256': h.hexdigest(),
                 }
             if record.unpack and not no_unpack:
-                unpack_file(local, record.setup)
+                unpack_file(local)
                 os.unlink(local)
 
         if not downloaded:
             self.log(logging.ERROR, 'artifact', {}, 'Nothing to download')
             if files:
                 return 1
 
         if artifacts: