backed out by | 02bd7db82457
author | Chris Manchester <cmanchester@mozilla.com>
date | Thu, 01 Dec 2016 14:28:55 -0800
changeset 325149 | 09e996bdc9e4b356a673b5737c71e2793646d5e7
parent 325148 | bbb8642886b817f5cdd643368cde17469e53124c
child 325150 | 02bd7db82457bef18f5d03d7544b6225655ba2e1
push id | 84610
push user | philringnalda@gmail.com
push date | Sat, 03 Dec 2016 06:28:13 +0000
treeherder | mozilla-inbound@1b2237e0b5e0
perfherder | talos, build metrics, platform microbench (compared to previous push)
reviewers | gps
bugs | 1308982
milestone | 53.0a1
first release with | nightly linux32, linux64, mac, win32, win64
last release without | nightly linux32, linux64, mac, win32, win64
--- a/python/mozbuild/mozbuild/config_status.py
+++ b/python/mozbuild/mozbuild/config_status.py
@@ -149,16 +149,19 @@ def config_status(topobjdir='.', topsrcd
     for the_backend in selected_backends:
         the_backend.consume(definitions)
 
     execution_time = 0.0
     for obj in chain((reader, emitter), selected_backends):
         summary = obj.summary()
         print(summary, file=sys.stderr)
         execution_time += summary.execution_time
+        if hasattr(obj, 'gyp_summary'):
+            summary = obj.gyp_summary()
+            print(summary, file=sys.stderr)
 
     cpu_time = time.clock() - cpu_start
     wall_time = time.time() - time_start
     efficiency = cpu_time / wall_time if wall_time else 100
     untracked = wall_time - execution_time
 
     print(
         'Total wall time: {:.2f}s; CPU time: {:.2f}s; Efficiency: '
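The new lines rely on duck typing: only objects that grow a gyp_summary() method (the BuildReader in this patch) get the extra report, so config_status probes with hasattr() rather than special-casing the reader. A minimal sketch of the pattern, using hypothetical FakeSummary/FakeReader stand-ins rather than the real mozbuild.base.ExecutionSummary:

from __future__ import print_function
import sys

class FakeSummary(object):
    # Stand-in for an execution summary: a format string plus the values
    # used both for display and for time accounting.
    def __init__(self, fmt, **kwargs):
        self.execution_time = kwargs.get('execution_time', 0.0)
        self._message = fmt.format(**kwargs)

    def __str__(self):
        return self._message

class FakeReader(object):
    def summary(self):
        return FakeSummary('Read {file_count:d} files in {execution_time:.2f}s',
                           file_count=12, execution_time=0.34)

    # Optional extra summary; only some objects provide one.
    def gyp_summary(self):
        return FakeSummary('Read {file_count:d} gyp files in parallel contributing '
                           '{execution_time:.2f}s to total wall time',
                           file_count=3, execution_time=0.05)

execution_time = 0.0
for obj in (FakeReader(),):
    summary = obj.summary()
    print(summary, file=sys.stderr)
    execution_time += summary.execution_time
    # Same probe as in config_status above.
    if hasattr(obj, 'gyp_summary'):
        print(obj.gyp_summary(), file=sys.stderr)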
--- a/python/mozbuild/mozbuild/frontend/gyp_reader.py
+++ b/python/mozbuild/mozbuild/frontend/gyp_reader.py
@@ -3,16 +3,17 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, unicode_literals
 
 import gyp
 import gyp.msvs_emulation
 import sys
 import os
+import time
 import types
 import mozpack.path as mozpath
 from mozpack.files import FileFinder
 from .sandbox import alphabetical_sorted
 from .context import (
     ObjDirPath,
     SourcePath,
     TemplateContext,
@@ -104,63 +105,21 @@ def handle_copies(copies, context):
             raise NotImplementedError('GYP copies to somewhere other than <(PRODUCT_DIR)/dist not supported: %s' % dest)
         dest_paths = dest[len(dist):].split('/')
         exports = context['EXPORTS']
         while dest_paths:
             exports = getattr(exports, dest_paths.pop(0))
         exports += sorted(copy['files'], key=lambda x: x.lower())
 
 
-def read_from_gyp(config, path, output, vars, no_chromium, no_unified, action_overrides, non_unified_sources = set()):
-    """Read a gyp configuration and emits GypContexts for the backend to
-    process.
-
-    config is a ConfigEnvironment, path is the path to a root gyp configuration
-    file, output is the base path under which the objdir for the various gyp
-    dependencies will be, and vars a dict of variables to pass to the gyp
-    processor.
-    """
-
-    is_win = config.substs['OS_TARGET'] == 'WINNT'
-    is_msvc = bool(config.substs['_MSC_VER'])
-    # gyp expects plain str instead of unicode. The frontend code gives us
-    # unicode strings, so convert them.
-    path = encode(path)
-    str_vars = dict((name, encode(value)) for name, value in vars.items())
-    if is_msvc:
-        # This isn't actually used anywhere in this generator, but it's needed
-        # to override the registry detection of VC++ in gyp.
-        os.environ['GYP_MSVS_OVERRIDE_PATH'] = 'fake_path'
-        os.environ['GYP_MSVS_VERSION'] = config.substs['MSVS_VERSION']
-
-    params = {
-        b'parallel': False,
-        b'generator_flags': {},
-        b'build_files': [path],
-        b'root_targets': None,
-    }
-
-    if no_chromium:
-        includes = []
-        depth = mozpath.dirname(path)
-    else:
-        depth = chrome_src
-        # Files that gyp_chromium always includes
-        includes = [encode(mozpath.join(script_dir, 'common.gypi'))]
-        finder = FileFinder(chrome_src, find_executables=False)
-        includes.extend(encode(mozpath.join(chrome_src, name))
-                        for name, _ in finder.find('*/supplement.gypi'))
-
-    # Read the given gyp file and its dependencies.
-    generator, flat_list, targets, data = \
-        gyp.Load([path], format=b'mozbuild',
-                 default_variables=str_vars,
-                 includes=includes,
-                 depth=encode(depth),
-                 params=params)
+def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output,
+                       non_unified_sources, action_overrides):
+    flat_list, targets, data = gyp_result
+    no_chromium = gyp_dir_attrs.no_chromium
+    no_unified = gyp_dir_attrs.no_unified
 
     # Process all targets from the given gyp files and its dependencies.
     # The path given to AllTargets needs to use os.sep, while the frontend code
     # gives us paths normalized with forward slash separator.
     for target in gyp.common.AllTargets(flat_list, targets, path.replace(b'/', os.sep)):
         build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(target)
 
         # Each target is given its own objdir. The base of that objdir
@@ -277,17 +236,17 @@ def read_from_gyp(config, path, output,
             if ext == '.s':
                 use_defines_in_asflags = True
 
         # The context expects alphabetical order when adding sources
         context['SOURCES'] = alphabetical_sorted(sources)
         context['UNIFIED_SOURCES'] = alphabetical_sorted(unified_sources)
 
         defines = target_conf.get('defines', [])
-        if is_msvc and no_chromium:
+        if bool(config.substs['_MSC_VER']) and no_chromium:
             msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, {})
             defines.extend(msvs_settings.GetComputedDefines(c))
         for define in defines:
             if '=' in define:
                 name, value = define.split('=', 1)
                 context['DEFINES'][name] = value
             else:
                 context['DEFINES'][define] = True
@@ -357,14 +316,96 @@ def read_from_gyp(config, path, output,
         # Add some features to all contexts. Put here in case LOCAL_INCLUDES
        # order matters.
         context['LOCAL_INCLUDES'] += [
             '!/ipc/ipdl/_ipdlheaders',
             '/ipc/chromium/src',
             '/ipc/glue',
         ]
         # These get set via VC project file settings for normal GYP builds.
-        if is_win:
+        if config.substs['OS_TARGET'] == 'WINNT':
             context['DEFINES']['UNICODE'] = True
             context['DEFINES']['_UNICODE'] = True
         context['DISABLE_STL_WRAPPING'] = True
+        context.update(gyp_dir_attrs.sandbox_vars)
 
         yield context
+
+
+# A version of gyp.Load that doesn't return the generator (because module objects
+# aren't Pickle-able, and we don't use it anyway).
+def load_gyp(*args):
+    _, flat_list, targets, data = gyp.Load(*args)
+    return flat_list, targets, data
+
+
+class GypProcessor(object):
+    """Reads a gyp configuration in the background using the given executor and
+    emits GypContexts for the backend to process.
+
+    config is a ConfigEnvironment, path is the path to a root gyp configuration
+    file, and output is the base path under which the objdir for the various
+    gyp dependencies will be. gyp_dir_attrs are attributes set for the dir
+    from moz.build.
+    """
+    def __init__(self, config, gyp_dir_attrs, path, output, executor,
+                 action_overrides, non_unified_sources):
+        self._path = path
+        self._config = config
+        self._output = output
+        self._non_unified_sources = non_unified_sources
+        self._gyp_dir_attrs = gyp_dir_attrs
+        self._action_overrides = action_overrides
+        self.execution_time = 0.0
+        self._results = []
+
+        # gyp expects plain str instead of unicode. The frontend code gives us
+        # unicode strings, so convert them.
+        path = encode(path)
+        if bool(config.substs['_MSC_VER']):
+            # This isn't actually used anywhere in this generator, but it's needed
+            # to override the registry detection of VC++ in gyp.
+            os.environ['GYP_MSVS_OVERRIDE_PATH'] = 'fake_path'
+            os.environ['GYP_MSVS_VERSION'] = config.substs['MSVS_VERSION']
+
+        params = {
+            b'parallel': False,
+            b'generator_flags': {},
+            b'build_files': [path],
+            b'root_targets': None,
+        }
+
+        if gyp_dir_attrs.no_chromium:
+            includes = []
+            depth = mozpath.dirname(path)
+        else:
+            depth = chrome_src
+            # Files that gyp_chromium always includes
+            includes = [encode(mozpath.join(script_dir, 'common.gypi'))]
+            finder = FileFinder(chrome_src, find_executables=False)
+            includes.extend(encode(mozpath.join(chrome_src, name))
+                            for name, _ in finder.find('*/supplement.gypi'))
+
+        str_vars = dict((name, encode(value)) for name, value in
+                        gyp_dir_attrs.variables.items())
+        self._gyp_loader_future = executor.submit(load_gyp, [path], b'mozbuild',
+                                                  str_vars, includes,
+                                                  encode(depth), params)
+
+    @property
+    def results(self):
+        if self._results:
+            for res in self._results:
+                yield res
+        else:
+            # We report our execution time as the time spent blocked in a call
+            # to `result`, which is the only case a gyp processor will
+            # contribute significantly to total wall time.
+            t0 = time.time()
+            flat_list, targets, data = self._gyp_loader_future.result()
+            self.execution_time += time.time() - t0
+            results = []
+            for res in process_gyp_result((flat_list, targets, data), self._gyp_dir_attrs,
+                                          self._path, self._config, self._output,
+                                          self._non_unified_sources, self._action_overrides):
+                results.append(res)
+                yield res
+            self._results = results
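The pattern worth noting in this file is submit-early/consume-late: GypProcessor.__init__ hands load_gyp to the executor immediately (the gyp.Load generator is dropped because module objects can't be pickled across the process boundary), and the results property only blocks, inside future.result(), when a backend actually consumes the contexts, charging just that blocked interval to execution_time. A self-contained sketch of the same idea, assuming a hypothetical slow_parse() task instead of the real gyp loader:

import time
from concurrent.futures import ProcessPoolExecutor


def slow_parse(path):
    # Stand-in for load_gyp(): any picklable, expensive function.
    time.sleep(1.0)
    return ['parsed:%s' % path]


class BackgroundProcessor(object):
    """Hypothetical analogue of GypProcessor: work is submitted up front,
    blocking happens only when results are consumed, and only the blocked
    time is recorded."""

    def __init__(self, path, executor):
        self.execution_time = 0.0
        self._results = []
        self._future = executor.submit(slow_parse, path)

    @property
    def results(self):
        if self._results:
            for r in self._results:
                yield r
        else:
            t0 = time.time()
            items = self._future.result()  # blocks only if not finished yet
            self.execution_time += time.time() - t0
            self._results = items
            for r in items:
                yield r


if __name__ == '__main__':
    pool = ProcessPoolExecutor(max_workers=2)
    proc = BackgroundProcessor('a.gyp', pool)
    time.sleep(1.2)  # other frontend work overlaps with the parse
    print(list(proc.results))
    # Small if the parse already finished while we were busy above.
    print('blocked for %.2fs' % proc.execution_time)
    pool.shutdown()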
--- a/python/mozbuild/mozbuild/frontend/reader.py
+++ b/python/mozbuild/mozbuild/frontend/reader.py
@@ -28,16 +28,17 @@
 import time
 import traceback
 import types
 
 from collections import (
     defaultdict,
     OrderedDict,
 )
 from io import StringIO
+from multiprocessing import cpu_count
 
 from mozbuild.util import (
     EmptyValue,
     HierarchicalStringList,
     memoize,
     ReadOnlyDefaultDict,
 )
@@ -75,16 +76,18 @@ from .context import (
     SourcePath,
     SPECIAL_VARIABLES,
     SUBCONTEXTS,
     SubContext,
     TemplateContext,
 )
 
 from mozbuild.base import ExecutionSummary
+from concurrent.futures.process import ProcessPoolExecutor
+
 
 if sys.version_info.major == 2:
     text_type = unicode
     type_type = types.TypeType
 else:
     text_type = str
     type_type = type
@@ -874,37 +877,58 @@ class BuildReader(object):
     def __init__(self, config, finder=default_finder):
         self.config = config
         self._log = logging.getLogger(__name__)
         self._read_files = set()
         self._execution_stack = []
         self._finder = finder
 
+        max_workers = cpu_count()
+        self._gyp_worker_pool = ProcessPoolExecutor(max_workers=max_workers)
+        self._gyp_processors = []
         self._execution_time = 0.0
         self._file_count = 0
+        self._gyp_execution_time = 0.0
+        self._gyp_file_count = 0
 
     def summary(self):
         return ExecutionSummary(
             'Finished reading {file_count:d} moz.build files in '
             '{execution_time:.2f}s',
             file_count=self._file_count,
             execution_time=self._execution_time)
 
+    def gyp_summary(self):
+        return ExecutionSummary(
+            'Read {file_count:d} gyp files in parallel contributing '
+            '{execution_time:.2f}s to total wall time',
+            file_count=self._gyp_file_count,
+            execution_time=self._gyp_execution_time)
+
     def read_topsrcdir(self):
         """Read the tree of linked moz.build files.
 
         This starts with the tree's top-most moz.build file and descends into
         all linked moz.build files until all relevant files have been
         evaluated.
 
         This is a generator of Context instances. As each moz.build file is
         read, a new Context is created and emitted.
         """
         path = mozpath.join(self.config.topsrcdir, 'moz.build')
-        return self.read_mozbuild(path, self.config)
+        for r in self.read_mozbuild(path, self.config):
+            yield r
+        all_gyp_paths = set()
+        for g in self._gyp_processors:
+            for gyp_context in g.results:
+                all_gyp_paths |= gyp_context.all_paths
+                yield gyp_context
+            self._gyp_execution_time += g.execution_time
+        self._gyp_file_count += len(all_gyp_paths)
+        self._gyp_worker_pool.shutdown()
 
     def all_mozbuild_paths(self):
         """Iterator over all available moz.build files.
 
         This method has little to do with the reader. It should arguably belong
         elsewhere.
         """
         # In the future, we may traverse moz.build files by looking
@@ -1131,57 +1155,49 @@ class BuildReader(object):
             # processing is performed.
             yield context
 
             # We need the list of directories pre-gyp processing for later.
             dirs = list(context.get('DIRS', []))
 
             curdir = mozpath.dirname(path)
 
-            gyp_contexts = []
             for target_dir in context.get('GYP_DIRS', []):
                 gyp_dir = context['GYP_DIRS'][target_dir]
                 for v in ('input', 'variables'):
                     if not getattr(gyp_dir, v):
                         raise SandboxValidationError('Missing value for '
                             'GYP_DIRS["%s"].%s' % (target_dir, v), context)
 
                 # The make backend assumes contexts for sub-directories are
                 # emitted after their parent, so accumulate the gyp contexts.
                 # We could emit the parent context before processing gyp
                 # configuration, but we need to add the gyp objdirs to that context
                 # first.
-                from .gyp_reader import read_from_gyp
+                from .gyp_reader import GypProcessor
                 non_unified_sources = set()
                 for s in gyp_dir.non_unified_sources:
                     source = SourcePath(context, s)
                     if not self._finder.get(source.full_path):
                         raise SandboxValidationError('Cannot find %s.' % source,
                             context)
                     non_unified_sources.add(source)
                 action_overrides = {}
                 for action, script in gyp_dir.action_overrides.iteritems():
                     action_overrides[action] = SourcePath(context, script)
 
-                time_start = time.time()
-                for gyp_context in read_from_gyp(context.config,
-                                                 mozpath.join(curdir, gyp_dir.input),
-                                                 mozpath.join(context.objdir,
-                                                              target_dir),
-                                                 gyp_dir.variables,
-                                                 gyp_dir.no_chromium,
-                                                 gyp_dir.no_unified,
-                                                 action_overrides,
-                                                 non_unified_sources = non_unified_sources):
-                    gyp_context.update(gyp_dir.sandbox_vars)
-                    gyp_contexts.append(gyp_context)
-                    self._file_count += len(gyp_context.all_paths)
-                self._execution_time += time.time() - time_start
-            for gyp_context in gyp_contexts:
-                sandbox.subcontexts.append(gyp_context)
+                gyp_processor = GypProcessor(context.config,
+                                             gyp_dir,
+                                             mozpath.join(curdir, gyp_dir.input),
+                                             mozpath.join(context.objdir,
+                                                          target_dir),
+                                             self._gyp_worker_pool,
+                                             action_overrides,
+                                             non_unified_sources)
+                self._gyp_processors.append(gyp_processor)
 
         for subcontext in sandbox.subcontexts:
             yield subcontext
 
         # Traverse into referenced files.
 
         # It's very tempting to use a set here. Unfortunately, the recursive
         # make backend needs order preserved. Once we autogenerate all backend
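On the consumer side, the reader now owns a process pool sized by cpu_count(), queues one GypProcessor per GYP_DIRS entry while it walks moz.build files, and only drains those processors (and shuts the pool down) after the last moz.build context has been yielded. A reduced, hypothetical MiniReader sketch of that flow, using a trivial square() task in place of gyp parsing:

from multiprocessing import cpu_count
from concurrent.futures import ProcessPoolExecutor


def square(n):
    # Stand-in for the background gyp work; must be picklable.
    return n * n


class MiniReader(object):
    def __init__(self):
        self._pool = ProcessPoolExecutor(max_workers=cpu_count())
        self._pending = []

    def _read_one(self, n):
        # While yielding "contexts", also kick off background work.
        self._pending.append(self._pool.submit(square, n))
        yield 'context-%d' % n

    def read_all(self):
        for n in range(3):
            for item in self._read_one(n):
                yield item
        # Drain background results only at the end, mirroring read_topsrcdir().
        for fut in self._pending:
            yield fut.result()
        self._pool.shutdown()


if __name__ == '__main__':
    print(list(MiniReader().read_all()))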