Bug 1207893 - Refactor how build backend execution is summarized. r=gps
author Mike Hommey <mh+mozilla@glandium.org>
date Tue, 15 Sep 2015 12:00:12 +0900
changeset 264845 af6ea1464daa5bed510b6ce8f319a389fb590a6e
parent 264844 66e58f784dbc4e2cb09e00544c2dc25f3432293d
child 264846 75c3e053bcfa70f2cf6dd0df22458d916c41fd6b
push id 65759
push user mh@glandium.org
push date Tue, 29 Sep 2015 00:31:43 +0000
treeherder mozilla-inbound@6c9db53f98d5
reviewers gps
bugs 1207893
milestone 44.0a1
Bug 1207893 - Refactor how build backend execution is summarized. r=gps

- Make all backends report the time spent in their own execution.
- Change how the data is collected for the reader and emitter such that each of them is aware of its own data, instead of everything being tracked by the backend.

This is meant to open the door to multiple backends running from the same execution of config.status.
python/mozbuild/mozbuild/backend/android_eclipse.py
python/mozbuild/mozbuild/backend/base.py
python/mozbuild/mozbuild/backend/cpp_eclipse.py
python/mozbuild/mozbuild/backend/recursivemake.py
python/mozbuild/mozbuild/backend/visualstudio.py
python/mozbuild/mozbuild/base.py
python/mozbuild/mozbuild/config_status.py
python/mozbuild/mozbuild/frontend/context.py
python/mozbuild/mozbuild/frontend/data.py
python/mozbuild/mozbuild/frontend/emitter.py
python/mozbuild/mozbuild/frontend/gyp_reader.py
python/mozbuild/mozbuild/frontend/reader.py
python/mozbuild/mozbuild/test/frontend/test_emitter.py
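
As background for the diff below: after this patch every participant (reader, emitter, backend) owns its counters and exposes them through a summary() method, and a backend adds detail by extending the base ExecutionSummary rather than monkey-patching a shared summary object. A minimal, purely illustrative sketch of what a new backend would look like under this scheme (HypotheticalBackend and its project counter are invented for illustration, modeled on the RecursiveMake changes below):

    from mozbuild.backend.common import CommonBackend
    from mozbuild.base import ExecutionSummary

    class HypotheticalBackend(CommonBackend):
        # Illustrative only: shows the summary() protocol this patch introduces.
        def _init(self):
            CommonBackend._init(self)
            self._generated_projects = 0

        def consume_object(self, obj):
            # Real backends dispatch on the object type; acknowledge everything here.
            obj.ack()
            self._generated_projects += 1

        def summary(self):
            # Start from the generic file-count summary in BuildBackend...
            summary = super(HypotheticalBackend, self).summary()
            # ...and append backend-specific detail, as RecursiveMakeBackend does.
            summary.extend('; {projects:d} projects generated',
                           projects=self._generated_projects)
            return summary

The wall time itself is accumulated by BuildBackend.consume(), so a backend only has to track whatever extra counters it wants to report.
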
--- a/python/mozbuild/mozbuild/backend/android_eclipse.py
+++ b/python/mozbuild/mozbuild/backend/android_eclipse.py
@@ -19,46 +19,42 @@ import mozpack.path as mozpath
 from .common import CommonBackend
 from ..frontend.data import (
     AndroidEclipseProjectData,
     ContextDerived,
     ContextWrapped,
 )
 from ..makeutil import Makefile
 from ..util import ensureParentDir
+from mozbuild.base import ExecutionSummary
 
 
 def pretty_print(element):
     """Return a pretty-printed XML string for an Element.
     """
     s = ET.tostring(element, 'utf-8')
     # minidom wraps element in a Document node; firstChild strips it.
     return minidom.parseString(s).firstChild.toprettyxml(indent='  ')
 
 
 class AndroidEclipseBackend(CommonBackend):
     """Backend that generates Android Eclipse project files.
     """
 
-    def _init(self):
-        CommonBackend._init(self)
-
-        def detailed(summary):
-            s = 'Wrote {:d} Android Eclipse projects to {:s}; ' \
-                '{:d} created; {:d} updated'.format(
-                summary.created_count + summary.updated_count,
-                mozpath.join(self.environment.topobjdir, 'android_eclipse'),
-                summary.created_count,
-                summary.updated_count)
-
-            return s
-
-        # This is a little kludgy and could be improved with a better API.
-        self.summary.backend_detailed_summary = types.MethodType(detailed,
-            self.summary)
+    def summary(self):
+        return ExecutionSummary(
+            'AndroidEclipse backend executed in {execution_time:.2f}s\n'
+            'Wrote {projects:d} Android Eclipse projects to {path:s}; '
+            '{created:d} created; {updated:d} updated',
+            execution_time=self._execution_time,
+            projects=self._created_count + self._updated_count,
+            path=mozpath.join(self.environment.topobjdir, 'android_eclipse'),
+            created=self._created_count,
+            updated=self._updated_count,
+        )
 
     def consume_object(self, obj):
         """Write out Android Eclipse project files."""
 
         if not isinstance(obj, ContextDerived):
             return
 
         CommonBackend.consume_object(self, obj)
@@ -252,12 +248,12 @@ class AndroidEclipseBackend(CommonBacken
                 mozpath.join(finder.base, input_filename),
                 depfile_path=None,
                 marker='#',
                 defines=defines,
                 extra_depends={mozpath.join(finder.base, input_filename)}))
 
         # When we re-create the build backend, we kill everything that was there.
         if os.path.isdir(project_directory):
-            self.summary.updated_count += 1
+            self._updated_count += 1
         else:
-            self.summary.created_count += 1
+            self._created_count += 1
         copier.copy(project_directory, skip_if_older=False, remove_unaccounted=True)
--- a/python/mozbuild/mozbuild/backend/base.py
+++ b/python/mozbuild/mozbuild/backend/base.py
@@ -16,112 +16,19 @@ import time
 from contextlib import contextmanager
 
 from mach.mixin.logging import LoggingMixin
 
 import mozpack.path as mozpath
 from ..preprocessor import Preprocessor
 from ..pythonutil import iter_modules_in_path
 from ..util import FileAvoidWrite
-from ..frontend.data import (
-    ContextDerived,
-    ReaderSummary,
-)
+from ..frontend.data import ContextDerived
 from .configenvironment import ConfigEnvironment
-
-
-class BackendConsumeSummary(object):
-    """Holds state about what a backend did.
-
-    This is used primarily to print a summary of what the backend did
-    so people know what's going on.
-    """
-    def __init__(self):
-        # How many moz.build files were read. This includes included files.
-        self.mozbuild_count = 0
-
-        # The number of derived objects from the read moz.build files.
-        self.object_count = 0
-
-        # The number of backend files created.
-        self.created_count = 0
-
-        # The number of backend files updated.
-        self.updated_count = 0
-
-        # The number of unchanged backend files.
-        self.unchanged_count = 0
-
-        # The number of deleted backend files.
-        self.deleted_count = 0
-
-        # The total wall time this backend spent consuming objects. If
-        # the iterable passed into consume() is a generator, this includes the
-        # time spent to read moz.build files.
-        self.wall_time = 0.0
-
-        # CPU time spent by during the interval captured by wall_time.
-        self.cpu_time = 0.0
-
-        # The total wall time spent executing moz.build files. This is just
-        # the read and execute time. It does not cover consume time.
-        self.mozbuild_execution_time = 0.0
-
-        # The total wall time spent emitting objects from sandboxes.
-        self.emitter_execution_time = 0.0
-
-        # The total wall time spent in the backend. This counts the time the
-        # backend writes out files, etc.
-        self.backend_execution_time = 0.0
-
-        # How much wall time the system spent doing other things. This is
-        # wall_time - mozbuild_execution_time - emitter_execution_time -
-        # backend_execution_time.
-        self.other_time = 0.0
-
-        # Mapping of changed file paths to diffs of the changes.
-        self.file_diffs = {}
-
-    @property
-    def reader_summary(self):
-        return 'Finished reading {:d} moz.build files in {:.2f}s'.format(
-            self.mozbuild_count,
-            self.mozbuild_execution_time)
-
-    @property
-    def emitter_summary(self):
-        return 'Processed into {:d} build config descriptors in {:.2f}s'.format(
-            self.object_count, self.emitter_execution_time)
-
-    @property
-    def backend_summary(self):
-        return 'Backend executed in {:.2f}s'.format(self.backend_execution_time)
-
-    def backend_detailed_summary(self):
-        """Backend summary to be supplied by BuildBackend implementations."""
-        return None
-
-    @property
-    def total_summary(self):
-        efficiency_value = self.cpu_time / self.wall_time if self.wall_time else 100
-        return 'Total wall time: {:.2f}s; CPU time: {:.2f}s; Efficiency: ' \
-            '{:.0%}; Untracked: {:.2f}s'.format(
-                self.wall_time, self.cpu_time, efficiency_value,
-                self.other_time)
-
-    def summaries(self):
-        yield self.reader_summary
-        yield self.emitter_summary
-        yield self.backend_summary
-
-        detailed = self.backend_detailed_summary()
-        if detailed:
-            yield detailed
-
-        yield self.total_summary
+from mozbuild.base import ExecutionSummary
 
 
 class BuildBackend(LoggingMixin):
     """Abstract base class for build backends.
 
     A build backend is merely a consumer of the build configuration (the output
     of the frontend processing). It does something with said data. What exactly
     is the discretion of the specific implementation.
@@ -130,17 +37,16 @@ class BuildBackend(LoggingMixin):
     __metaclass__ = ABCMeta
 
     def __init__(self, environment):
         assert isinstance(environment, ConfigEnvironment)
 
         self.populate_logger()
 
         self.environment = environment
-        self.summary = BackendConsumeSummary()
 
         # Files whose modification should cause a new read and backend
         # generation.
         self.backend_input_files = set()
 
         # Files generated by the backend.
         self._backend_output_files = set()
 
@@ -150,112 +56,128 @@ class BuildBackend(LoggingMixin):
         self._backend_output_list = set()
         if os.path.exists(self._backend_output_list_file):
             l = open(self._backend_output_list_file).read().split('\n')
             self._backend_output_list.update(mozpath.normsep(p) for p in l)
 
         self._environments = {}
         self._environments[environment.topobjdir] = environment
 
+        # The number of backend files created.
+        self._created_count = 0
+
+        # The number of backend files updated.
+        self._updated_count = 0
+
+        # The number of unchanged backend files.
+        self._unchanged_count = 0
+
+        # The number of deleted backend files.
+        self._deleted_count = 0
+
+        # The total wall time spent in the backend. This counts the time the
+        # backend writes out files, etc.
+        self._execution_time = 0.0
+
+        # Mapping of changed file paths to diffs of the changes.
+        self.file_diffs = {}
+
         self._init()
 
+    def summary(self):
+        return ExecutionSummary(
+            self.__class__.__name__.replace('Backend', '') +
+            ' backend executed in {execution_time:.2f}s\n  '
+            '{total:d} total backend files; '
+            '{created:d} created; '
+            '{updated:d} updated; '
+            '{unchanged:d} unchanged; '
+            '{deleted:d} deleted',
+            execution_time=self._execution_time,
+            total=self._created_count + self._updated_count +
+            self._unchanged_count,
+            created=self._created_count,
+            updated=self._updated_count,
+            unchanged=self._unchanged_count,
+            deleted=self._deleted_count)
+
     def _init(self):
         """Hook point for child classes to perform actions during __init__.
 
         This exists so child classes don't need to implement __init__.
         """
 
     def consume(self, objs):
         """Consume a stream of TreeMetadata instances.
 
         This is the main method of the interface. This is what takes the
         frontend output and does something with it.
 
         Child classes are not expected to implement this method. Instead, the
         base class consumes objects and calls methods (possibly) implemented by
         child classes.
         """
-        cpu_start = time.clock()
-        time_start = time.time()
-        backend_time = 0.0
-
         for obj in objs:
-            self.summary.object_count += 1
             obj_start = time.time()
             self.consume_object(obj)
-            backend_time += time.time() - obj_start
+            self._execution_time += time.time() - obj_start
 
             if isinstance(obj, ContextDerived):
                 self.backend_input_files |= obj.context_all_paths
 
-            if isinstance(obj, ReaderSummary):
-                self.summary.mozbuild_count = obj.total_file_count
-                self.summary.mozbuild_execution_time = obj.total_sandbox_execution_time
-                self.summary.emitter_execution_time = obj.total_emitter_execution_time
-
         # Pull in all loaded Python as dependencies so any Python changes that
         # could influence our output result in a rescan.
         self.backend_input_files |= set(iter_modules_in_path(
             self.environment.topsrcdir, self.environment.topobjdir))
 
         finished_start = time.time()
         self.consume_finished()
-        backend_time += time.time() - finished_start
+        self._execution_time += time.time() - finished_start
 
         # Purge backend files created in previous run, but not created anymore
         delete_files = self._backend_output_list - self._backend_output_files
         for path in delete_files:
             try:
                 os.unlink(mozpath.join(self.environment.topobjdir, path))
-                self.summary.deleted_count += 1
+                self._deleted_count += 1
             except OSError:
                 pass
         # Remove now empty directories
         for dir in set(mozpath.dirname(d) for d in delete_files):
             try:
                 os.removedirs(dir)
             except OSError:
                 pass
 
         # Write out the list of backend files generated, if it changed.
-        if self.summary.deleted_count or self.summary.created_count or \
+        if self._deleted_count or self._created_count or \
                 not os.path.exists(self._backend_output_list_file):
             with open(self._backend_output_list_file, 'w') as fh:
                 fh.write('\n'.join(sorted(self._backend_output_files)))
-        elif self.summary.updated_count:
+        elif self._updated_count:
             with open(self._backend_output_list_file, 'a'):
                 os.utime(self._backend_output_list_file, None)
 
-        self.summary.cpu_time = time.clock() - cpu_start
-        self.summary.wall_time = time.time() - time_start
-        self.summary.backend_execution_time = backend_time
-        self.summary.other_time = self.summary.wall_time - \
-            self.summary.mozbuild_execution_time - \
-            self.summary.emitter_execution_time - \
-            self.summary.backend_execution_time
-
-        return self.summary
-
     @abstractmethod
     def consume_object(self, obj):
         """Consumes an individual TreeMetadata instance.
 
         This is the main method used by child classes to react to build
         metadata.
         """
 
     def consume_finished(self):
         """Called when consume() has completed handling all objects."""
 
     @contextmanager
     def _write_file(self, path=None, fh=None):
         """Context manager to write a file.
 
         This is a glorified wrapper around FileAvoidWrite with integration to
-        update the BackendConsumeSummary on this instance.
+        update the summary data on this instance.
 
         Example usage:
 
             with self._write_file('foo.txt') as fh:
                 fh.write('hello world')
         """
 
         if path is not None:
@@ -271,23 +193,23 @@ class BuildBackend(LoggingMixin):
             if error.errno != errno.EEXIST:
                 raise
 
         yield fh
 
         self._backend_output_files.add(mozpath.relpath(fh.name, self.environment.topobjdir))
         existed, updated = fh.close()
         if not existed:
-            self.summary.created_count += 1
+            self._created_count += 1
         elif updated:
-            self.summary.updated_count += 1
+            self._updated_count += 1
             if fh.diff:
-                self.summary.file_diffs[fh.name] = fh.diff
+                self.file_diffs[fh.name] = fh.diff
         else:
-            self.summary.unchanged_count += 1
+            self._unchanged_count += 1
 
     @contextmanager
     def _get_preprocessor(self, obj):
         '''Returns a preprocessor with a few predefined values depending on
         the given BaseConfigSubstitution(-like) object, and all the substs
         in the current environment.'''
         pp = Preprocessor()
         srcdir = mozpath.dirname(obj.input_path)
--- a/python/mozbuild/mozbuild/backend/cpp_eclipse.py
+++ b/python/mozbuild/mozbuild/backend/cpp_eclipse.py
@@ -10,16 +10,17 @@ import os
 import subprocess
 import types
 import xml.etree.ElementTree as ET
 from .common import CommonBackend
 
 from ..frontend.data import (
     Defines,
 )
+from mozbuild.base import ExecutionSummary
 
 # TODO Have ./mach eclipse generate the workspace and index it:
 # /Users/bgirard/mozilla/eclipse/eclipse/eclipse/eclipse -application org.eclipse.cdt.managedbuilder.core.headlessbuild -data $PWD/workspace -importAll $PWD/eclipse
 # Open eclipse:
 # /Users/bgirard/mozilla/eclipse/eclipse/eclipse/eclipse -data $PWD/workspace
 
 class CppEclipseBackend(CommonBackend):
     """Backend that generates Cpp Eclipse project files.
@@ -36,25 +37,25 @@ class CppEclipseBackend(CommonBackend):
 
         self._macbundle = self.environment.substs['MOZ_MACBUNDLE_NAME']
         self._appname = self.environment.substs['MOZ_APP_NAME']
         self._bin_suffix = self.environment.substs['BIN_SUFFIX']
         self._cxx = self.environment.substs['CXX']
         # Note: We need the C Pre Processor (CPP) flags, not the CXX flags
         self._cppflags = self.environment.substs.get('CPPFLAGS', '')
 
-        def detailed(summary):
-            return ('Generated Cpp Eclipse workspace in "%s".\n' + \
-                   'If missing, import the project using File > Import > General > Existing Project into workspace\n' + \
-                   '\n' + \
-                   'Run with: eclipse -data %s\n') \
-                   % (self._workspace_dir, self._workspace_dir)
-
-        self.summary.backend_detailed_summary = types.MethodType(detailed,
-            self.summary)
+    def summary(self):
+        return ExecutionSummary(
+            'CppEclipse backend executed in {execution_time:.2f}s\n'
+            'Generated Cpp Eclipse workspace in "{workspace:s}".\n'
+            'If missing, import the project using File > Import > General > Existing Project into workspace\n'
+            '\n'
+            'Run with: eclipse -data {workspace:s}\n',
+            execution_time=self._execution_time,
+            workspace=self._workspace_dir)
 
     def _get_workspace_path(self):
         return CppEclipseBackend.get_workspace_path(self.environment.topsrcdir, self.environment.topobjdir)
 
     @staticmethod
     def get_workspace_path(topsrcdir, topobjdir):
         # Eclipse doesn't support having the workspace inside the srcdir.
         # Since most people have their objdir inside their srcdir it's easier
--- a/python/mozbuild/mozbuild/backend/recursivemake.py
+++ b/python/mozbuild/mozbuild/backend/recursivemake.py
@@ -371,36 +371,18 @@ class RecursiveMakeBackend(CommonBackend
     """
 
     def _init(self):
         CommonBackend._init(self)
 
         self._backend_files = {}
         self._idl_dirs = set()
 
-        def detailed(summary):
-            s = '{:d} total backend files; ' \
-                '{:d} created; {:d} updated; {:d} unchanged; ' \
-                '{:d} deleted; {:d} -> {:d} Makefile'.format(
-                summary.created_count + summary.updated_count +
-                summary.unchanged_count,
-                summary.created_count,
-                summary.updated_count,
-                summary.unchanged_count,
-                summary.deleted_count,
-                summary.makefile_in_count,
-                summary.makefile_out_count)
-
-            return s
-
-        # This is a little kludgy and could be improved with a better API.
-        self.summary.backend_detailed_summary = types.MethodType(detailed,
-            self.summary)
-        self.summary.makefile_in_count = 0
-        self.summary.makefile_out_count = 0
+        self._makefile_in_count = 0
+        self._makefile_out_count = 0
 
         self._test_manifests = {}
 
         self.backend_input_files.add(mozpath.join(self.environment.topobjdir,
             'config', 'autoconf.mk'))
 
         self._install_manifests = {
             k: InstallManifest() for k in [
@@ -423,16 +405,23 @@ class RecursiveMakeBackend(CommonBackend
             'export': set(),
             'libs': set(),
         }
         self._no_skip = {
             'misc': set(),
             'tools': set(),
         }
 
+    def summary(self):
+        summary = super(RecursiveMakeBackend, self).summary()
+        summary.extend('; {makefile_in:d} -> {makefile_out:d} Makefile',
+                       makefile_in=self._makefile_in_count,
+                       makefile_out=self._makefile_out_count)
+        return summary
+
     def _get_backend_file_for(self, obj):
         if obj.objdir not in self._backend_files:
             self._backend_files[obj.objdir] = \
                 BackendMakeFile(obj.srcdir, obj.objdir, obj.config,
                     obj.topsrcdir, self.environment.topobjdir)
         return self._backend_files[obj.objdir]
 
     def consume_object(self, obj):
@@ -797,17 +786,17 @@ class RecursiveMakeBackend(CommonBackend
                 makefile = mozpath.join(objdir, 'Makefile')
 
                 # If Makefile.in exists, use it as a template. Otherwise,
                 # create a stub.
                 stub = not os.path.exists(makefile_in)
                 if not stub:
                     self.log(logging.DEBUG, 'substitute_makefile',
                         {'path': makefile}, 'Substituting makefile: {path}')
-                    self.summary.makefile_in_count += 1
+                    self._makefile_in_count += 1
 
                     for tier, skiplist in self._may_skip.items():
                         if bf.relobjdir in skiplist:
                             skiplist.remove(bf.relobjdir)
                 else:
                     self.log(logging.DEBUG, 'stub_makefile',
                         {'path': makefile}, 'Creating stub Makefile: {path}')
 
@@ -1446,17 +1435,17 @@ INSTALL_TARGETS += %(prefix)s
         if not stub:
             # Adding the Makefile.in here has the desired side-effect
             # that if the Makefile.in disappears, this will force
             # moz.build traversal. This means that when we remove empty
             # Makefile.in files, the old file will get replaced with
             # the autogenerated one automatically.
             self.backend_input_files.add(obj.input_path)
 
-        self.summary.makefile_out_count += 1
+        self._makefile_out_count += 1
 
     def _handle_ipdl_sources(self, ipdl_dir, sorted_ipdl_sources,
                              unified_ipdl_cppsrcs_mapping):
         # Write out a master list of all IPDL source files.
         mk = Makefile()
 
         mk.add_statement('ALL_IPDLSRCS := %s' % ' '.join(sorted_ipdl_sources))
 
--- a/python/mozbuild/mozbuild/backend/visualstudio.py
+++ b/python/mozbuild/mozbuild/backend/visualstudio.py
@@ -22,16 +22,17 @@ from ..frontend.data import (
     Defines,
     GeneratedSources,
     HostSources,
     Library,
     LocalInclude,
     Sources,
     UnifiedSources,
 )
+from mozbuild.base import ExecutionSummary
 
 
 MSBUILD_NAMESPACE = 'http://schemas.microsoft.com/developer/msbuild/2003'
 
 def get_id(name):
     return str(uuid.uuid5(uuid.NAMESPACE_URL, name)).upper()
 
 # TODO validate mappings are correct. only 2010 confirmed so far
@@ -96,22 +97,22 @@ class VisualStudioBackend(CommonBackend)
         self._version = '2013'
 
         self._paths_to_sources = {}
         self._paths_to_includes = {}
         self._paths_to_defines = {}
         self._paths_to_configs = {}
         self._libs_to_paths = {}
 
-        def detailed(summary):
-            return 'Generated Visual Studio solution at %s' % (
-                os.path.join(self._out_dir, 'mozilla.sln'))
-
-        self.summary.backend_detailed_summary = types.MethodType(detailed,
-            self.summary)
+    def summary(self):
+        return ExecutionSummary(
+            'VisualStudio backend executed in {execution_time:.2f}s\n'
+            'Generated Visual Studio solution at {path:s}',
+            execution_time=self._execution_time,
+            path=os.path.join(self._out_dir, 'mozilla.sln'))
 
     def consume_object(self, obj):
         # Just acknowledge everything.
         obj.ack()
 
         reldir = getattr(obj, 'relativedir', None)
 
         if hasattr(obj, 'config') and reldir not in self._paths_to_configs:
--- a/python/mozbuild/mozbuild/base.py
+++ b/python/mozbuild/mozbuild/base.py
@@ -838,8 +838,27 @@ class PathArgument(object):
 
         return mozpath.normsep(self.arg)
 
     def srcdir_path(self):
         return mozpath.join(self.topsrcdir, self.relpath())
 
     def objdir_path(self):
         return mozpath.join(self.topobjdir, self.relpath())
+
+
+class ExecutionSummary(dict):
+    """Helper for execution summaries."""
+
+    def __init__(self, summary_format, **data):
+        self._summary_format = ''
+        assert 'execution_time' in data
+        self.extend(summary_format, **data)
+
+    def extend(self, summary_format, **data):
+        self._summary_format += summary_format
+        self.update(data)
+
+    def __str__(self):
+        return self._summary_format.format(**self)
+
+    def __getattr__(self, key):
+        return self[key]
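
ExecutionSummary, added just above, is deliberately small: a dict of values plus an accumulated format string, so subclasses can append to both. A quick illustrative use (the numbers are made up):

    from mozbuild.base import ExecutionSummary

    summary = ExecutionSummary(
        'Backend executed in {execution_time:.2f}s\n'
        '{total:d} total backend files',
        execution_time=1.23,
        total=42)
    # extend() appends to the format string and merges the extra data.
    summary.extend('; {makefile_in:d} -> {makefile_out:d} Makefile',
                   makefile_in=7, makefile_out=6)
    print(summary)                 # str() formats the accumulated data
    print(summary.execution_time)  # attribute access falls through to the dict
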
--- a/python/mozbuild/mozbuild/config_status.py
+++ b/python/mozbuild/mozbuild/config_status.py
@@ -6,16 +6,17 @@
 # drop-in replacement for autoconf 2.13's config.status, with features
 # borrowed from autoconf > 2.5, and additional features.
 
 from __future__ import absolute_import, print_function
 
 import logging
 import os
 import sys
+import time
 
 from optparse import OptionParser
 
 from mach.logging import LoggingManager
 from mozbuild.backend.configenvironment import ConfigEnvironment
 from mozbuild.backend.recursivemake import RecursiveMakeBackend
 from mozbuild.base import MachCommandConditions
 from mozbuild.frontend.emitter import TreeMetadataEmitter
@@ -128,16 +129,19 @@ def config_status(topobjdir='.', topsrcd
           raise Exception('Eclipse is not supported on Windows. Consider using Visual Studio instead.')
     elif options.backend == 'VisualStudio':
         from mozbuild.backend.visualstudio import VisualStudioBackend
         backend_cls = VisualStudioBackend
     elif options.backend == 'FasterMake':
         from mozbuild.backend.fastermake import FasterMakeBackend
         backend_cls = FasterMakeBackend
 
+    cpu_start = time.clock()
+    time_start = time.time()
+
     the_backend = backend_cls(env)
 
     reader = BuildReader(env)
     emitter = TreeMetadataEmitter(env)
     # This won't actually do anything because of the magic of generators.
     definitions = emitter.emit(reader.read_topsrcdir())
 
     if options.recheck:
@@ -145,23 +149,38 @@ def config_status(topobjdir='.', topsrcd
         os.chdir(topobjdir)
         os.execlp('sh', 'sh', '-c', ' '.join([os.path.join(topsrcdir, 'configure'), env.substs['ac_configure_args'], '--no-create', '--no-recursion']))
 
     log_level = logging.DEBUG if options.verbose else logging.INFO
     log_manager.add_terminal_logging(level=log_level)
     log_manager.enable_unstructured()
 
     print('Reticulating splines...', file=sys.stderr)
-    summary = the_backend.consume(definitions)
+    the_backend.consume(definitions)
+
+    execution_time = 0.0
+    for obj in (reader, emitter, the_backend):
+        summary = obj.summary()
+        print(summary, file=sys.stderr)
+        execution_time += summary.execution_time
 
-    for line in summary.summaries():
-        print(line, file=sys.stderr)
+    cpu_time = time.clock() - cpu_start
+    wall_time = time.time() - time_start
+    efficiency = cpu_time / wall_time if wall_time else 100
+    untracked = wall_time - execution_time
+
+    print(
+        'Total wall time: {:.2f}s; CPU time: {:.2f}s; Efficiency: '
+        '{:.0%}; Untracked: {:.2f}s'.format(
+            wall_time, cpu_time, efficiency, untracked),
+        file=sys.stderr
+    )
 
     if options.diff:
-        for path, diff in sorted(summary.file_diffs.items()):
+        for path, diff in sorted(the_backend.file_diffs.items()):
             print('\n'.join(diff))
 
     # Advertise Visual Studio if appropriate.
     if os.name == 'nt' and options.backend == 'RecursiveMake':
         print(VISUAL_STUDIO_ADVERTISEMENT)
 
     # Advertise Eclipse if it is appropriate.
     if MachCommandConditions.is_android(env):
--- a/python/mozbuild/mozbuild/frontend/context.py
+++ b/python/mozbuild/mozbuild/frontend/context.py
@@ -83,17 +83,16 @@ class Context(KeyedDefaultDict):
     def __init__(self, allowed_variables={}, config=None):
         self._allowed_variables = allowed_variables
         self.main_path = None
         self.current_path = None
         # There aren't going to be enough paths for the performance of scanning
         # a list to be a problem.
         self._all_paths = []
         self.config = config
-        self.execution_time = 0
         self._sandbox = None
         KeyedDefaultDict.__init__(self, self._factory)
 
     def push_source(self, path):
         """Adds the given path as source of the data from this context and make
         it the current path for the context."""
         assert os.path.isabs(path)
         if not self.main_path:
--- a/python/mozbuild/mozbuild/frontend/data.py
+++ b/python/mozbuild/mozbuild/frontend/data.py
@@ -38,27 +38,16 @@ class TreeMetadata(object):
 
     def __init__(self):
         self._ack = False
 
     def ack(self):
         self._ack = True
 
 
-class ReaderSummary(TreeMetadata):
-    """A summary of what the reader did."""
-
-    def __init__(self, total_file_count, total_sandbox_execution_time,
-        total_emitter_execution_time):
-        TreeMetadata.__init__(self)
-        self.total_file_count = total_file_count
-        self.total_sandbox_execution_time = total_sandbox_execution_time
-        self.total_emitter_execution_time = total_emitter_execution_time
-
-
 class ContextDerived(TreeMetadata):
     """Build object derived from a single Context instance.
 
     It holds fields common to all context derived classes. This class is likely
     never instantiated directly but is instead derived from.
     """
 
     __slots__ = (
--- a/python/mozbuild/mozbuild/frontend/emitter.py
+++ b/python/mozbuild/mozbuild/frontend/emitter.py
@@ -58,17 +58,16 @@ from .data import (
     Library,
     Linkable,
     LinkageWrongKindError,
     LocalInclude,
     PerSourceFlag,
     PreprocessedTestWebIDLFile,
     PreprocessedWebIDLFile,
     Program,
-    ReaderSummary,
     Resources,
     SharedLibrary,
     SimpleProgram,
     Sources,
     StaticLibrary,
     TestHarnessFiles,
     TestWebIDLFile,
     TestManifest,
@@ -90,16 +89,18 @@ from .context import (
     Context,
     SourcePath,
     ObjDirPath,
     Path,
     SubContext,
     TemplateContext,
 )
 
+from mozbuild.base import ExecutionSummary
+
 
 class TreeMetadataEmitter(LoggingMixin):
     """Converts the executed mozbuild files into data structures.
 
     This is a bridge between reader.py and data.py. It takes what was read by
     reader.BuildReader and converts it into the classes defined in the data
     module.
     """
@@ -131,29 +132,37 @@ class TreeMetadataEmitter(LoggingMixin):
         subconfigures = os.path.join(self.config.topobjdir, 'subconfigures')
         paths = []
         if os.path.exists(subconfigures):
             paths = open(subconfigures).read().splitlines()
         self._external_paths = set(mozpath.normsep(d) for d in paths)
         # Add security/nss manually, since it doesn't have a subconfigure.
         self._external_paths.add('security/nss')
 
+        self._emitter_time = 0.0
+        self._object_count = 0
+
+    def summary(self):
+        return ExecutionSummary(
+            'Processed into {object_count:d} build config descriptors in '
+            '{execution_time:.2f}s',
+            execution_time=self._emitter_time,
+            object_count=self._object_count)
+
     def emit(self, output):
         """Convert the BuildReader output into data structures.
 
         The return value from BuildReader.read_topsrcdir() (a generator) is
         typically fed into this function.
         """
-        file_count = 0
-        sandbox_execution_time = 0.0
-        emitter_time = 0.0
         contexts = {}
 
         def emit_objs(objs):
             for o in objs:
+                self._object_count += 1
                 yield o
                 if not o._ack:
                     raise Exception('Unhandled object of type %s' % type(o))
 
         for out in output:
             # Nothing in sub-contexts is currently of interest to us. Filter
             # them all out.
             if isinstance(out, SubContext):
@@ -161,38 +170,32 @@ class TreeMetadataEmitter(LoggingMixin):
 
             if isinstance(out, Context):
                 # Keep all contexts around, we will need them later.
                 contexts[out.objdir] = out
 
                 start = time.time()
                 # We need to expand the generator for the timings to work.
                 objs = list(self.emit_from_context(out))
-                emitter_time += time.time() - start
+                self._emitter_time += time.time() - start
 
                 for o in emit_objs(objs): yield o
 
-                # Update the stats.
-                file_count += len(out.all_paths)
-                sandbox_execution_time += out.execution_time
-
             else:
                 raise Exception('Unhandled output type: %s' % type(out))
 
         # Don't emit Linkable objects when COMPILE_ENVIRONMENT is explicitly
         # set to a value meaning false (usually '').
         if self.config.substs.get('COMPILE_ENVIRONMENT', True):
             start = time.time()
             objs = list(self._emit_libs_derived(contexts))
-            emitter_time += time.time() - start
+            self._emitter_time += time.time() - start
 
             for o in emit_objs(objs): yield o
 
-        yield ReaderSummary(file_count, sandbox_execution_time, emitter_time)
-
     def _emit_libs_derived(self, contexts):
         # First do FINAL_LIBRARY linkage.
         for lib in (l for libs in self._libs.values() for l in libs):
             if not isinstance(lib, StaticLibrary) or not lib.link_into:
                 continue
             if lib.link_into not in self._libs:
                 raise SandboxValidationError(
                     'FINAL_LIBRARY ("%s") does not match any LIBRARY_NAME'
--- a/python/mozbuild/mozbuild/frontend/gyp_reader.py
+++ b/python/mozbuild/mozbuild/frontend/gyp_reader.py
@@ -1,17 +1,16 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, unicode_literals
 
 import gyp
 import sys
-import time
 import os
 import mozpack.path as mozpath
 from mozpack.files import FileFinder
 from .sandbox import alphabetical_sorted
 from .context import (
     SourcePath,
     TemplateContext,
     VARIABLES,
@@ -78,18 +77,16 @@ def read_from_gyp(config, path, output, 
     process.
 
     config is a ConfigEnvironment, path is the path to a root gyp configuration
     file, output is the base path under which the objdir for the various gyp
     dependencies will be, and vars a dict of variables to pass to the gyp
     processor.
     """
 
-    time_start = time.time()
-
     # gyp expects plain str instead of unicode. The frontend code gives us
     # unicode strings, so convert them.
     path = encode(path)
     str_vars = dict((name, encode(value)) for name, value in vars.items())
 
     params = {
         b'parallel': False,
         b'generator_flags': {},
@@ -229,11 +226,9 @@ def read_from_gyp(config, path, output, 
         ]
         context['GENERATED_INCLUDES'] += ['/ipc/ipdl/_ipdlheaders']
         # These get set via VC project file settings for normal GYP builds.
         if config.substs['OS_TARGET'] == 'WINNT':
             context['DEFINES']['UNICODE'] = True
             context['DEFINES']['_UNICODE'] = True
         context['DISABLE_STL_WRAPPING'] = True
 
-        context.execution_time = time.time() - time_start
         yield context
-        time_start = time.time()
--- a/python/mozbuild/mozbuild/frontend/reader.py
+++ b/python/mozbuild/mozbuild/frontend/reader.py
@@ -73,16 +73,19 @@ from .context import (
     DEPRECATION_HINTS,
     SourcePath,
     SPECIAL_VARIABLES,
     SUBCONTEXTS,
     SubContext,
     TemplateContext,
 )
 
+from mozbuild.base import ExecutionSummary
+
+
 if sys.version_info.major == 2:
     text_type = unicode
     type_type = types.TypeType
 else:
     text_type = str
     type_type = type
 
 
@@ -864,16 +867,26 @@ class BuildReader(object):
     def __init__(self, config, finder=default_finder):
         self.config = config
 
         self._log = logging.getLogger(__name__)
         self._read_files = set()
         self._execution_stack = []
         self._finder = finder
 
+        self._execution_time = 0.0
+        self._file_count = 0
+
+    def summary(self):
+        return ExecutionSummary(
+            'Finished reading {file_count:d} moz.build files in '
+            '{execution_time:.2f}s',
+            file_count=self._file_count,
+            execution_time=self._execution_time)
+
     def read_topsrcdir(self):
         """Read the tree of linked moz.build files.
 
         This starts with the tree's top-most moz.build file and descends into
         all linked moz.build files until all relevant files have been evaluated.
 
         This is a generator of Context instances. As each moz.build file is
         read, a new Context is created and emitted.
@@ -1098,17 +1111,18 @@ class BuildReader(object):
                 mozpath.join(topobjdir, reldir, 'config.status'))
             config.topobjdir = topobjdir
             config.external_source_dir = None
 
         context = Context(VARIABLES, config)
         sandbox = MozbuildSandbox(context, metadata=metadata,
                                   finder=self._finder)
         sandbox.exec_file(path)
-        context.execution_time = time.time() - time_start
+        self._execution_time += time.time() - time_start
+        self._file_count += len(context.all_paths)
 
         # Yield main context before doing any processing. This gives immediate
         # consumers an opportunity to change state before our remaining
         # processing is performed.
         yield context
 
         # We first collect directories populated in variables.
         dir_vars = ['DIRS']
@@ -1136,24 +1150,27 @@ class BuildReader(object):
             from .gyp_reader import read_from_gyp
             non_unified_sources = set()
             for s in gyp_dir.non_unified_sources:
                 source = SourcePath(context, s)
                 if not self._finder.get(source.full_path):
                     raise SandboxValidationError('Cannot find %s.' % source,
                         context)
                 non_unified_sources.add(source)
+            time_start = time.time()
             for gyp_context in read_from_gyp(context.config,
                                              mozpath.join(curdir, gyp_dir.input),
                                              mozpath.join(context.objdir,
                                                           target_dir),
                                              gyp_dir.variables,
                                              non_unified_sources = non_unified_sources):
                 gyp_context.update(gyp_dir.sandbox_vars)
                 gyp_contexts.append(gyp_context)
+                self._file_count += len(gyp_context.all_paths)
+            self._execution_time += time.time() - time_start
 
         for gyp_context in gyp_contexts:
             context['DIRS'].append(mozpath.relpath(gyp_context.objdir, context.objdir))
             sandbox.subcontexts.append(gyp_context)
 
         for subcontext in sandbox.subcontexts:
             yield subcontext
 
--- a/python/mozbuild/mozbuild/test/frontend/test_emitter.py
+++ b/python/mozbuild/mozbuild/test/frontend/test_emitter.py
@@ -22,17 +22,16 @@ from mozbuild.frontend.data import (
     GeneratedSources,
     HostDefines,
     HostSources,
     IPDLFile,
     JARManifest,
     JsPreferenceFile,
     LocalInclude,
     Program,
-    ReaderSummary,
     Resources,
     SimpleProgram,
     Sources,
     StaticLibrary,
     TestHarnessFiles,
     TestManifest,
     UnifiedSources,
     VariablePassthru,
@@ -74,27 +73,22 @@ class TestEmitterBasic(unittest.TestCase
     def read_topsrcdir(self, reader, filter_common=True):
         emitter = TreeMetadataEmitter(reader.config)
         def ack(obj):
             obj.ack()
             return obj
 
         objs = list(ack(o) for o in emitter.emit(reader.read_topsrcdir()))
         self.assertGreater(len(objs), 0)
-        self.assertIsInstance(objs[-1], ReaderSummary)
 
         filtered = []
         for obj in objs:
             if filter_common and isinstance(obj, DirectoryTraversal):
                 continue
 
-            # Always filter ReaderSummary because it's asserted above.
-            if isinstance(obj, ReaderSummary):
-                continue
-
             filtered.append(obj)
 
         return filtered
 
     def test_dirs_traversal_simple(self):
         reader = self.reader('traversal-simple')
         objs = self.read_topsrcdir(reader, filter_common=False)
         self.assertEqual(len(objs), 4)