Bug 847073 - Record and print detailed summary of moz.build execution; r=glandium
authorGregory Szorc <gps@mozilla.com>
Mon, 04 Mar 2013 10:32:57 -0800
changeset 123708 cc1e0568c96ad93dbb52be2be3298474d69f1e90
parent 123707 b55a438b74e27b314b70a6e7c80fa1b80f43afdf
child 123709 1501761c97260bdc9f54202ec9ccc2f8c66bb950
push id 2452
push user lsblakk@mozilla.com
push date Mon, 13 May 2013 16:59:38 +0000
treeherder mozilla-esr52@1c070ab0f9db [default view] [failures only]
perfherder [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers glandium
bugs 847073
milestone 22.0a1
Bug 847073 - Record and print detailed summary of moz.build execution; r=glandium
build/ConfigStatus.py
js/src/build/ConfigStatus.py
python/mozbuild/mozbuild/backend/base.py
python/mozbuild/mozbuild/backend/configenvironment.py
python/mozbuild/mozbuild/backend/recursivemake.py
python/mozbuild/mozbuild/frontend/data.py
python/mozbuild/mozbuild/frontend/emitter.py
python/mozbuild/mozbuild/frontend/reader.py
python/mozbuild/mozbuild/test/frontend/test_emitter.py
python/mozbuild/mozbuild/test/test_util.py
python/mozbuild/mozbuild/util.py
--- a/build/ConfigStatus.py
+++ b/build/ConfigStatus.py
@@ -1,16 +1,18 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 # Combined with build/autoconf/config.status.m4, ConfigStatus is an almost
 # drop-in replacement for autoconf 2.13's config.status, with features
 # borrowed from autoconf > 2.5, and additional features.
 
+from __future__ import print_function
+
 import logging
 import os
 import sys
 
 from optparse import OptionParser
 
 from mach.logging import LoggingManager
 from mozbuild.backend.configenvironment import ConfigEnvironment
@@ -19,23 +21,16 @@ from mozbuild.frontend.emitter import Tr
 from mozbuild.frontend.reader import BuildReader
 
 from Preprocessor import Preprocessor
 
 
 log_manager = LoggingManager()
 
 
-# Basic logging facility
-verbose = False
-def log(string):
-    if verbose:
-        print >>sys.stderr, string
-
-
 def config_status(topobjdir = '.', topsrcdir = '.',
                   defines = [], non_global_defines = [], substs = [],
                   files = [], headers = []):
     '''Main function, providing config.status functionality.
 
     Contrary to config.status, it doesn't use CONFIG_FILES or CONFIG_HEADERS
     variables, but like config.status from autoconf 2.6, single files may be
     generated with the --file and --header options. Several such options can
@@ -112,27 +107,27 @@ def config_status(topobjdir = '.', topsr
         headers = options.headers
         if not options.files:
             files = []
     # Default to display messages when giving --file or --headers on the
     # command line.
     log_level = logging.INFO
 
     if options.files or options.headers or options.verbose:
-        global verbose
-        verbose = True
         log_level = logging.DEBUG
 
     log_manager.add_terminal_logging(level=log_level)
     log_manager.enable_unstructured()
 
     if not options.files and not options.headers:
-        print >>sys.stderr, "creating config files and headers..."
+        print('Reticulating splines...', file=sys.stderr)
+        summary = backend.consume(definitions)
 
-        backend.consume(definitions)
+        for line in summary.summaries():
+            print(line, file=sys.stderr)
 
         files = [os.path.join(topobjdir, f) for f in files]
         headers = [os.path.join(topobjdir, f) for f in headers]
 
     for file in files:
         env.create_config_file(file)
     for header in headers:
         env.create_config_header(header)
--- a/js/src/build/ConfigStatus.py
+++ b/js/src/build/ConfigStatus.py
@@ -1,16 +1,18 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 # Combined with build/autoconf/config.status.m4, ConfigStatus is an almost
 # drop-in replacement for autoconf 2.13's config.status, with features
 # borrowed from autoconf > 2.5, and additional features.
 
+from __future__ import print_function
+
 import logging
 import os
 import sys
 
 from optparse import OptionParser
 
 from mach.logging import LoggingManager
 from mozbuild.backend.configenvironment import ConfigEnvironment
@@ -19,23 +21,16 @@ from mozbuild.frontend.emitter import Tr
 from mozbuild.frontend.reader import BuildReader
 
 from Preprocessor import Preprocessor
 
 
 log_manager = LoggingManager()
 
 
-# Basic logging facility
-verbose = False
-def log(string):
-    if verbose:
-        print >>sys.stderr, string
-
-
 def config_status(topobjdir = '.', topsrcdir = '.',
                   defines = [], non_global_defines = [], substs = [],
                   files = [], headers = []):
     '''Main function, providing config.status functionality.
 
     Contrary to config.status, it doesn't use CONFIG_FILES or CONFIG_HEADERS
     variables, but like config.status from autoconf 2.6, single files may be
     generated with the --file and --header options. Several such options can
@@ -112,27 +107,27 @@ def config_status(topobjdir = '.', topsr
         headers = options.headers
         if not options.files:
             files = []
     # Default to display messages when giving --file or --headers on the
     # command line.
     log_level = logging.INFO
 
     if options.files or options.headers or options.verbose:
-        global verbose
-        verbose = True
         log_level = logging.DEBUG
 
     log_manager.add_terminal_logging(level=log_level)
     log_manager.enable_unstructured()
 
     if not options.files and not options.headers:
-        print >>sys.stderr, "creating config files and headers..."
+        print('Reticulating splines...', file=sys.stderr)
+        summary = backend.consume(definitions)
 
-        backend.consume(definitions)
+        for line in summary.summaries():
+            print(line, file=sys.stderr)
 
         files = [os.path.join(topobjdir, f) for f in files]
         headers = [os.path.join(topobjdir, f) for f in headers]
 
     for file in files:
         env.create_config_file(file)
     for header in headers:
         env.create_config_header(header)
--- a/python/mozbuild/mozbuild/backend/base.py
+++ b/python/mozbuild/mozbuild/backend/base.py
@@ -6,39 +6,107 @@ from __future__ import unicode_literals
 
 from abc import (
     ABCMeta,
     abstractmethod,
 )
 
 import os
 import sys
+import time
 
 from mach.mixin.logging import LoggingMixin
 
-from ..frontend.data import SandboxDerived
+from ..frontend.data import (
+    ReaderSummary,
+    SandboxDerived,
+)
 from .configenvironment import ConfigEnvironment
 
 
+class BackendConsumeSummary(object):
+    """Holds state about what a backend did.
+
+    This is used primarily to print a summary of what the backend did
+    so people know what's going on.
+    """
+    def __init__(self):
+        # How many moz.build files were read. This includes included files.
+        self.mozbuild_count = 0
+
+        # The number of derived objects from the read moz.build files.
+        self.object_count = 0
+
+        # The total wall time this backend spent consuming objects. If
+        # the iterable passed into consume() is a generator, this includes the
+        # time spent to read moz.build files.
+        self.wall_time = 0.0
+
+        # CPU time spent during the interval captured by wall_time.
+        self.cpu_time = 0.0
+
+        # The total wall time spent executing moz.build files. This is just
+        # the read and execute time. It does not cover consume time.
+        self.mozbuild_execution_time = 0.0
+
+        # The total wall time spent in the backend. This counts the time the
+        # backend writes out files, etc.
+        self.backend_execution_time = 0.0
+
+        # How much wall time the system spent doing other things. This is
+        # wall_time - mozbuild_execution_time - backend_execution_time.
+        self.other_time = 0.0
+
+    @property
+    def reader_summary(self):
+        return 'Finished reading {:d} moz.build files into {:d} descriptors in {:.2f}s'.format(
+            self.mozbuild_count, self.object_count,
+            self.mozbuild_execution_time)
+
+    @property
+    def backend_summary(self):
+        return 'Backend executed in {:.2f}s'.format(self.backend_execution_time)
+
+    def backend_detailed_summary(self):
+        """Backend summary to be supplied by BuildBackend implementations."""
+        return None
+
+    @property
+    def total_summary(self):
+        return 'Total wall time: {:.2f}s; CPU time: {:.2f}s; Efficiency: {:.0%}'.format(
+            self.wall_time, self.cpu_time, self.cpu_time / self.wall_time)
+
+    def summaries(self):
+        yield self.reader_summary
+        yield self.backend_summary
+
+        detailed = self.backend_detailed_summary()
+        if detailed:
+            yield detailed
+
+        yield self.total_summary
+
+
 class BuildBackend(LoggingMixin):
     """Abstract base class for build backends.
 
     A build backend is merely a consumer of the build configuration (the output
     of the frontend processing). It does something with said data. What exactly
     is the discretion of the specific implementation.
     """
 
     __metaclass__ = ABCMeta
 
     def __init__(self, environment):
         assert isinstance(environment, ConfigEnvironment)
 
         self.populate_logger()
 
         self.environment = environment
+        self.summary = BackendConsumeSummary()
 
         self._environments = {}
         self._environments[environment.topobjdir] = environment
 
         self._init()
 
     def _init(self):
         """Hook point for child classes to perform actions during __init__.
@@ -69,27 +137,48 @@ class BuildBackend(LoggingMixin):
 
         This is the main method of the interface. This is what takes the
         frontend output and does something with it.
 
         Child classes are not expected to implement this method. Instead, the
         base class consumes objects and calls methods (possibly) implemented by
         child classes.
         """
+        cpu_start = time.clock()
+        time_start = time.time()
+        backend_time = 0.0
 
         for obj in objs:
+            self.summary.object_count += 1
+            obj_start = time.time()
             self.consume_object(obj)
+            backend_time += time.time() - obj_start
+
+            if isinstance(obj, ReaderSummary):
+                self.summary.mozbuild_count = obj.total_file_count
+                self.summary.mozbuild_execution_time = obj.total_execution_time
 
         # Write out a file indicating when this backend was last generated.
         age_file = os.path.join(self.environment.topobjdir,
             'backend.%s.built' % self.__class__.__name__)
         with open(age_file, 'a'):
             os.utime(age_file, None)
 
+        finished_start = time.time()
         self.consume_finished()
+        backend_time += time.time() - finished_start
+
+        self.summary.cpu_time = time.clock() - cpu_start
+        self.summary.wall_time = time.time() - time_start
+        self.summary.backend_execution_time = backend_time
+        self.summary.other_time = self.summary.wall_time - \
+            self.summary.mozbuild_execution_time - \
+            self.summary.backend_execution_time
+
+        return self.summary
 
     @abstractmethod
     def consume_object(self, obj):
         """Consumes an individual TreeMetadata instance.
 
         This is the main method used by child classes to react to build
         metadata.
         """
--- a/python/mozbuild/mozbuild/backend/configenvironment.py
+++ b/python/mozbuild/mozbuild/backend/configenvironment.py
@@ -175,18 +175,20 @@ class ConfigEnvironment(object):
         pp = Preprocessor()
         pp.context.update(self.substs)
         pp.context.update(top_srcdir = self.get_top_srcdir(path))
         pp.context.update(srcdir = self.get_file_srcdir(path))
         pp.context.update(relativesrcdir = self.get_relative_srcdir(path))
         pp.context.update(DEPTH = self.get_depth(path))
         pp.do_filter('attemptSubstitution')
         pp.setMarker(None)
-        with FileAvoidWrite(path) as pp.out:
-            pp.do_include(input)
+
+        pp.out = FileAvoidWrite(path)
+        pp.do_include(input)
+        return pp.out.close()
 
     def create_config_header(self, path):
         '''Creates the given config header. A config header is generated by
         taking the corresponding source file and replacing some #define/#undef
         occurences:
             "#undef NAME" is turned into "#define NAME VALUE"
             "#define NAME" is unchanged
             "#define NAME ORIGINAL_VALUE" is turned into "#define NAME VALUE"
@@ -208,10 +210,10 @@ class ConfigEnvironment(object):
                             if cmd == 'define' and value:
                                 l = l[:m.start('value')] + str(self.defines[name]) + l[m.end('value'):]
                             elif cmd == 'undef':
                                 l = l[:m.start('cmd')] + 'define' + l[m.end('cmd'):m.end('name')] + ' ' + str(self.defines[name]) + l[m.end('name'):]
                         elif cmd == 'undef':
                            l = '/* ' + l[:m.end('name')] + ' */' + l[m.end('name'):]
 
                 output.write(l)
-            output.close()
+            return output.close()
 
--- a/python/mozbuild/mozbuild/backend/recursivemake.py
+++ b/python/mozbuild/mozbuild/backend/recursivemake.py
@@ -2,21 +2,23 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import unicode_literals
 
 import errno
 import logging
 import os
+import types
 
 from .base import BuildBackend
 from ..frontend.data import (
     ConfigFileSubstitution,
     DirectoryTraversal,
+    SandboxDerived,
 )
 from ..util import FileAvoidWrite
 
 
 class BackendMakeFile(object):
     """Represents a generated backend.mk file.
 
     This is both a wrapper around a file handle as well as a container that
@@ -80,20 +82,20 @@ class BackendMakeFile(object):
     def write(self, buf):
         self.fh.write(buf)
 
     def close(self):
         if self.inputs:
             l = ' '.join(sorted(self.inputs))
             self.fh.write('BACKEND_INPUT_FILES += %s\n' % l)
 
-        self.fh.close()
+        result = self.fh.close()
 
         if not self.inputs:
-            return
+            return result
 
         # Update mtime iff any of its input files are newer. See class notes
         # for why we do this.
         existing_mtime = os.path.getmtime(self.path)
 
         def mtime(path):
             try:
                 return os.path.getmtime(path)
@@ -103,16 +105,18 @@ class BackendMakeFile(object):
 
                 raise
 
         input_mtime = max(mtime(path) for path in self.inputs)
 
         if input_mtime > existing_mtime:
             os.utime(self.path, None)
 
+        return result
+
 
 class RecursiveMakeBackend(BuildBackend):
     """Backend that integrates with the existing recursive make build system.
 
     This backend facilitates the transition from Makefile.in to moz.build
     files.
 
     This backend performs Makefile.in -> Makefile conversion. It also writes
@@ -122,35 +126,63 @@ class RecursiveMakeBackend(BuildBackend)
     This backend may eventually evolve to write out non-recursive make files.
     However, as long as there are Makefile.in files in the tree, we are tied to
     recursive make and thus will need this backend.
     """
 
     def _init(self):
         self._backend_files = {}
 
+        self.summary.managed_count = 0
+        self.summary.created_count = 0
+        self.summary.updated_count = 0
+        self.summary.unchanged_count = 0
+
+        def detailed(summary):
+            return '{:d} total backend files. {:d} created; {:d} updated; {:d} unchanged'.format(
+                summary.managed_count, summary.created_count,
+                summary.updated_count, summary.unchanged_count)
+
+        # This is a little kludgy and could be improved with a better API.
+        self.summary.backend_detailed_summary = types.MethodType(detailed,
+            self.summary)
+
+    def _update_from_avoid_write(self, result):
+        existed, updated = result
+
+        if not existed:
+            self.summary.created_count += 1
+        elif updated:
+            self.summary.updated_count += 1
+        else:
+            self.summary.unchanged_count += 1
+
     def consume_object(self, obj):
         """Write out build files necessary to build with recursive make."""
 
+        if not isinstance(obj, SandboxDerived):
+            return
+
         backend_file = self._backend_files.get(obj.srcdir,
             BackendMakeFile(obj.srcdir, obj.objdir, self.get_environment(obj)))
 
         # Define the paths that will trigger a backend rebuild. We always
         # add autoconf.mk because that is proxy for CONFIG. We can't use
         # config.status because there is no make target for that!
         autoconf_path = os.path.join(obj.topobjdir, 'config', 'autoconf.mk')
         backend_file.inputs.add(autoconf_path)
         backend_file.inputs |= obj.sandbox_all_paths
 
         if isinstance(obj, DirectoryTraversal):
             self._process_directory_traversal(obj, backend_file)
         elif isinstance(obj, ConfigFileSubstitution):
             backend_file.write('SUBSTITUTE_FILES += %s\n' % obj.relpath)
-
-            backend_file.environment.create_config_file(obj.output_path)
+            self._update_from_avoid_write(
+                backend_file.environment.create_config_file(obj.output_path))
+            self.summary.managed_count += 1
 
         self._backend_files[obj.srcdir] = backend_file
 
     def consume_finished(self):
         for srcdir in sorted(self._backend_files.keys()):
             bf = self._backend_files[srcdir]
 
             if not os.path.exists(bf.objdir):
@@ -159,20 +191,23 @@ class RecursiveMakeBackend(BuildBackend)
             makefile_in = os.path.join(srcdir, 'Makefile.in')
 
             if not os.path.exists(makefile_in):
                 raise Exception('Could not find Makefile.in: %s' % makefile_in)
 
             out_path = os.path.join(bf.objdir, 'Makefile')
             self.log(logging.DEBUG, 'create_makefile', {'path': out_path},
                 'Generating makefile: {path}')
-            bf.environment.create_config_file(out_path)
+            self._update_from_avoid_write(
+                bf.environment.create_config_file(out_path))
+            self.summary.managed_count += 1
 
             bf.write('SUBSTITUTE_FILES += Makefile\n')
-            bf.close()
+            self._update_from_avoid_write(bf.close())
+            self.summary.managed_count += 1
 
     def _process_directory_traversal(self, obj, backend_file):
         """Process a data.DirectoryTraversal instance."""
         fh = backend_file.fh
 
         for tier, dirs in obj.tier_dirs.iteritems():
             fh.write('TIERS += %s\n' % tier)
 
--- a/python/mozbuild/mozbuild/frontend/data.py
+++ b/python/mozbuild/mozbuild/frontend/data.py
@@ -19,16 +19,24 @@ from __future__ import unicode_literals
 
 from collections import OrderedDict
 
 
 class TreeMetadata(object):
     """Base class for all data being captured."""
 
 
+class ReaderSummary(TreeMetadata):
+    """A summary of what the reader did."""
+
+    def __init__(self, total_file_count, total_execution_time):
+        self.total_file_count = total_file_count
+        self.total_execution_time = total_execution_time
+
+
 class SandboxDerived(TreeMetadata):
     """Build object derived from a single MozbuildSandbox instance.
 
     It holds fields common to all sandboxes. This class is likely never
     instantiated directly but is instead derived from.
     """
 
     __slots__ = (
--- a/python/mozbuild/mozbuild/frontend/emitter.py
+++ b/python/mozbuild/mozbuild/frontend/emitter.py
@@ -4,16 +4,17 @@
 
 from __future__ import unicode_literals
 
 import os
 
 from .data import (
     DirectoryTraversal,
     ConfigFileSubstitution,
+    ReaderSummary,
 )
 
 from .reader import MozbuildSandbox
 
 
 class TreeMetadataEmitter(object):
     """Converts the executed mozbuild files into data structures.
 
@@ -26,23 +27,33 @@ class TreeMetadataEmitter(object):
         self.config = config
 
     def emit(self, output):
         """Convert the BuildReader output into data structures.
 
         The return value from BuildReader.read_topsrcdir() (a generator) is
         typically fed into this function.
         """
+        file_count = 0
+        execution_time = 0.0
+
         for out in output:
             if isinstance(out, MozbuildSandbox):
                 for o in self.emit_from_sandbox(out):
                     yield o
+
+                # Update the stats.
+                file_count += len(out.all_paths)
+                execution_time += out.execution_time
+
             else:
                 raise Exception('Unhandled output type: %s' % out)
 
+        yield ReaderSummary(file_count, execution_time)
+
     def emit_from_sandbox(self, sandbox):
         """Convert a MozbuildSandbox to tree metadata objects.
 
         This is a generator of mozbuild.frontend.data.SandboxDerived instances.
         """
 
         # We always emit a directory traversal descriptor. This is needed by
         # the recursive make backend.
--- a/python/mozbuild/mozbuild/frontend/reader.py
+++ b/python/mozbuild/mozbuild/frontend/reader.py
@@ -21,16 +21,17 @@ The BuildReader contains basic logic for
 It does this by examining specific variables populated during execution.
 """
 
 from __future__ import print_function, unicode_literals
 
 import logging
 import os
 import sys
+import time
 import traceback
 import types
 
 from io import StringIO
 
 from mozbuild.util import (
     ReadOnlyDefaultDict,
     ReadOnlyDict,
@@ -608,18 +609,20 @@ class BuildReader(object):
 
         if path in self._read_files:
             log(self._log, logging.WARNING, 'read_already', {'path': path},
                 'File already read. Skipping: {path}')
             return
 
         self._read_files.add(path)
 
+        time_start = time.time()
         sandbox = MozbuildSandbox(self.config, path)
         sandbox.exec_file(path, filesystem_absolute=filesystem_absolute)
+        sandbox.execution_time = time.time() - time_start
         yield sandbox
 
         # Traverse into referenced files.
 
         # We first collect directories populated in variables.
         dir_vars = ['DIRS', 'PARALLEL_DIRS', 'TOOL_DIRS']
 
         if self.config.substs.get('ENABLE_TESTS', False) == '1':
--- a/python/mozbuild/mozbuild/test/frontend/test_emitter.py
+++ b/python/mozbuild/mozbuild/test/frontend/test_emitter.py
@@ -7,16 +7,17 @@ from __future__ import unicode_literals
 import os
 import unittest
 
 from mozunit import main
 
 from mozbuild.frontend.data import (
     ConfigFileSubstitution,
     DirectoryTraversal,
+    ReaderSummary,
 )
 from mozbuild.frontend.emitter import TreeMetadataEmitter
 from mozbuild.frontend.reader import BuildReader
 
 from mozbuild.test.common import MockConfig
 
 
 data_path = os.path.abspath(os.path.dirname(__file__))
@@ -25,22 +26,28 @@ data_path = os.path.join(data_path, 'dat
 
 class TestEmitterBasic(unittest.TestCase):
     def reader(self, name):
         config = MockConfig(os.path.join(data_path, name))
         config.substs['ENABLE_TESTS'] = '1'
 
         return BuildReader(config)
 
-    def test_dirs_traversal_simple(self):
-        reader = self.reader('traversal-simple')
+    def read_topsrcdir(self, reader):
         emitter = TreeMetadataEmitter(reader.config)
 
         objs = list(emitter.emit(reader.read_topsrcdir()))
+        self.assertGreater(len(objs), 0)
+        self.assertIsInstance(objs[-1], ReaderSummary)
 
+        return objs[:-1]
+
+    def test_dirs_traversal_simple(self):
+        reader = self.reader('traversal-simple')
+        objs = self.read_topsrcdir(reader)
         self.assertEqual(len(objs), 4)
 
         for o in objs:
             self.assertIsInstance(o, DirectoryTraversal)
             self.assertEqual(o.parallel_dirs, [])
             self.assertEqual(o.tool_dirs, [])
             self.assertEqual(o.test_dirs, [])
             self.assertEqual(o.test_tool_dirs, [])
@@ -52,19 +59,17 @@ class TestEmitterBasic(unittest.TestCase
         reldirs = [o.relativedir for o in objs]
         self.assertEqual(reldirs, ['', 'foo', 'foo/biz', 'bar'])
 
         dirs = [o.dirs for o in objs]
         self.assertEqual(dirs, [['foo', 'bar'], ['biz'], [], []])
 
     def test_traversal_all_vars(self):
         reader = self.reader('traversal-all-vars')
-        emitter = TreeMetadataEmitter(reader.config)
-
-        objs = list(emitter.emit(reader.read_topsrcdir()))
+        objs = self.read_topsrcdir(reader)
         self.assertEqual(len(objs), 6)
 
         for o in objs:
             self.assertIsInstance(o, DirectoryTraversal)
 
         reldirs = set([o.relativedir for o in objs])
         self.assertEqual(reldirs, set(['', 'parallel', 'regular', 'test',
             'test_tool', 'tool']))
@@ -79,29 +84,25 @@ class TestEmitterBasic(unittest.TestCase
                 self.assertEqual(o.test_tool_dirs, ['test_tool'])
                 self.assertEqual(o.tool_dirs, ['tool'])
                 self.assertEqual(o.external_make_dirs, ['external_make'])
                 self.assertEqual(o.parallel_external_make_dirs,
                     ['parallel_external_make'])
 
     def test_tier_simple(self):
         reader = self.reader('traversal-tier-simple')
-        emitter = TreeMetadataEmitter(reader.config)
-
-        objs = list(emitter.emit(reader.read_topsrcdir()))
+        objs = self.read_topsrcdir(reader)
         self.assertEqual(len(objs), 4)
 
         reldirs = [o.relativedir for o in objs]
         self.assertEqual(reldirs, ['', 'foo', 'foo/biz', 'bar'])
 
     def test_config_file_substitution(self):
         reader = self.reader('config-file-substitution')
-        emitter = TreeMetadataEmitter(reader.config)
-
-        objs = list(emitter.emit(reader.read_topsrcdir()))
+        objs = self.read_topsrcdir(reader)
         self.assertEqual(len(objs), 3)
 
         self.assertIsInstance(objs[0], DirectoryTraversal)
         self.assertIsInstance(objs[1], ConfigFileSubstitution)
         self.assertIsInstance(objs[2], ConfigFileSubstitution)
 
         topobjdir = os.path.abspath(reader.config.topobjdir)
         self.assertEqual(objs[1].relpath, 'foo')
--- a/python/mozbuild/mozbuild/test/test_util.py
+++ b/python/mozbuild/mozbuild/test/test_util.py
@@ -49,25 +49,33 @@ class TestHashing(unittest.TestCase):
 
         self.assertEqual(actual, expected)
 
 
 class TestFileAvoidWrite(unittest.TestCase):
     def test_file_avoid_write(self):
         with MockedOpen({'file': 'content'}):
             # Overwriting an existing file replaces its content
-            with FileAvoidWrite('file') as file:
-                file.write('bazqux')
+            faw = FileAvoidWrite('file')
+            faw.write('bazqux')
+            self.assertEqual(faw.close(), (True, True))
             self.assertEqual(open('file', 'r').read(), 'bazqux')
 
             # Creating a new file (obviously) stores its content
-            with FileAvoidWrite('file2') as file:
-                file.write('content')
+            faw = FileAvoidWrite('file2')
+            faw.write('content')
+            self.assertEqual(faw.close(), (False, True))
             self.assertEqual(open('file2').read(), 'content')
 
+        with MockedOpen({'file': 'content'}):
+            with FileAvoidWrite('file') as file:
+                file.write('foobar')
+
+            self.assertEqual(open('file', 'r').read(), 'foobar')
+
         class MyMockedOpen(MockedOpen):
             '''MockedOpen extension to raise an exception if something
             attempts to write in an opened file.
             '''
             def __call__(self, name, mode):
                 if 'w' in mode:
                     raise Exception, 'Unexpected open with write mode'
                 return MockedOpen.__call__(self, name, mode)
@@ -75,14 +83,15 @@ class TestFileAvoidWrite(unittest.TestCa
         with MyMockedOpen({'file': 'content'}):
             # Validate that MyMockedOpen works as intended
             file = FileAvoidWrite('file')
             file.write('foobar')
             self.assertRaises(Exception, file.close)
 
             # Check that no write actually happens when writing the
             # same content as what already is in the file
-            with FileAvoidWrite('file') as file:
-                file.write('content')
+            faw = FileAvoidWrite('file')
+            faw.write('content')
+            self.assertEqual(faw.close(), (True, False))
 
 
 if __name__ == '__main__':
     main()
--- a/python/mozbuild/mozbuild/util.py
+++ b/python/mozbuild/mozbuild/util.py
@@ -108,32 +108,42 @@ class FileAvoidWrite(StringIO):
     differs from what is on disk, then we write out the new content. Otherwise,
     the original file is untouched.
     """
     def __init__(self, filename):
         StringIO.__init__(self)
         self.filename = filename
 
     def close(self):
+        """Stop accepting writes, compare file contents, and rewrite if needed.
+
+        Returns a tuple of bools indicating what action was performed:
+
+            (file existed, file updated)
+        """
         buf = self.getvalue()
         StringIO.close(self)
+        existed = False
         try:
             existing = open(self.filename, 'rU')
+            existed = True
         except IOError:
             pass
         else:
             try:
                 if existing.read() == buf:
-                    return
+                    return True, False
             except IOError:
                 pass
             finally:
                 existing.close()
 
         ensureParentDir(self.filename)
         with open(self.filename, 'w') as file:
             file.write(buf)
 
+        return existed, True
+
     def __enter__(self):
         return self
     def __exit__(self, type, value, traceback):
         self.close()