Bug 948272 - Report more timings from moz.build; r=glandium
author: Gregory Szorc <gps@mozilla.com>
date: Tue, 10 Dec 2013 14:26:10 +0900
changeset 159977: 7027a7afab3c5f5626070c864f4b969f9a8a6d90
parent 159976: d551609a45a9c3f1e39ed8a7f035ed62528a5824
child 159978: 6f89d41fec228cb8eae38ab6525541970bfa7e95
push id: 37477
push user: gszorc@mozilla.com
push date: Thu, 12 Dec 2013 01:44:05 +0000
treeherder: mozilla-inbound@7027a7afab3c
reviewers: glandium
bugs: 948272
milestone: 29.0a1
Bug 948272 - Report more timings from moz.build; r=glandium

The big change is that we now explicitly record the time spent in emitter.py. We also report how much wall time went untracked.
python/mozbuild/mozbuild/backend/base.py
python/mozbuild/mozbuild/frontend/data.py
python/mozbuild/mozbuild/frontend/emitter.py
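Before the diff itself, a rough sketch of the timing scheme the patch implements: each phase (running moz.build sandboxes, emitting tree-metadata objects, backend consumption) is bracketed with time.time(), and whatever wall time is left over is reported as untracked. The driver function and the read_sandboxes/emit/consume callables below are hypothetical stand-ins for illustration, not the real mozbuild classes.

import time

def timed_build(read_sandboxes, emit, consume):
    # Hypothetical driver showing the three timing buckets the patch reports:
    # sandbox (moz.build) time, emitter time, and backend time. Whatever is
    # left of the wall time is the "untracked" figure.
    wall_start = time.time()

    start = time.time()
    sandboxes = list(read_sandboxes())   # read and execute moz.build files
    sandbox_time = time.time() - start

    start = time.time()
    objs = list(emit(sandboxes))         # force the emitter generator so its
    emitter_time = time.time() - start   # work is actually timed here

    start = time.time()
    consume(objs)                        # backend writes out its files
    backend_time = time.time() - start

    wall_time = time.time() - wall_start
    untracked = wall_time - sandbox_time - emitter_time - backend_time
    return sandbox_time, emitter_time, backend_time, untracked

In the actual patch the sandbox figure is accumulated from each sandbox's execution_time and the emitter brackets each expansion individually, but the arithmetic for the untracked time is the same.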
--- a/python/mozbuild/mozbuild/backend/base.py
+++ b/python/mozbuild/mozbuild/backend/base.py
@@ -61,46 +61,58 @@ class BackendConsumeSummary(object):
 
         # CPU time spent during the interval captured by wall_time.
         self.cpu_time = 0.0
 
         # The total wall time spent executing moz.build files. This is just
         # the read and execute time. It does not cover consume time.
         self.mozbuild_execution_time = 0.0
 
+        # The total wall time spent emitting objects from sandboxes.
+        self.emitter_execution_time = 0.0
+
         # The total wall time spent in the backend. This counts the time the
         # backend writes out files, etc.
         self.backend_execution_time = 0.0
 
         # How much wall time the system spent doing other things. This is
-        # wall_time - mozbuild_execution_time - backend_execution_time.
+        # wall_time - mozbuild_execution_time - emitter_execution_time -
+        # backend_execution_time.
         self.other_time = 0.0
 
     @property
     def reader_summary(self):
-        return 'Finished reading {:d} moz.build files into {:d} descriptors in {:.2f}s'.format(
-            self.mozbuild_count, self.object_count,
+        return 'Finished reading {:d} moz.build files in {:.2f}s'.format(
+            self.mozbuild_count,
             self.mozbuild_execution_time)
 
     @property
+    def emitter_summary(self):
+        return 'Processed into {:d} build config descriptors in {:.2f}s'.format(
+            self.object_count, self.emitter_execution_time)
+
+    @property
     def backend_summary(self):
         return 'Backend executed in {:.2f}s'.format(self.backend_execution_time)
 
     def backend_detailed_summary(self):
         """Backend summary to be supplied by BuildBackend implementations."""
         return None
 
     @property
     def total_summary(self):
         efficiency_value = self.cpu_time / self.wall_time if self.wall_time else 1.0
-        return 'Total wall time: {:.2f}s; CPU time: {:.2f}s; Efficiency: {:.0%}'.format(
-            self.wall_time, self.cpu_time, efficiency_value)
+        return 'Total wall time: {:.2f}s; CPU time: {:.2f}s; Efficiency: ' \
+            '{:.0%}; Untracked: {:.2f}s'.format(
+                self.wall_time, self.cpu_time, efficiency_value,
+                self.other_time)
 
     def summaries(self):
         yield self.reader_summary
+        yield self.emitter_summary
         yield self.backend_summary
 
         detailed = self.backend_detailed_summary()
         if detailed:
             yield detailed
 
         yield self.total_summary
 
@@ -192,17 +204,18 @@ class BuildBackend(LoggingMixin):
             self.consume_object(obj)
             backend_time += time.time() - obj_start
 
             if isinstance(obj, SandboxDerived):
                 self.backend_input_files |= obj.sandbox_all_paths
 
             if isinstance(obj, ReaderSummary):
                 self.summary.mozbuild_count = obj.total_file_count
-                self.summary.mozbuild_execution_time = obj.total_execution_time
+                self.summary.mozbuild_execution_time = obj.total_sandbox_execution_time
+                self.summary.emitter_execution_time = obj.total_emitter_execution_time
 
         finished_start = time.time()
         self.consume_finished()
         backend_time += time.time() - finished_start
 
         # Purge backend files created in previous run, but not created anymore
         delete_files = self._backend_output_list - self._backend_output_files
         for path in delete_files:
@@ -227,16 +240,17 @@ class BuildBackend(LoggingMixin):
             with open(self._backend_output_list_file, 'a'):
                 os.utime(self._backend_output_list_file, None)
 
         self.summary.cpu_time = time.clock() - cpu_start
         self.summary.wall_time = time.time() - time_start
         self.summary.backend_execution_time = backend_time
         self.summary.other_time = self.summary.wall_time - \
             self.summary.mozbuild_execution_time - \
+            self.summary.emitter_execution_time - \
             self.summary.backend_execution_time
 
         return self.summary
 
     @abstractmethod
     def consume_object(self, obj):
         """Consumes an individual TreeMetadata instance.
 
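With the new emitter_execution_time field, summaries() yields one extra line and the total line gains an "Untracked" figure. A rough idea of the resulting output, reusing the format strings from the hunks above with made-up timings:

# Made-up timings; the format strings are the ones added in the hunks above.
mozbuild_count, object_count = 1823, 2200
mozbuild_time, emitter_time, backend_time = 2.11, 0.87, 3.42
wall_time, cpu_time = 7.10, 6.50
other_time = wall_time - mozbuild_time - emitter_time - backend_time

print('Finished reading {:d} moz.build files in {:.2f}s'.format(
    mozbuild_count, mozbuild_time))
print('Processed into {:d} build config descriptors in {:.2f}s'.format(
    object_count, emitter_time))
print('Backend executed in {:.2f}s'.format(backend_time))
print('Total wall time: {:.2f}s; CPU time: {:.2f}s; Efficiency: '
      '{:.0%}; Untracked: {:.2f}s'.format(
          wall_time, cpu_time, cpu_time / wall_time, other_time))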
--- a/python/mozbuild/mozbuild/frontend/data.py
+++ b/python/mozbuild/mozbuild/frontend/data.py
@@ -36,20 +36,22 @@ class TreeMetadata(object):
 
     def ack(self):
         self._ack = True
 
 
 class ReaderSummary(TreeMetadata):
     """A summary of what the reader did."""
 
-    def __init__(self, total_file_count, total_execution_time):
+    def __init__(self, total_file_count, total_sandbox_execution_time,
+        total_emitter_execution_time):
         TreeMetadata.__init__(self)
         self.total_file_count = total_file_count
-        self.total_execution_time = total_execution_time
+        self.total_sandbox_execution_time = total_sandbox_execution_time
+        self.total_emitter_execution_time = total_emitter_execution_time
 
 
 class SandboxDerived(TreeMetadata):
     """Build object derived from a single MozbuildSandbox instance.
 
     It holds fields common to all sandboxes. This class is likely never
     instantiated directly but is instead derived from.
     """
--- a/python/mozbuild/mozbuild/frontend/emitter.py
+++ b/python/mozbuild/mozbuild/frontend/emitter.py
@@ -4,16 +4,17 @@
 
 from __future__ import unicode_literals
 
 import json
 import logging
 import os
 import traceback
 import sys
+import time
 
 from mach.mixin.logging import LoggingMixin
 
 import mozpack.path as mozpath
 import manifestparser
 
 from .data import (
     ConfigFileSubstitution,
@@ -74,36 +75,54 @@ class TreeMetadataEmitter(LoggingMixin):
 
     def emit(self, output):
         """Convert the BuildReader output into data structures.
 
         The return value from BuildReader.read_topsrcdir() (a generator) is
         typically fed into this function.
         """
         file_count = 0
-        execution_time = 0.0
+        sandbox_execution_time = 0.0
+        emitter_time = 0.0
         sandboxes = {}
 
+        def emit_objs(objs):
+            for o in objs:
+                yield o
+                if not o._ack:
+                    raise Exception('Unhandled object of type %s' % type(o))
+
         for out in output:
             if isinstance(out, MozbuildSandbox):
                 # Keep all sandboxes around, we will need them later.
                 sandboxes[out['OBJDIR']] = out
 
-                for o in self.emit_from_sandbox(out):
-                    yield o
-                    if not o._ack:
-                        raise Exception('Unhandled object of type %s' % type(o))
+                start = time.time()
+                # We need to expand the generator for the timings to work.
+                objs = list(self.emit_from_sandbox(out))
+                emitter_time += time.time() - start
+
+                for o in emit_objs(objs): yield o
 
                 # Update the stats.
                 file_count += len(out.all_paths)
-                execution_time += out.execution_time
+                sandbox_execution_time += out.execution_time
 
             else:
                 raise Exception('Unhandled output type: %s' % out)
 
+        start = time.time()
+        objs = list(self._emit_libs_derived(sandboxes))
+        emitter_time += time.time() - start
+
+        for o in emit_objs(objs): yield o
+
+        yield ReaderSummary(file_count, sandbox_execution_time, emitter_time)
+
+    def _emit_libs_derived(self, sandboxes):
         for objdir, libname, final_lib in self._final_libs:
             if final_lib not in self._libs:
                 raise Exception('FINAL_LIBRARY in %s (%s) does not match any '
                                 'LIBRARY_NAME' % (objdir, final_lib))
             libs = self._libs[final_lib]
             if len(libs) > 1:
                 raise Exception('FINAL_LIBRARY in %s (%s) matches a '
                                 'LIBRARY_NAME defined in multiple places (%s)' %
@@ -129,18 +148,16 @@ class TreeMetadataEmitter(LoggingMixin):
                 # (recursively), that its FINAL_LIBRARY is that root library.
                 if not libdef.refcount:
                     for p in recurse_libs(path, basename):
                         passthru = VariablePassthru(sandboxes[p])
                         passthru.variables['FINAL_LIBRARY'] = basename
                         yield passthru
                 yield libdef
 
-        yield ReaderSummary(file_count, execution_time)
-
     def emit_from_sandbox(self, sandbox):
         """Convert a MozbuildSandbox to tree metadata objects.
 
         This is a generator of mozbuild.frontend.data.SandboxDerived instances.
         """
         # We always emit a directory traversal descriptor. This is needed by
         # the recursive make backend.
         for o in self._emit_directory_traversal_from_sandbox(sandbox): yield o
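The list() around emit_from_sandbox() (and _emit_libs_derived()) is what makes the emitter timing meaningful: a generator does no work until it is consumed, so bracketing the bare call with time.time() would measure almost nothing. A small illustration with a toy slow_emit generator standing in for the real emitter:

import time

def slow_emit(n):
    # Stand-in for emit_from_sandbox(): a generator that does its work lazily.
    for i in range(n):
        time.sleep(0.01)
        yield i

# Timing the bare call measures only generator creation (roughly 0 seconds).
start = time.time()
objs = slow_emit(10)
print('lazy:   {:.3f}s'.format(time.time() - start))

# Forcing the generator with list() makes the elapsed time reflect the actual
# emitting work, which is what the patch attributes to emitter_time.
start = time.time()
objs = list(slow_emit(10))
print('forced: {:.3f}s'.format(time.time() - start))

In the patch the expanded list is then replayed through emit_objs(), so callers still receive a generator and the unhandled-object (_ack) check is preserved.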