Bug 1117860 - part 1 - factor sources->unified file computation out of _add_unified_build_rules; r=mshal
author Nathan Froyd <froydnj@mozilla.com>
Mon, 22 Dec 2014 09:35:52 -0500
changeset 235460 a862343408d4025970a300be60de96399695a710
parent 235459 839f6e4f322a626c9565455e29988ecbb3b9f407
child 235461 a5dcb0bb6327a271e9ab7ff4ddd869cdae6b13b9
push id 366
push user cmanchester@mozilla.com
push date Thu, 08 Jan 2015 16:40:24 +0000
reviewers mshal
bugs 1117860
milestone 37.0a1
Bug 1117860 - part 1 - factor sources->unified file computation out of _add_unified_build_rules; r=mshal _add_unified_build_rules does quite a lot of work besides adding makefile rules and variables. The divvying up of source files into unified files is one part of that, so move it out into its own function. When we eventually move that computation out of recursivemake.py, this refactoring will make it easier to verify that's what we've done.
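
For reference, here is a minimal standalone sketch of the grouping behavior the new _group_unified_files helper implements. The function name, the sample filenames, and the Python 3 spelling (itertools.zip_longest, a sentinel object as the fill value) are illustrative only; the in-tree code is Python 2 and uses itertools.izip_longest and itertools.ifilter with a ("dummy",) fill value, as in the diff below.

    import itertools

    def group_unified_files(files, unified_prefix, unified_suffix,
                            files_per_unified_file):
        """Yield (unified_filename, source_filenames) tuples."""
        sentinel = object()  # padding inserted when the last group is short
        # Standard itertools "grouper" recipe: n references to one iterator.
        args = [iter(files)] * files_per_unified_file
        for i, group in enumerate(itertools.zip_longest(*args,
                                                        fillvalue=sentinel)):
            # Strip the padding so the final group keeps only real files.
            filenames = [f for f in group if f is not sentinel]
            yield '%s%d.%s' % (unified_prefix, i, unified_suffix), filenames

    # Grouping five sources two at a time leaves a short final group:
    print(list(group_unified_files(['a.cpp', 'b.cpp', 'c.cpp', 'd.cpp',
                                    'e.cpp'], 'Unified', 'cpp', 2)))
    # [('Unified0.cpp', ['a.cpp', 'b.cpp']),
    #  ('Unified1.cpp', ['c.cpp', 'd.cpp']),
    #  ('Unified2.cpp', ['e.cpp'])]

The filtering step is the whole reason for the "little dance" noted in the comments: izip_longest pads the final tuple to full length, and those pad values must not end up in a unified file.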
python/mozbuild/mozbuild/backend/recursivemake.py
--- a/python/mozbuild/mozbuild/backend/recursivemake.py
+++ b/python/mozbuild/mozbuild/backend/recursivemake.py
@@ -605,16 +605,38 @@ class RecursiveMakeBackend(CommonBackend
         with self._write_file(
                 mozpath.join(self.environment.topobjdir, 'root.mk')) as root:
             root_mk.dump(root, removal_guard=False)
 
         with self._write_file(
                 mozpath.join(self.environment.topobjdir, 'root-deps.mk')) as root_deps:
             root_deps_mk.dump(root_deps, removal_guard=False)
 
+    def _group_unified_files(self, files, unified_prefix, unified_suffix,
+                             files_per_unified_file):
+        "Return an iterator of (unified_filename, source_filenames) tuples."
+        # Our last returned list of source filenames may be short, and we
+        # don't want the fill value inserted by izip_longest to be an
+        # issue.  So we do a little dance to filter it out ourselves.
+        dummy_fill_value = ("dummy",)
+        def filter_out_dummy(iterable):
+            return itertools.ifilter(lambda x: x != dummy_fill_value,
+                                     iterable)
+
+        # From the itertools documentation, slightly modified:
+        def grouper(n, iterable):
+            "grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
+            args = [iter(iterable)] * n
+            return itertools.izip_longest(fillvalue=dummy_fill_value, *args)
+
+        for i, unified_group in enumerate(grouper(files_per_unified_file,
+                                                  files)):
+            just_the_filenames = list(filter_out_dummy(unified_group))
+            yield '%s%d.%s' % (unified_prefix, i, unified_suffix), just_the_filenames
+
     def _add_unified_build_rules(self, makefile, files, output_directory,
                                  unified_prefix='Unified',
                                  unified_suffix='cpp',
                                  extra_dependencies=[],
                                  unified_files_makefile_variable='unified_files',
                                  include_curdir_build_rules=True,
                                  poison_windows_h=False,
                                  files_per_unified_file=16):
@@ -625,42 +647,25 @@ class RecursiveMakeBackend(CommonBackend
         explanation = "\n" \
             "# We build files in 'unified' mode by including several files\n" \
             "# together into a single source file.  This cuts down on\n" \
             "# compilation times and debug information size.  %d was chosen as\n" \
             "# a reasonable compromise between clobber rebuild time, incremental\n" \
             "# rebuild time, and compiler memory usage." % files_per_unified_file
         makefile.add_statement(explanation)
 
-        def unified_files():
-            "Return an iterator of (unified_filename, source_filenames) tuples."
-            # Our last returned list of source filenames may be short, and we
-            # don't want the fill value inserted by izip_longest to be an
-            # issue.  So we do a little dance to filter it out ourselves.
-            dummy_fill_value = ("dummy",)
-            def filter_out_dummy(iterable):
-                return itertools.ifilter(lambda x: x != dummy_fill_value,
-                                         iterable)
-
-            # From the itertools documentation, slightly modified:
-            def grouper(n, iterable):
-                "grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
-                args = [iter(iterable)] * n
-                return itertools.izip_longest(fillvalue=dummy_fill_value, *args)
-
-            for i, unified_group in enumerate(grouper(files_per_unified_file,
-                                                      files)):
-                just_the_filenames = list(filter_out_dummy(unified_group))
-                yield '%s%d.%s' % (unified_prefix, i, unified_suffix), just_the_filenames
-
-        all_sources = ' '.join(source for source, _ in unified_files())
+        unified_source_mapping = list(self._group_unified_files(files,
+                                                                unified_prefix=unified_prefix,
+                                                                unified_suffix=unified_suffix,
+                                                                files_per_unified_file=files_per_unified_file))
+        all_sources = ' '.join(source for source, _ in unified_source_mapping)
         makefile.add_statement('%s := %s' % (unified_files_makefile_variable,
                                                all_sources))
 
-        for unified_file, source_filenames in unified_files():
+        for unified_file, source_filenames in unified_source_mapping:
             if extra_dependencies:
                 rule = makefile.create_rule([unified_file])
                 rule.add_dependencies(extra_dependencies)
 
             # The rule we just defined is only for cases where the cpp files get
             # blown away and we need to regenerate them.  The rule doesn't correctly
             # handle source files being added/removed/renamed.  Therefore, we
             # generate them here also to make sure everything's up-to-date.
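
To make the emitted makefile fragment concrete: the first statement produced from the mapping is the variable assignment built at the all_sources line above. A hypothetical mapping of three sources grouped two at a time (filenames invented here for illustration) reproduces that logic as follows:

    # Hypothetical mapping, mirroring what _group_unified_files yields.
    unified_source_mapping = [('Unified0.cpp', ['a.cpp', 'b.cpp']),
                              ('Unified1.cpp', ['c.cpp'])]
    all_sources = ' '.join(source for source, _ in unified_source_mapping)
    print('%s := %s' % ('unified_files', all_sources))
    # -> unified_files := Unified0.cpp Unified1.cpp

The per-unified-file rules with extra_dependencies are then layered on top of that assignment, one rule per entry in the mapping.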