mach
author Gregory Szorc <gps@mozilla.com>
Tue, 24 May 2016 11:35:44 -0700
changeset 298842 32886f06e0fb2a5c2b58d49500b3baee59e970d4
parent 244638 fec6ff864e3c636b9c6c72b9f3b0026338a66d8e
child 301378 9e6cbeb45a5c61364128b127f6511cc0052a1041
permissions -rwxr-xr-x
Bug 1274655 - Resolve changeset when only repo is defined; r=mshal

Previously, we required both or none of MOZ_SOURCE_REPO and MOZ_SOURCE_CHANGESET to be defined. This logic was established in 51029f4d82d3 (bug 1247162).

There appears to be no good reason why we require MOZ_SOURCE_CHANGESET if MOZ_SOURCE_REPO is defined. After all, if we have a checkout we should be able to resolve the revision. This commit changes the logic to resolve the changeset when not defined. We still error if MOZ_SOURCE_REPO is defined but we can't resolve the changeset. I can't imagine this breaking anything.

This change will be necessary to appease TaskCluster tasks once mozharness is changed in a subsequent commit to define MOZ_SOURCE_REPO. Buildbot and TC each have their own way of specifying the source revision. Rather than change mozharness, it feels easier to just have the build system derive things. This decision is further justified by the fact there is a chicken and egg problem in mozharness: the environment variable dict is resolved before source directory population. So, we'd need to teach mozharness about TC's VCS mechanism, which it currently has no knowledge of. I'd rather not do that.

MozReview-Commit-ID: ANaoGbPGWj2

#!/bin/sh
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# The beginning of this script is both valid shell and valid python,
# such that the script starts with the shell and is reexecuted with
# the right python.
'''which' python2.7 > /dev/null && exec python2.7 "$0" "$@" || exec python "$0" "$@"
'''
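# Illustration only (the "./mach build" invocation is hypothetical): a POSIX
# shell tokenizes '''which' as '' + 'which', i.e. the word "which", so the
# line above effectively runs
#   which python2.7 > /dev/null && exec python2.7 ./mach build || exec python ./mach build
# re-executing this same file ("$0" "$@") with python2.7 when it is on PATH
# and falling back to plain "python" otherwise, while the Python interpreter
# parses those two lines as an unused triple-quoted string literal and simply
# continues with the code below.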

from __future__ import print_function, unicode_literals

import os
import sys

def ancestors(path):
    while path:
        yield path
        (path, child) = os.path.split(path)
        if child == "":
            break
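
# Illustration only (hypothetical path): ancestors('/home/user/obj-firefox')
# yields '/home/user/obj-firefox', then '/home/user', '/home' and finally '/',
# where os.path.split() returns an empty child and the loop stops.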

def load_mach(dir_path, mach_path):
    import imp
    # Execute mach_bootstrap.py under the module name 'mach_bootstrap' so the
    # import statement below finds it in sys.modules.
    with open(mach_path, 'r') as fh:
        imp.load_module('mach_bootstrap', fh, mach_path,
                        ('.py', 'r', imp.PY_SOURCE))
    import mach_bootstrap
    # bootstrap() returns the mach driver object whose run() is called from
    # main() below.
    return mach_bootstrap.bootstrap(dir_path)


def check_and_get_mach(dir_path):
    bootstrap_paths = (
        'build/mach_bootstrap.py',
        # test package bootstrap
        'tools/mach_bootstrap.py',
    )
    for bootstrap_path in bootstrap_paths:
        mach_path = os.path.join(dir_path, bootstrap_path)
        if os.path.isfile(mach_path):
            return load_mach(dir_path, mach_path)
    return None


def get_mach():
    # Check whether the current directory is within a mach src or obj dir.
    for dir_path in ancestors(os.getcwd()):
        # If we find a "mozinfo.json" file, we are in the objdir.
        mozinfo_path = os.path.join(dir_path, 'mozinfo.json')
        if os.path.isfile(mozinfo_path):
            import json
            with open(mozinfo_path) as mozinfo_fh:
                info = json.load(mozinfo_fh)
            if 'mozconfig' in info and 'MOZCONFIG' not in os.environ:
                # If the MOZCONFIG environment variable is not already set, set it
                # to the value from mozinfo.json.  This will tell the build system
                # to look for a config file at the path in $MOZCONFIG rather than
                # its default locations.
                #
                # Note: subprocess requires native strings in os.environ on Windows
                os.environ[b'MOZCONFIG'] = str(info['mozconfig'])
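            # Illustration only (hypothetical paths): an objdir's mozinfo.json
            # typically includes the keys consulted here, e.g.
            #   {"topsrcdir": "/home/user/mozilla-central",
            #    "mozconfig": "/home/user/mozilla-central/mozconfig"}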

            if 'topsrcdir' in info:
                # Continue searching for mach_bootstrap in the source directory.
                dir_path = info['topsrcdir']

        mach = check_and_get_mach(dir_path)
        if mach:
            return mach

    # If we didn't find a source path by scanning for a mozinfo.json, check
    # whether the directory containing this script is a source directory.
    return check_and_get_mach(os.path.dirname(__file__))

def main(args):
    mach = get_mach()
    if not mach:
        print('Could not run mach: No mach source directory found.')
        sys.exit(1)
    sys.exit(mach.run(args))


if __name__ == '__main__':
    if sys.platform == 'win32':
        # This is a complete hack to work around the fact that Windows
        # multiprocessing needs to import the original module (ie: this
        # file), but only works if it has a .py extension.
        #
        # We do this by a sort of two-level function interposing. The first
        # level interposes forking.get_command_line() with our version defined
        # in my_get_command_line(). Our version of get_command_line will
        # replace the command string with the contents of the fork_interpose()
        # function to be used in the subprocess.
        #
        # The subprocess then gets an interposed imp.find_module(), which we
        # hack up to find 'mach' without the .py extension, since we already
        # know where it is (it's us!). If we're not looking for 'mach', then
        # the original find_module will suffice.
        #
        # See also: http://bugs.python.org/issue19946
        # And: https://bugzilla.mozilla.org/show_bug.cgi?id=914563
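        # For illustration only (assuming CPython 2.7's multiprocessing.forking
        # and no extra interpreter flags), get_command_line() ordinarily
        # returns something like
        #   [sys.executable, '-c',
        #    'from multiprocessing.forking import main; main()',
        #    '--multiprocessing-fork']
        # my_get_command_line() below swaps the '-c' payload at index 2 for
        # the source of fork_interpose(), so the child process installs the
        # imp hooks before handing control back to multiprocessing's main().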
        import inspect
        from multiprocessing import forking
        global orig_command_line

        def fork_interpose():
            import imp
            import os
            import sys
            orig_find_module = imp.find_module
            def my_find_module(name, dirs):
                if name == 'mach':
                    path = os.path.join(dirs[0], 'mach')
                    f = open(path)
                    return (f, path, ('', 'r', imp.PY_SOURCE))
                return orig_find_module(name, dirs)

            # Don't allow writing bytecode file for mach module.
            orig_load_module = imp.load_module
            def my_load_module(name, file, path, description):
                # multiprocess.forking invokes imp.load_module manually and
                # hard-codes the name __parents_main__ as the module name.
                if name == '__parents_main__':
                    old_bytecode = sys.dont_write_bytecode
                    sys.dont_write_bytecode = True
                    try:
                        return orig_load_module(name, file, path, description)
                    finally:
                        sys.dont_write_bytecode = old_bytecode

                return orig_load_module(name, file, path, description)

            imp.find_module = my_find_module
            imp.load_module = my_load_module
            from multiprocessing.forking import main; main()

        def my_get_command_line():
            fork_code, lineno = inspect.getsourcelines(fork_interpose)
            # Remove the first line (for 'def fork_interpose():') and the three
            # levels of indentation (12 spaces).
            fork_string = ''.join(x[12:] for x in fork_code[1:])
            cmdline = orig_command_line()
            cmdline[2] = fork_string
            return cmdline
        orig_command_line = forking.get_command_line
        forking.get_command_line = my_get_command_line

    main(sys.argv[1:])