Update in-tree pymake to tip. a=NPOTB
author     Kyle Huey <khuey@kylehuey.com>
date       Mon, 18 Oct 2010 09:50:27 -0400
changeset  55996  942830bbb94c02d13dcfbe2ff0d956eb818d1d5a
parent     55995  2b50a7545f0edcebb3a111a911ee5b47fb898c47
child      55997  6714a0c929d43948a936926243dcd085ae705427
push id    16366
push user  khuey@kylehuey.com
push date  Mon, 18 Oct 2010 13:53:06 +0000
treeherder mozilla-central@942830bbb94c
reviewers  NPOTB
milestone  2.0b8pre
files:
build/pymake/.hg_archival.txt
build/pymake/make.py
build/pymake/pymake/builtins.py
build/pymake/pymake/data.py
build/pymake/pymake/implicit.py
build/pymake/pymake/process.py
build/pymake/tests/native-environment.mk
build/pymake/tests/native-pycommandpath-sep.mk
build/pymake/tests/native-pycommandpath.mk
build/pymake/tests/native-simple.mk
build/pymake/tests/parallel-native.mk
build/pymake/tests/pycmd.py
build/pymake/tests/rm-fail.mk
build/pymake/tests/rm.mk
build/pymake/tests/runtests.py
build/pymake/tests/subdir/pymod.py
--- a/build/pymake/.hg_archival.txt
+++ b/build/pymake/.hg_archival.txt
@@ -1,2 +1,5 @@
 repo: f5ab154deef2ffa97f1b2139589ae4a1962090a4
-node: 7ae0b4af32617677698f9de3ab76bcb154bbf085
+node: 84ced2ab192d4b2ad796110d4c62fabc9ac6968d
+branch: default
+latesttag: null
+latesttagdistance: 257
--- a/build/pymake/make.py
+++ b/build/pymake/make.py
@@ -5,13 +5,15 @@ make.py
 
 A drop-in or mostly drop-in replacement for GNU make.
 """
 
 import sys, os
 import pymake.command, pymake.process
 
 import gc
-gc.disable()
+
+if __name__ == '__main__':
+  gc.disable()
 
-pymake.command.main(sys.argv[1:], os.environ, os.getcwd(), cb=sys.exit)
-pymake.process.ParallelContext.spin()
-assert False, "Not reached"
+  pymake.command.main(sys.argv[1:], os.environ, os.getcwd(), cb=sys.exit)
+  pymake.process.ParallelContext.spin()
+  assert False, "Not reached"
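
The make.py change above wraps the top-level calls in an "if __name__ == '__main__'" guard. This matters once pymake drives work through multiprocessing (see the process.py changes below): on platforms that spawn rather than fork, pool workers re-import the main module, and unguarded top-level code would kick off a new build inside every worker. A minimal sketch of the pattern, independent of pymake:

# Sketch only (not part of the patch): top-level work must sit behind the
# __main__ guard so that pool workers re-importing this module do not re-run it.
import multiprocessing

def square(n):
    return n * n

if __name__ == '__main__':
    pool = multiprocessing.Pool(processes=2)
    print(pool.map(square, [1, 2, 3]))   # only the parent process reaches this
    pool.close()
    pool.join()
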
--- a/build/pymake/pymake/builtins.py
+++ b/build/pymake/pymake/builtins.py
@@ -1,10 +1,68 @@
-"""
-Implicit variables; perhaps in the future this will also include some implicit
-rules, at least match-anything cancellation rules.
-"""
+# Basic commands implemented in Python
+import sys, os, shutil, time
+from getopt import getopt, GetoptError
+
+from process import PythonException
+
+__all__ = ["rm", "sleep", "touch"]
 
-variables = {
-    'RM': 'rm -f',
-    '.LIBPATTERNS': 'lib%.so lib%.a',
-    '.PYMAKE': '1',
-    }
+def rm(args):
+  """
+  Emulate most of the behavior of rm(1).
+  Only supports the -r (--recursive) and -f (--force) arguments.
+  """
+  try:
+    opts, args = getopt(args, "rRf", ["force", "recursive"])
+  except GetoptError, e:
+    raise PythonException, ("rm: %s" % e, 1)
+  force = False
+  recursive = False
+  for o, a in opts:
+    if o in ('-f', '--force'):
+      force = True
+    elif o in ('-r', '-R', '--recursive'):
+      recursive = True
+  for f in args:
+    if os.path.isdir(f):
+      if not recursive:
+        raise PythonException, ("rm: cannot remove '%s': Is a directory" % f, 1)
+      else:
+        shutil.rmtree(f, force)
+    elif os.path.exists(f):
+      try:
+        os.unlink(f)
+      except:
+        if not force:
+          raise PythonException, ("rm: failed to remove '%s': %s" % (f, sys.exc_info()[0]), 1)
+    elif not force:
+      raise PythonException, ("rm: cannot remove '%s': No such file or directory" % f, 1)
+
+def sleep(args):
+    """
+    Emulate the behavior of sleep(1).
+    """
+    total = 0
+    values = {'s': 1, 'm': 60, 'h': 3600, 'd': 86400}
+    for a in args:
+        multiplier = 1
+        for k, v in values.iteritems():
+            if a.endswith(k):
+                a = a[:-1]
+                multiplier = v
+                break
+        try:
+            f = float(a)
+            total += f * multiplier
+        except ValueError:
+            raise PythonException, ("sleep: invalid time interval '%s'" % a, 1)
+    time.sleep(total)
+
+def touch(args):
+    """
+    Emulate the behavior of touch(1).
+    """
+    for f in args:
+        if os.path.exists(f):
+            os.utime(f, None)
+        else:
+            open(f, 'w').close()
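
Each new builtin is an ordinary function that takes the remaining command words as a list and reports failure by raising process.PythonException with a message and an exit code. A hedged sketch of driving them directly, outside any makefile (the harness below is illustrative, not pymake code):

# Illustrative only: exercising the builtins by hand.
import sys
from pymake.builtins import rm, sleep, touch
from pymake.process import PythonException

try:
    touch(['scratch.tmp'])       # create the file, or bump its mtime
    sleep(['0.1'])               # s/m/h/d suffixes are also accepted
    rm(['-f', 'scratch.tmp'])    # -f silences "no such file" errors
except PythonException, e:       # Python 2 syntax, matching the tree
    print >>sys.stderr, e
    sys.exit(e.exitcode)
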
--- a/build/pymake/pymake/data.py
+++ b/build/pymake/pymake/data.py
@@ -1,14 +1,14 @@
 """
 A representation of makefile data structures.
 """
 
 import logging, re, os, sys
-import parserdata, parser, functions, process, util, builtins
+import parserdata, parser, functions, process, util, implicit
 from cStringIO import StringIO
 
 _log = logging.getLogger('pymake.data')
 
 class DataError(util.MakeError):
     pass
 
 class ResolutionError(DataError):
@@ -1137,27 +1137,28 @@ def splitcommand(command):
 
         i += 1
 
     if i > start:
         yield command[start:i]
 
 def findmodifiers(command):
     """
-    Find any of +-@ prefixed on the command.
-    @returns (command, isHidden, isRecursive, ignoreErrors)
+    Find any of +-@% prefixed on the command.
+    @returns (command, isHidden, isRecursive, ignoreErrors, isNative)
     """
 
     isHidden = False
     isRecursive = False
     ignoreErrors = False
+    isNative = False
 
-    realcommand = command.lstrip(' \t\n@+-')
+    realcommand = command.lstrip(' \t\n@+-%')
     modset = set(command[:-len(realcommand)])
-    return realcommand, '@' in modset, '+' in modset, '-' in modset
+    return realcommand, '@' in modset, '+' in modset, '-' in modset, '%' in modset
 
 class _CommandWrapper(object):
     def __init__(self, cline, ignoreErrors, loc, context, **kwargs):
         self.ignoreErrors = ignoreErrors
         self.loc = loc
         self.cline = cline
         self.kwargs = kwargs
         self.context = context
@@ -1168,34 +1169,69 @@ class _CommandWrapper(object):
             self.usercb(error=True)
         else:
             self.usercb(error=False)
 
     def __call__(self, cb):
         self.usercb = cb
         process.call(self.cline, loc=self.loc, cb=self._cb, context=self.context, **self.kwargs)
 
+class _NativeWrapper(_CommandWrapper):
+    def __init__(self, cline, ignoreErrors, loc, context,
+                 pycommandpath, **kwargs):
+        _CommandWrapper.__init__(self, cline, ignoreErrors, loc, context,
+                                 **kwargs)
+        # get the module and method to call
+        parts, badchar = process.clinetoargv(cline)
+        if parts is None:
+            raise DataError("native command '%s': shell metacharacter '%s' in command line" % (cline, badchar), self.loc)
+        if len(parts) < 2:
+            raise DataError("native command '%s': no method name specified" % cline, self.loc)
+        if pycommandpath:
+            self.pycommandpath = re.split('[%s\s]+' % os.pathsep,
+                                          pycommandpath)
+        else:
+            self.pycommandpath = None
+        self.module = parts[0]
+        self.method = parts[1]
+        self.cline_list = parts[2:]
+
+    def __call__(self, cb):
+        self.usercb = cb
+        process.call_native(self.module, self.method, self.cline_list,
+                            loc=self.loc, cb=self._cb, context=self.context,
+                            pycommandpath=self.pycommandpath, **self.kwargs)
+
 def getcommandsforrule(rule, target, makefile, prerequisites, stem):
     v = Variables(parent=target.variables)
     setautomaticvariables(v, makefile, target, prerequisites)
     if stem is not None:
         setautomatic(v, '*', [stem])
 
     env = makefile.getsubenvironment(v)
 
     for c in rule.commands:
         cstring = c.resolvestr(makefile, v)
         for cline in splitcommand(cstring):
-            cline, isHidden, isRecursive, ignoreErrors = findmodifiers(cline)
+            cline, isHidden, isRecursive, ignoreErrors, isNative = findmodifiers(cline)
             if isHidden or makefile.silent:
                 echo = None
             else:
                 echo = "%s$ %s" % (c.loc, cline)
-            yield _CommandWrapper(cline, ignoreErrors=ignoreErrors, env=env, cwd=makefile.workdir, loc=c.loc, context=makefile.context,
-                                 echo=echo)
+            if not isNative:
+                yield _CommandWrapper(cline, ignoreErrors=ignoreErrors, env=env, cwd=makefile.workdir, loc=c.loc, context=makefile.context,
+                                      echo=echo)
+            else:
+                f, s, e = v.get("PYCOMMANDPATH", True)
+                if e:
+                    e = e.resolvestr(makefile, v, ["PYCOMMANDPATH"])
+                yield _NativeWrapper(cline, ignoreErrors=ignoreErrors,
+                                     env=env, cwd=makefile.workdir,
+                                     loc=c.loc, context=makefile.context,
+                                     echo=echo, pycommandpath=e)
 
 class Rule(object):
     """
     A rule contains a list of prerequisites and a list of commands. It may also
     contain rule-specific variables. This rule may be associated with multiple targets.
     """
 
     def __init__(self, prereqs, doublecolon, loc, weakdeps):
@@ -1365,16 +1401,18 @@ class Makefile(object):
                            Variables.SOURCE_AUTOMATIC, workdir.replace('\\','/'))
 
         # the list of included makefiles, whether or not they existed
         self.included = []
 
         self.variables.set('MAKE_RESTARTS', Variables.FLAVOR_SIMPLE,
                            Variables.SOURCE_AUTOMATIC, restarts > 0 and str(restarts) or '')
 
+        self.variables.set('.PYMAKE', Variables.FLAVOR_SIMPLE,
+                           Variables.SOURCE_MAKEFILE, "1")
         if make is not None:
             self.variables.set('MAKE', Variables.FLAVOR_SIMPLE,
                                Variables.SOURCE_MAKEFILE, make)
 
         if makeoverrides != '':
             self.variables.set('-*-command-variables-*-', Variables.FLAVOR_SIMPLE,
                                Variables.SOURCE_AUTOMATIC, makeoverrides)
             makeflags += ' -- $(MAKEOVERRIDES)'
@@ -1389,17 +1427,17 @@ class Makefile(object):
 
         self.makelevel = makelevel
         self.variables.set('MAKELEVEL', Variables.FLAVOR_SIMPLE,
                            Variables.SOURCE_MAKEFILE, str(makelevel))
 
         self.variables.set('MAKECMDGOALS', Variables.FLAVOR_SIMPLE,
                            Variables.SOURCE_AUTOMATIC, ' '.join(targets))
 
-        for vname, val in builtins.variables.iteritems():
+        for vname, val in implicit.variables.iteritems():
             self.variables.set(vname,
                                Variables.FLAVOR_SIMPLE,
                                Variables.SOURCE_IMPLICIT, val)
 
     def foundtarget(self, t):
         """
         Inform the makefile of a target which is a candidate for being the default target,
         if there isn't already a default target.
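
The data.py changes above add a fourth command modifier: a leading % marks a recipe line as a native command. findmodifiers() now strips it alongside @, + and -, and _NativeWrapper splits the rest of the line into a Python module, a method name and the remaining arguments. Roughly, for a recipe line like "$(RM) objdir" after expansion under the new implicit variables (the values in the comments are what the code above should produce):

# Sketch only, assuming the pymake package is importable.
from pymake.data import findmodifiers
from pymake import process

cline = "%pymake.builtins rm -f objdir"

cline, isHidden, isRecursive, ignoreErrors, isNative = findmodifiers(cline)
# cline == "pymake.builtins rm -f objdir", isNative == True

parts, badchar = process.clinetoargv(cline)
module, method, argv = parts[0], parts[1], parts[2:]
# module == "pymake.builtins", method == "rm", argv == ["-f", "objdir"]
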
new file mode 100644
--- /dev/null
+++ b/build/pymake/pymake/implicit.py
@@ -0,0 +1,13 @@
+"""
+Implicit variables; perhaps in the future this will also include some implicit
+rules, at least match-anything cancellation rules.
+"""
+
+variables = {
+    'RM': '%pymake.builtins rm -f',
+    'SLEEP': '%pymake.builtins sleep',
+    'TOUCH': '%pymake.builtins touch',
+    '.LIBPATTERNS': 'lib%.so lib%.a',
+    '.PYMAKE': '1',
+    }
+
--- a/build/pymake/pymake/process.py
+++ b/build/pymake/pymake/process.py
@@ -1,14 +1,18 @@
 """
 Skipping shell invocations is good, when possible. This wrapper around subprocess does dirty work of
 parsing command lines into argv and making sure that no shell magic is being used.
 """
 
-import subprocess, shlex, re, logging, sys, traceback, os
+#TODO: ship pyprocessing?
+import multiprocessing, multiprocessing.dummy
+import subprocess, shlex, re, logging, sys, traceback, os, imp
+# XXXkhuey Work around http://bugs.python.org/issue1731717
+subprocess._cleanup = lambda: None
 import command, util
 if sys.platform=='win32':
     import win32process
 
 _log = logging.getLogger('pymake.process')
 
 _blacklist = re.compile(r'[$><;*?[{~`|&]|\\\n')
 def clinetoargv(cline):
@@ -74,136 +78,250 @@ def call(cline, env, cwd, loc, cb, conte
 
     if argv[0].find('/') != -1:
         executable = util.normaljoin(cwd, argv[0])
     else:
         executable = None
 
     context.call(argv, executable=executable, shell=False, env=env, cwd=cwd, cb=cb, echo=echo)
 
+def call_native(module, method, argv, env, cwd, loc, cb, context, echo,
+                pycommandpath=None):
+    context.call_native(module, method, argv, env=env, cwd=cwd, cb=cb,
+                        echo=echo, pycommandpath=pycommandpath)
+
 def statustoresult(status):
     """
     Convert the status returned from waitpid into a prettier numeric result.
     """
     sig = status & 0xFF
     if sig:
         return -sig
 
     return status >>8
 
+class Job(object):
+    """
+    A single job to be executed on the process pool.
+    """
+    done = False # set to true when the job completes
+
+    def __init__(self):
+        self.exitcode = -127
+
+    def notify(self, condition, result):
+        condition.acquire()
+        self.done = True
+        self.exitcode = result
+        condition.notify()
+        condition.release()
+
+    def get_callback(self, condition):
+        return lambda result: self.notify(condition, result)
+
+class PopenJob(Job):
+    """
+    A job that executes a command using subprocess.Popen.
+    """
+    def __init__(self, argv, executable, shell, env, cwd):
+        Job.__init__(self)
+        self.argv = argv
+        self.executable = executable
+        self.shell = shell
+        self.env = env
+        self.cwd = cwd
+
+    def run(self):
+        try:
+            p = subprocess.Popen(self.argv, executable=self.executable, shell=self.shell, env=self.env, cwd=self.cwd)
+            return p.wait()
+        except OSError, e:
+            print >>sys.stderr, e
+            return -127
+
+class PythonException(Exception):
+    def __init__(self, message, exitcode):
+        Exception.__init__(self)
+        self.message = message
+        self.exitcode = exitcode
+
+    def __str__(self):
+        return self.message
+
+def load_module_recursive(module, path):
+    """
+    Emulate the behavior of __import__, but allow
+    passing a custom path to search for modules.
+    """
+    bits = module.split('.')
+    for i, bit in enumerate(bits):
+        dotname = '.'.join(bits[:i+1])
+        try:
+            f, path, desc = imp.find_module(bit, path)
+            m = imp.load_module(dotname, f, path, desc)
+            if f is None:
+                path = m.__path__
+        except ImportError:
+            return
+
+class PythonJob(Job):
+    """
+    A job that calls a Python method.
+    """
+    def __init__(self, module, method, argv, env, cwd, pycommandpath=None):
+        Job.__init__(self)
+        self.module = module
+        self.method = method
+        self.argv = argv
+        self.env = env
+        self.cwd = cwd
+        self.pycommandpath = pycommandpath or []
+
+    def run(self):
+        oldenv = os.environ
+        try:
+            os.chdir(self.cwd)
+            os.environ = self.env
+            if self.module not in sys.modules:
+                load_module_recursive(self.module,
+                                      sys.path + self.pycommandpath)
+            if self.module not in sys.modules:
+                print >>sys.stderr, "No module named '%s'" % self.module
+                return -127
+            m = sys.modules[self.module]
+            if self.method not in m.__dict__:
+                print >>sys.stderr, "No method named '%s' in module %s" % (self.method, self.module)
+                return -127
+            m.__dict__[self.method](self.argv)
+        except PythonException, e:
+            print >>sys.stderr, e
+            return e.exitcode
+        except:
+            print >>sys.stderr, sys.exc_info()[1]
+            return -127
+        finally:
+            os.environ = oldenv
+        return 0
+
+def job_runner(job):
+    """
+    Run a job. Called in a Process pool.
+    """
+    return job.run()
+
 class ParallelContext(object):
     """
     Manages the parallel execution of processes.
     """
 
     _allcontexts = set()
+    _condition = multiprocessing.Condition()
 
     def __init__(self, jcount):
         self.jcount = jcount
         self.exit = False
 
+        self.processpool = multiprocessing.Pool(processes=jcount)
+        self.threadpool = multiprocessing.dummy.Pool(processes=jcount)
         self.pending = [] # list of (cb, args, kwargs)
         self.running = [] # list of (subprocess, cb)
 
         self._allcontexts.add(self)
 
     def finish(self):
         assert len(self.pending) == 0 and len(self.running) == 0, "pending: %i running: %i" % (len(self.pending), len(self.running))
+        self.processpool.close()
+        self.threadpool.close()
+        self.processpool.join()
+        self.threadpool.join()
         self._allcontexts.remove(self)
 
     def run(self):
         while len(self.pending) and len(self.running) < self.jcount:
             cb, args, kwargs = self.pending.pop(0)
             cb(*args, **kwargs)
 
     def defer(self, cb, *args, **kwargs):
         assert self.jcount > 1 or not len(self.pending), "Serial execution error defering %r %r %r: currently pending %r" % (cb, args, kwargs, self.pending)
         self.pending.append((cb, args, kwargs))
 
     def _docall(self, argv, executable, shell, env, cwd, cb, echo):
-            if echo is not None:
-                print echo
-            try:
-                p = subprocess.Popen(argv, executable=executable, shell=shell, env=env, cwd=cwd)
-            except OSError, e:
-                print >>sys.stderr, e
-                cb(-127)
-                return
+        if echo is not None:
+            print echo
+        job = PopenJob(argv, executable=executable, shell=shell, env=env, cwd=cwd)
+        self.threadpool.apply_async(job_runner, args=(job,), callback=job.get_callback(ParallelContext._condition))
+        self.running.append((job, cb))
 
-            self.running.append((p, cb))
+    def _docallnative(self, module, method, argv, env, cwd, cb, echo,
+                      pycommandpath=None):
+        if echo is not None:
+            print echo
+        job = PythonJob(module, method, argv, env, cwd, pycommandpath)
+        self.processpool.apply_async(job_runner, args=(job,), callback=job.get_callback(ParallelContext._condition))
+        self.running.append((job, cb))
 
     def call(self, argv, shell, env, cwd, cb, echo, executable=None):
         """
         Asynchronously call the process
         """
 
         self.defer(self._docall, argv, executable, shell, env, cwd, cb, echo)
 
-    if sys.platform == 'win32':
-        @staticmethod
-        def _waitany():
-            return win32process.WaitForAnyProcess([p for c in ParallelContext._allcontexts for p, cb in c.running])
+    def call_native(self, module, method, argv, env, cwd, cb,
+                    echo, pycommandpath=None):
+        """
+        Asynchronously call the native function
+        """
 
-        @staticmethod
-        def _comparepid(pid, process):
-            return pid == process
+        self.defer(self._docallnative, module, method, argv, env, cwd, cb,
+                   echo, pycommandpath)
 
-    else:
-        @staticmethod
-        def _waitany():
-            return os.waitpid(-1, 0)
+    @staticmethod
+    def _waitany(condition):
+        def _checkdone():
+            jobs = []
+            for c in ParallelContext._allcontexts:
+                for i in xrange(0, len(c.running)):
+                    if c.running[i][0].done:
+                        jobs.append(c.running[i])
+                for j in jobs:
+                    if j in c.running:
+                        c.running.remove(j)
+            return jobs
 
-        @staticmethod
-        def _comparepid(pid, process):
-            return pid == process.pid
+        # We must acquire the lock, and then check to see if any jobs have
+        # finished.  If we don't check after acquiring the lock it's possible
+        # that all outstanding jobs will have completed before we wait and we'll
+        # wait for notifications that have already occurred.
+        condition.acquire()
+        jobs = _checkdone()
 
+        if jobs == []:
+            condition.wait()
+            jobs = _checkdone()
+
+        condition.release()
+
+        return jobs
+        
     @staticmethod
     def spin():
         """
         Spin the 'event loop', and never return.
         """
 
         while True:
             clist = list(ParallelContext._allcontexts)
             for c in clist:
                 c.run()
 
-            # In python 2.4, subprocess instances wait on child processes under the hood when they are created... this
-            # unfortunate behavior means that before using os.waitpid, we need to check the status using .poll()
-            # see http://bytes.com/groups/python/675403-os-wait-losing-child
-            found = False
-            for c in clist:
-                for i in xrange(0, len(c.running)):
-                    p, cb = c.running[i]
-                    result = p.poll()
-                    if result != None:
-                        del c.running[i]
-                        cb(result)
-                        found = True
-                        break
-
-                if found: break
-            if found: continue
-
             dowait = util.any((len(c.running) for c in ParallelContext._allcontexts))
-
             if dowait:
-                pid, status = ParallelContext._waitany()
-                result = statustoresult(status)
-
-                for c in ParallelContext._allcontexts:
-                    for i in xrange(0, len(c.running)):
-                        p, cb = c.running[i]
-                        if ParallelContext._comparepid(pid, p):
-                            del c.running[i]
-                            cb(result)
-                            found = True
-                            break
-
-                    if found: break
+                # Wait on local jobs first for perf
+                for job, cb in ParallelContext._waitany(ParallelContext._condition):
+                    cb(job.exitcode)
             else:
                 assert any(len(c.pending) for c in ParallelContext._allcontexts)
 
 def makedeferrable(usercb, **userkwargs):
     def cb(*args, **kwargs):
         kwargs.update(userkwargs)
         return usercb(*args, **kwargs)
 
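
The process.py rewrite replaces the poll/waitpid loop with two pools: PopenJob runs on a thread pool (waiting on a child process needs no extra Python process), while PythonJob runs on a process pool so each command's os.chdir and os.environ changes stay isolated from the parent. Completion is signalled through a shared multiprocessing.Condition: the pool's apply_async callback marks the job done and notifies, and _waitany() blocks on the condition instead of polling. A self-contained sketch of that handshake, with invented names standing in for the pymake classes:

# Sketch only; the names here are stand-ins, not pymake's own.
import multiprocessing, multiprocessing.dummy, time

class Job(object):
    done = False
    exitcode = -127

    def notify(self, condition, result):
        condition.acquire()
        self.done = True
        self.exitcode = result
        condition.notify()
        condition.release()

def run(seconds):
    time.sleep(seconds)
    return 0

if __name__ == '__main__':
    condition = multiprocessing.Condition()
    pool = multiprocessing.dummy.Pool(processes=2)   # thread pool, as for PopenJob
    job = Job()
    pool.apply_async(run, args=(0.25,),
                     callback=lambda result: job.notify(condition, result))

    condition.acquire()
    while not job.done:          # re-check after each wake-up, as _waitany() does
        condition.wait()
    condition.release()

    pool.close()
    pool.join()
    print(job.exitcode)          # 0
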
new file mode 100644
--- /dev/null
+++ b/build/pymake/tests/native-environment.mk
@@ -0,0 +1,10 @@
+#T gmake skip
+export EXPECTED := some data
+
+CMD = %pycmd writeenvtofile
+PYCOMMANDPATH = $(TESTPATH)
+
+all:
+	$(CMD) results EXPECTED
+	test "$$(cat results)" = "$(EXPECTED)"
+	@echo TEST-PASS
new file mode 100644
--- /dev/null
+++ b/build/pymake/tests/native-pycommandpath-sep.mk
@@ -0,0 +1,21 @@
+#T gmake skip
+EXPECTED := some data
+
+# verify that we can load native command modules from
+# multiple directories in PYCOMMANDPATH separated by the native
+# path separator
+ifdef __WIN32__
+PS:=;
+else
+PS:=:
+endif
+CMD = %pycmd writetofile
+CMD2 = %pymod writetofile
+PYCOMMANDPATH = $(TESTPATH)$(PS)$(TESTPATH)/subdir
+
+all:
+	$(CMD) results $(EXPECTED)
+	test "$$(cat results)" = "$(EXPECTED)"
+	$(CMD2) results2 $(EXPECTED)
+	test "$$(cat results2)" = "$(EXPECTED)"
+	@echo TEST-PASS
new file mode 100644
--- /dev/null
+++ b/build/pymake/tests/native-pycommandpath.mk
@@ -0,0 +1,15 @@
+#T gmake skip
+EXPECTED := some data
+
+# verify that we can load native command modules from
+# multiple space-separated directories in PYCOMMANDPATH
+CMD = %pycmd writetofile
+CMD2 = %pymod writetofile
+PYCOMMANDPATH = $(TESTPATH) $(TESTPATH)/subdir
+
+all:
+	$(CMD) results $(EXPECTED)
+	test "$$(cat results)" = "$(EXPECTED)"
+	$(CMD2) results2 $(EXPECTED)
+	test "$$(cat results2)" = "$(EXPECTED)"
+	@echo TEST-PASS
new file mode 100644
--- /dev/null
+++ b/build/pymake/tests/native-simple.mk
@@ -0,0 +1,10 @@
+ifndef TOUCH
+TOUCH = touch
+endif
+
+all: testfile
+	test -f testfile
+	@echo TEST-PASS
+
+testfile:
+	$(TOUCH) $@
new file mode 100644
--- /dev/null
+++ b/build/pymake/tests/parallel-native.mk
@@ -0,0 +1,21 @@
+#T commandline: ['-j2']
+
+# ensure that calling python commands doesn't block other targets
+ifndef SLEEP
+SLEEP := sleep
+endif
+
+PRINTF = printf "$@:0:" >>results
+EXPECTED = target2:0:target1:0:
+
+all:: target1 target2
+	cat results
+	test "$$(cat results)" = "$(EXPECTED)"
+	@echo TEST-PASS
+
+target1:
+	$(SLEEP) 0.1
+	$(PRINTF)
+
+target2:
+	$(PRINTF)
new file mode 100644
--- /dev/null
+++ b/build/pymake/tests/pycmd.py
@@ -0,0 +1,9 @@
+import os
+
+def writetofile(args):
+  with open(args[0], 'w') as f:
+    f.write(' '.join(args[1:]))
+
+def writeenvtofile(args):
+  with open(args[0], 'w') as f:
+    f.write(os.environ[args[1]])
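
pycmd.py above is all a native command module needs to be: an importable module whose functions take the remaining words of the recipe line as a list, with PYCOMMANDPATH telling pymake where to find it. A hedged illustration of what the %pycmd recipes in these tests reduce to (the path below is hypothetical):

# Illustrative only.
import sys
sys.path.insert(0, 'build/pymake/tests')    # stand-in for PYCOMMANDPATH

import pycmd
pycmd.writetofile(['results', 'some', 'data'])
# 'results' now contains "some data", which the .mk tests compare against
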
new file mode 100644
--- /dev/null
+++ b/build/pymake/tests/rm-fail.mk
@@ -0,0 +1,7 @@
+#T returncode: 2
+all:
+	mkdir newdir
+	test -d newdir
+	touch newdir/newfile
+	$(RM) newdir
+	@echo TEST-PASS
new file mode 100644
--- /dev/null
+++ b/build/pymake/tests/rm.mk
@@ -0,0 +1,13 @@
+all:
+# $(RM) defaults to -f
+	$(RM) nosuchfile
+	touch newfile
+	test -f newfile
+	$(RM) newfile
+	test ! -f newfile
+	mkdir newdir
+	test -d newdir
+	touch newdir/newfile
+	$(RM) -r newdir
+	test ! -d newdir
+	@echo TEST-PASS
--- a/build/pymake/tests/runtests.py
+++ b/build/pymake/tests/runtests.py
@@ -1,8 +1,9 @@
+#!/usr/bin/env python
 """
 Run the test(s) listed on the command line. If a directory is listed, the script will recursively
 walk the directory for files named .mk and run each.
 
 For each test, we run gmake -f test.mk. By default, make must exit with an exit code of 0, and must print 'TEST-PASS'.
 
 Each test is run in an empty directory.
 
new file mode 100644
--- /dev/null
+++ b/build/pymake/tests/subdir/pymod.py
@@ -0,0 +1,3 @@
+def writetofile(args):
+  with open(args[0], 'w') as f:
+    f.write(' '.join(args[1:]))