Merging from default (branch: production)
author Justin Wood <Callek@gmail.com>
Tue, 01 Jul 2014 17:46:23 -0700
branch production
changeset 2906 c100cf364cf7904765a3fbdacb7e1cbc1a5d7d07
parent 2904 d47a93589041b656c718a9bc6a1cec6ca2f82110 (current diff)
parent 2905 e89b33af090000d32e98a7693fa25b3d892a296d (diff)
child 2909 0b97c42fea853a1d8f829e28c45fb1298faa2934
push id 2179
push user Callek@gmail.com
push date Wed, 02 Jul 2014 00:46:24 +0000
bugs 986112, 1028816, 874510, 1025731, 898554, 1025322
Merging from default

changeset: 2889:e095aa215e3c
parent:    2887:dd042bcf9f47
user:      Armen Zambrano Gasparnian <armenzg@mozilla.com>
date:      Tue Jun 24 17:27:02 2014 -0400
summary:   Bug 986112 - Make blobber uploads discoverable. r=aki

changeset: 2890:c49fa160c924
user:      Armen Zambrano Gasparnian <armenzg@mozilla.com>
date:      Tue Jun 24 17:37:07 2014 -0400
summary:   Backout e095aa215e3c. r=backout

changeset: 2891:c4e0534d7b2e
user:      Jonathan Griffin <jgriffin@mozilla.com>
date:      Wed Jun 25 16:36:49 2014 -0700
summary:   Bug 1028816 - Pass env variables on the command-line, r=jhford

changeset: 2892:126900c316ea
user:      Armen Zambrano Gasparnian <armenzg@mozilla.com>
date:      Thu Jun 26 09:13:41 2014 -0400
summary:   Bug 986112 - Request blobuploader to upload a summary of all uploaded files. r=aki

changeset: 2893:e8c037e4feca
user:      Jonathan Griffin <jgriffin@mozilla.com>
date:      Thu Jun 26 10:50:12 2014 -0700
summary:   Bug 1028816 - Remove quotes

changeset: 2895:90e3a18d9ac7
parent:    2893:e8c037e4feca
user:      Jonathan Griffin <jgriffin@mozilla.com>
date:      Mon Jun 30 12:12:45 2014 -0700
summary:   Bug 874510 - Use make update-common instead of make node_modules, r=julienw

changeset: 2896:b2576e91fd9e
user:      Jonathan Griffin <jgriffin@mozilla.com>
date:      Mon Jun 30 12:48:50 2014 -0700
summary:   Backed out changeset 90e3a18d9ac7

changeset: 2897:8135cd2fdad9
user:      Jonathan Griffin <jgriffin@mozilla.com>
date:      Mon Jun 30 13:31:46 2014 -0700
summary:   Bug 1025731 - Use new xre.zip on linux64_gecko Gi

changeset: 2898:383ef298caf0
user:      Steve Fink <sfink@mozilla.com>
date:      Wed May 28 11:05:58 2014 -0700
summary:   Bug 898554 - Add a new b2g hazard build script, r=catlee

changeset: 2900:35d2981a6365
parent:    2898:383ef298caf0
user:      Jonathan Griffin <jgriffin@mozilla.com>
date:      Mon Jun 30 15:41:26 2014 -0700
summary:   Bug 874510 - Run 'make node_modules' then 'make update-common', r=julienw

changeset: 2901:930858624d92
user:      Steve Fink <sfink@mozilla.com>
date:      Mon Jun 30 16:46:02 2014 -0700
summary:   Bug 898554 - Spot fix some path breakage due to part of a later patch (fallback manifests) sneaking in

changeset: 2902:404dfba5c39d
user:      Jonathan Griffin <jgriffin@mozilla.com>
date:      Mon Jun 30 16:48:49 2014 -0700
summary:   Bug 1025731 - Use new osx xre.zip for OSX tests

changeset: 2905:e89b33af0900
tag:       tip
parent:    2902:404dfba5c39d
user:      John Ford <jhford@mozilla.com>
date:      Tue Jul 01 11:16:20 2014 -0700
summary:   Bug 1025322 - gaia-try: do a merge locally instead of using the pull request reference r=aki
--- a/configs/b2g/gaia_integration_config.py
+++ b/configs/b2g/gaia_integration_config.py
@@ -1,17 +1,17 @@
 # This is a template config file for b2g emulator unittest testing
 import platform
 
 HG_SHARE_BASE_DIR = "/builds/hg-shared"
 
 if platform.system().lower() == 'darwin':
-    xre_url = "http://runtime-binaries.pvt.build.mozilla.org/tooltool/sha512/cb675b8a50a4df7c510d0ba09ddec99950aaa63373f69e69ee86b89755fd04944b140ce02ffdc9faa80e34f53752896a38c91fbab0febc81c583cb80e8515e9e"
+    xre_url = "http://runtime-binaries.pvt.build.mozilla.org/tooltool/sha512/441be719e6984d24e9eadca5d13a1cd7d22e81505b21a82d25a7da079a48211b5feb4525a6f32100a00748f8a824a341065d66a97be8e932c3a3e1e55ade0ede"
 else:
-    xre_url = "http://runtime-binaries.pvt.build.mozilla.org/tooltool/sha512/d4a0da54e75c27cd2f535e66b586f119ef08b3bde4a9eee03662d296b3434189c542c0a7e7a75954030c04396a9823e22e1f884f5d87c0f4017944cd50ff38de"
+    xre_url = "http://runtime-binaries.pvt.build.mozilla.org/tooltool/sha512/b48e7defed365b5899f4a782304e4c621e94c6759e32fdec66aa3e088688401e4c404b1778cd0c6b947d9faa874f60a68e1c7d8ccaa5f2d25077eafad5d533cc"
 
 config = {
     # mozharness script options
     "xre_url": xre_url,
 
     # mozharness configuration
     "tooltool_servers": ["http://runtime-binaries.pvt.build.mozilla.org/tooltool/"],
 
new file mode 100644
--- /dev/null
+++ b/configs/hazards/build_b2g.py
@@ -0,0 +1,22 @@
+config = {
+    'build_command': "build.b2g",
+    'expect_file': "expect.b2g.json",
+
+    'default_actions': [
+        'checkout-tools',
+        'checkout-sources',
+        'get-blobs',
+        'clobber-shell',
+        'configure-shell',
+        'build-shell',
+        'clobber-analysis',
+        'setup-analysis',
+        'run-analysis',
+        'collect-analysis-output',
+        'upload-analysis',
+        'check-expectations',
+    ],
+
+    'sixgill_manifest': "build/sixgill-b2g.manifest",
+    'b2g_target_compiler_prefix': "target_compiler/gcc/linux-x86/arm/arm-linux-androideabi-4.7/bin/arm-linux-androideabi-",
+}
--- a/configs/hazards/common.py
+++ b/configs/hazards/common.py
@@ -1,23 +1,25 @@
 HG_SHARE_BASE_DIR = "/builds/hg-shared"
 
 PYTHON_DIR = "/tools/python27"
 SRCDIR = "source"
-ANALYSIS_SCRIPTDIR = SRCDIR + "/js/src/devtools/rootAnalysis"
 
 config = {
-    "log_name": "spidermonkey",
+    "log_name": "hazards",
     "shell-objdir": "obj-opt-js",
     "analysis-dir": "analysis",
     "analysis-objdir": "obj-analyzed",
     "srcdir": SRCDIR,
+    "analysis-scriptdir": "js/src/devtools/rootAnalysis",
 
-    "sixgill": SRCDIR + "/sixgill/usr/libexec/sixgill",
-    "sixgill_bin": SRCDIR + "/sixgill/usr/bin",
+    # These paths are relative to the tooltool checkout location
+    "sixgill": "sixgill/usr/libexec/sixgill",
+    "sixgill_bin": "sixgill/usr/bin",
+
     "python": PYTHON_DIR + "/bin/python2.7",
 
     "exes": {
         'hgtool.py': '%(abs_tools_dir)s/buildfarm/utils/hgtool.py',
         'gittool.py': '%(abs_tools_dir)s/buildfarm/utils/gittool.py',
         'tooltool.py': '/tools/tooltool.py',
     },
 
@@ -29,19 +31,20 @@ config = {
         "repo": "https://hg.mozilla.org/build/tools",
         "revision": "default",
         "dest": "tools"
     }],
 
     "upload_remote_baseuri": 'https://ftp-ssl.mozilla.org/',
 
     'tools_dir': "/tools",
-    'compiler_manifest': ANALYSIS_SCRIPTDIR + "/build/gcc.manifest",
+    'compiler_manifest': "build/gcc.manifest",
+    'b2g_compiler_manifest': "build/gcc-b2g.manifest",
     'compiler_setup': "setup.sh.gcc",
-    'sixgill_manifest': ANALYSIS_SCRIPTDIR + "/build/sixgill.manifest",
+    'sixgill_manifest': "build/sixgill.manifest",
     'sixgill_setup': "setup.sh.sixgill",
 
     # Mock.
     "mock_packages": [
         "autoconf213", "mozilla-python27-mercurial", "ccache",
         "zip", "zlib-devel", "glibc-static",
         "openssh-clients", "mpfr", "wget", "rsync",
 
@@ -71,16 +74,16 @@ config = {
         'gstreamer-devel', 'gstreamer-plugins-base-devel',
     ],
     "mock_files": [
         ("/home/cltbld/.ssh", "/home/mock_mozilla/.ssh"),
         ("/tools/tooltool.py", "/tools/tooltool.py"),
     ],
     "env_replacements": {
         "pythondir": PYTHON_DIR,
-        "gccdir": "%(abs_work_dir)s/" + SRCDIR + "/gcc",
-        "sixgilldir": "%(abs_work_dir)s/" + SRCDIR + "/sixgill",
+        "gccdir": "%(abs_work_dir)s/gcc",
+        "sixgilldir": "%(abs_work_dir)s/sixgill",
     },
     "partial_env": {
         "PATH": "%(pythondir)s/bin:%(gccdir)s/bin:%(PATH)s",
         "LD_LIBRARY_PATH": "%(sixgilldir)s/usr/lib64",
     },
 }
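
Note on the relocated paths above: the hazard scripts expand these values in two passes — env_replacements entries are first interpolated against the absolute directories, then partial_env is interpolated against those replacements plus the inherited environment (see _pre_config_lock and query_env in the build scripts below). A minimal standalone sketch of that order, with an invented abs_work_dir:

import os

# Invented directory value; a real run gets this from query_abs_dirs().
dirs = {"abs_work_dir": "/builds/slave/build"}

env_replacements = {
    "pythondir": "/tools/python27",
    "gccdir": "%(abs_work_dir)s/gcc",
    "sixgilldir": "%(abs_work_dir)s/sixgill",
}
partial_env = {
    "PATH": "%(pythondir)s/bin:%(gccdir)s/bin:%(PATH)s",
    "LD_LIBRARY_PATH": "%(sixgilldir)s/usr/lib64",
}

# Pass 1: expand directory placeholders in the replacement values.
replacements = dict((k, v % dirs) for k, v in env_replacements.items())

# Pass 2: expand the replacements (plus the inherited environment) into
# the partial environment, roughly what query_env() does.
base = dict(os.environ, **replacements)
env = dict((k, v % base) for k, v in partial_env.items())
print(env["LD_LIBRARY_PATH"])  # /builds/slave/build/sixgill/usr/lib64
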
--- a/configs/marionette/gaia_ui_test_prod_config.py
+++ b/configs/marionette/gaia_ui_test_prod_config.py
@@ -1,18 +1,18 @@
 # This is a template config file for marionette production.
 import os
 import platform
 
 HG_SHARE_BASE_DIR = "/builds/hg-shared"
 
 if platform.system().lower() == 'darwin':
-    xre_url = "http://runtime-binaries.pvt.build.mozilla.org/tooltool/sha512/cb675b8a50a4df7c510d0ba09ddec99950aaa63373f69e69ee86b89755fd04944b140ce02ffdc9faa80e34f53752896a38c91fbab0febc81c583cb80e8515e9e"
+    xre_url = "http://runtime-binaries.pvt.build.mozilla.org/tooltool/sha512/441be719e6984d24e9eadca5d13a1cd7d22e81505b21a82d25a7da079a48211b5feb4525a6f32100a00748f8a824a341065d66a97be8e932c3a3e1e55ade0ede"
 else:
-    xre_url = "http://runtime-binaries.pvt.build.mozilla.org/tooltool/sha512/d4a0da54e75c27cd2f535e66b586f119ef08b3bde4a9eee03662d296b3434189c542c0a7e7a75954030c04396a9823e22e1f884f5d87c0f4017944cd50ff38de"
+    xre_url = "http://runtime-binaries.pvt.build.mozilla.org/tooltool/sha512/b48e7defed365b5899f4a782304e4c621e94c6759e32fdec66aa3e088688401e4c404b1778cd0c6b947d9faa874f60a68e1c7d8ccaa5f2d25077eafad5d533cc"
 
 config = {
     # marionette options
     "test_type": "b2g",
     "marionette_address": "localhost:2828",
     "gaiatest": True,
     "xre_url": xre_url,
     "application": "b2g",
new file mode 100644
--- /dev/null
+++ b/configs/users/sfink/mock.py
@@ -0,0 +1,3 @@
+config = {
+     "mock_target": "mozilla-centos6-x86_64",
+}
--- a/configs/users/sfink/spidermonkey.py
+++ b/configs/users/sfink/spidermonkey.py
@@ -30,14 +30,14 @@ config = {
     "tooltool_servers": [ "http://localhost/tooltool" ],
 
     "mock_target": "mozilla-centos6-x86_64",
 
     "upload_remote_basepath": "/tmp/upload-base",
     "upload_ssh_server": "localhost",
     "upload_ssh_key": "/home/sfink/.ssh/id_rsa",
     "upload_ssh_user": "sfink",
-    "target": "linux64-br-haz",
+    "upload_label": "linux64-br-haz",
 
     # For testing tryserver uploads (directory structure is different)
     #"branch": "try",
     #"revision": "deadbeef1234",
 }
--- a/mozharness/mozilla/blob_upload.py
+++ b/mozharness/mozilla/blob_upload.py
@@ -15,24 +15,24 @@ blobupload_config_options = [
     ]
 
 
 class BlobUploadMixin(VirtualenvMixin):
     """Provides mechanism to automatically upload files written in
     MOZ_UPLOAD_DIR to the blobber upload server at the end of the
     running script.
 
-    This is dependent on ScriptMixin.
+    This is dependent on ScriptMixin and BuildbotMixin.
     The testing script inheriting this class is to specify as cmdline
     options the <blob-upload-branch> and <blob-upload-server>
 
     """
     def __init__(self, *args, **kwargs):
         requirements = [
-            'blobuploader==1.1.7',
+            'blobuploader==1.2.0',
         ]
         super(BlobUploadMixin, self).__init__(*args, **kwargs)
         for req in requirements:
             self.register_virtualenv_module(req, method='pip')
 
     def upload_blobber_files(self):
         self.debug("Check branch and server cmdline options.")
         if self.config.get('blob_upload_branch') and \
@@ -70,20 +70,30 @@ class BlobUploadMixin(VirtualenvMixin):
                                self.config.get('default_blob_upload_servers'))
 
             servers = []
             for server in blob_servers_list:
                 servers.extend(['-u', server])
             auth = ['-a', auth_file]
             branch = ['-b', blob_branch]
             dir_to_upload = ['-d', blob_dir]
+            # Have blobberc record in this manifest file whether a summary file was uploaded
+            manifest_path = os.path.join(dirs['abs_work_dir'], "blobber_manifest.txt")
+            open(manifest_path, 'w').close()  # create an empty manifest file
+            record_uploaded_files = ['--output-manifest-url', manifest_path]
             self.info("Files from %s are to be uploaded with <%s> branch at "
                       "the following location(s): %s" % (blob_dir, blob_branch,
                       ", ".join(["%s" % s for s in blob_servers_list])))
 
             # call blob client to upload files to server
-            self.run_command(upload + servers + auth + branch + dir_to_upload)
+            self.run_command(upload + servers + auth + branch + dir_to_upload + record_uploaded_files)
+            # If blobberc wrote anything into the manifest file, a summary manifest was uploaded
+            if os.path.getsize(manifest_path) > 0:
+                blobber_manifest_url = self.read_from_file(manifest_path)
+                self.set_buildbot_property(prop_name="blobber_manifest_url",
+                        prop_value=blobber_manifest_url, write_to_file=True)
+            self.rmtree(manifest_path)
         else:
             self.warning("Blob upload gear skipped. Missing cmdline options.")
 
     @PostScriptRun
     def _upload_blobber_files(self):
         self.upload_blobber_files()
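
Note: the manifest handshake in upload_blobber_files() is easy to miss — the script hands blobberc an empty file via --output-manifest-url, and any bytes written back are taken as the URL of an uploaded summary manifest. A toy sketch of that contract, with a stub standing in for blobberc (the stub and URL are invented; only the empty-file/size-check protocol comes from the code above):

import os
import tempfile

def fake_blobberc(output_manifest, uploaded_summary_url=None):
    # Stand-in for blobberc: writes the summary URL back only if a
    # summary manifest was actually uploaded.
    if uploaded_summary_url:
        with open(output_manifest, 'w') as fh:
            fh.write(uploaded_summary_url)

manifest_path = os.path.join(tempfile.mkdtemp(), "blobber_manifest.txt")
open(manifest_path, 'w').close()  # create the empty handshake file

fake_blobberc(manifest_path, "https://blobber.example/summary.manifest")

# Mirrors upload_blobber_files(): any content means a manifest was uploaded.
if os.path.getsize(manifest_path) > 0:
    with open(manifest_path) as fh:
        print("blobber_manifest_url = " + fh.read().strip())
os.remove(manifest_path)
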
--- a/mozharness/mozilla/building/buildb2gbase.py
+++ b/mozharness/mozilla/building/buildb2gbase.py
@@ -69,16 +69,21 @@ class B2GBuildBaseScript(BuildbotMixin, 
         [["--variant"], {
             "dest": "variant",
             "help": "b2g build variant. overrides gecko config's value",
         }],
         [["--checkout-revision"], {
             "dest": "checkout_revision",
             "help": "checkout a specific gecko revision.",
         }],
+        [["--disable-mock"], {
+            "dest": "disable_mock",
+            "action": "store_true",
+            "help": "do not run under mock despite what gecko-config says",
+        }],
     ]
 
     def __init__(self,
                  config_options=[],
                  require_config_file=False,
                  config={},
                  all_actions=[],
                  default_actions=[]):
--- a/mozharness/mozilla/gaia.py
+++ b/mozharness/mozilla/gaia.py
@@ -27,87 +27,159 @@ class GaiaMixin(object):
               clone gaia; if False, repo represents a gaia repo to clone.
         """
 
         repo_path = repo.get('repo_path')
         revision = repo.get('revision')
         branch = repo.get('branch')
         gaia_json_path = self.config.get("gaia_json_path", "{repo_path}/raw-file/{revision}/b2g/config/gaia.json")
         git = False
-        pr_num = None
+        pr_git_revision = None
+        pr_remote = None
 
         self.info('dest: %s' % dest)
 
         if use_gaia_json:
             url = gaia_json_path.format(
                 repo_path=repo_path,
                 revision=revision)
             contents = self.retry(self.load_json_from_url, args=(url,))
             if contents.get('git') and contents['git'].get('remote'):
                 git = True
                 remote = contents['git']['remote']
                 branch = contents['git'].get('branch')
                 revision = contents['git'].get('git_revision')
-                pr_num = contents['git'].get('github_pr_number')
+                pr_git_revision = contents['git'].get('pr_git_revision')
+                pr_remote = contents['git'].get('pr_remote')
+                if pr_remote or pr_git_revision:
+                    if not (pr_remote and pr_git_revision):
+                        self.fatal('Pull request mode requires rev *and* remote')
                 if not (branch or revision):
                     self.fatal('Must specify branch or revision for git repo')
             elif contents.get('repo_path') and contents.get('revision'):
                 repo_path = 'https://hg.mozilla.org/%s' % contents['repo_path']
                 revision = contents['revision']
                 branch = None
 
         if git:
             git_cmd = self.query_exe('git')
             needs_clobber = True
 
-            if os.path.exists(dest) and os.path.exists(os.path.join(dest, '.git')):
+            # For pull requests, we only want to clobber when we can't find the
+            # two exact commit ids that we'll be working with.  As long as we
+            # have those two commits, we don't care about the rest of the repo.
+            def has_needed_commit(commit):
+                cmd = [git_cmd, 'rev-parse', '--quiet', '--verify', '%s^{commit}' % commit]
+                rc = self.run_command(cmd, cwd=dest, halt_on_failure=False, success_codes=[1, 0])
+                return rc == 0
+
+            if not pr_remote and os.path.exists(dest) and os.path.exists(os.path.join(dest, '.git')):
                 cmd = [git_cmd, 'remote', '-v']
                 output = self.get_output_from_command(cmd, cwd=dest)
                 for line in output:
                     if remote in line:
                         needs_clobber = False
 
+
+            # We want to do some cleanup logic differently for pull requests
+            if pr_git_revision and pr_remote:
+                needs_clobber = False
+                if os.path.exists(dest) and os.path.exists(os.path.join(dest, '.git')):
+                    cmd = [git_cmd, 'clean', '--force', '-x', '-d']
+                    self.run_command(cmd, cwd=dest, halt_on_failure=True,
+                                     fatal_exit_code=3)
+                    if not has_needed_commit(revision):
+                        cmd = [git_cmd, 'fetch', 'origin']
+                        self.run_command(cmd, cwd=dest, halt_on_failure=True,
+                                         fatal_exit_code=3)
+                    if not has_needed_commit(revision):
+                        self.warning('Repository does not contain required revisions, clobbering')
+                        needs_clobber = True
+
             if needs_clobber:
                 self.rmtree(dest)
 
-            # git clone
-            cmd = [git_cmd,
-                   'clone',
-                   remote]
-            self.run_command(cmd,
-                             cwd=os.path.dirname(dest),
-                             output_timeout=1760,
-                             halt_on_failure=True,
-                             fatal_exit_code=3)
+            # In pull request mode, we don't want to clone if the existing
+            # checkout already contains a usable .git directory
+            needs_clone = True
+            if pr_git_revision and pr_remote:
+                if os.path.exists(dest) and os.path.exists(os.path.join(dest, '.git')):
+                    needs_clone = False
 
-            if pr_num:
-                local_pr_branch = 'local_pr_%d' % pr_num
+            if needs_clone:
+                # git clone
                 cmd = [git_cmd,
-                       'fetch',
-                       'origin',
-                       '+refs/pull/%d/merge:%s' % (pr_num, local_pr_branch)]
+                       'clone',
+                       remote]
                 self.run_command(cmd,
-                                 cwd=dest,
+                                 cwd=os.path.dirname(dest),
                                  output_timeout=1760,
                                  halt_on_failure=True,
                                  fatal_exit_code=3)
-                # Ideally, we'd just use the merge_sha from the github api
-                # but the PR object that gets sent with the hook call is
-                # either an empty string for new PRs or an outdated value
-                # for synchronise events.  I guess Github doesn't generate
-                # a new merge sha before sending the webhook
-                branch = revision = local_pr_branch
 
             # checkout git branch
             cmd = [git_cmd,
                    'checkout',
                    revision or branch]
             self.run_command(cmd, cwd=dest, halt_on_failure=True,
                              fatal_exit_code=3)
 
+            # Handle the pull-request merge
+            if pr_git_revision and pr_remote:
+                # Optimization opportunity: instead of fetching all remote
+                # references, fetch only the single commit we need; we don't
+                # currently have a way to do that.
+
+                # If the 'other' remote exists, get rid of it
+                cmd = [git_cmd, 'remote']
+                output = self.get_output_from_command(cmd, cwd=dest)
+                for line in output.split('\n'):
+                    if 'other' in line:
+                        cmd = [git_cmd, 'remote', 'rm', 'other']
+                        self.run_command(cmd, cwd=dest, halt_on_failure=True,
+                                         fatal_exit_code=3)
+                        break
+                # Set the correct remote
+                cmd = [git_cmd, 'remote', 'add', 'other', pr_remote]
+                self.run_command(cmd, cwd=dest, halt_on_failure=True,
+                                 fatal_exit_code=3)
+                if not has_needed_commit(pr_git_revision):
+                    cmd = [git_cmd, 'fetch', 'other']
+                    self.run_command(cmd, cwd=dest, halt_on_failure=True,
+                                     fatal_exit_code=3)
+                if not has_needed_commit(pr_git_revision):
+                    self.fatal('Missing the Pull Request target revision')
+
+                # With these environment variables we should have deterministic
+                # merge commit identifiers
+                self.info('To verify that this merge commit matches the one')
+                self.info('you get locally, use this environment while merging')
+                env = {
+                    'GIT_COMMITTER_DATE': "Wed Feb 16 14:00 2037 +0100",
+                    'GIT_AUTHOR_DATE': "Wed Feb 16 14:00 2037 +0100",
+                    'GIT_AUTHOR_NAME': 'automation',
+                    'GIT_AUTHOR_EMAIL': 'auto@mati.on',
+                    'GIT_COMMITTER_NAME': 'automation',
+                    'GIT_COMMITTER_EMAIL': 'auto@mati.on',
+                }
+                cmd = [git_cmd, 'reset', '--hard', 'HEAD']
+                self.run_command(cmd, cwd=dest, halt_on_failure=True,
+                                 fatal_exit_code=3)
+                cmd = [git_cmd, 'clean', '--force', '-x', '-d']
+                self.run_command(cmd, cwd=dest, halt_on_failure=True,
+                                 fatal_exit_code=3)
+                cmd = [git_cmd, 'merge', '--no-ff', pr_git_revision]
+                self.run_command(cmd, cwd=dest, env=env, halt_on_failure=True,
+                                 fatal_exit_code=3)
+                # So that people can verify that their merge commit is identical
+                cmd = [git_cmd, 'rev-parse', 'HEAD']
+                self.run_command(cmd, cwd=dest, halt_on_failure=True,
+                                 fatal_exit_code=3)
+
             # verify
             for cmd in ([git_cmd, 'log', '-1'], [git_cmd, 'branch']):
                 self.run_command(cmd, cwd=dest, halt_on_failure=True,
                                  fatal_exit_code=3)
 
         else:
             # purge the repo if it already exists
             if os.path.exists(dest):
@@ -149,20 +221,23 @@ class GaiaMixin(object):
                     if err:
                         self.fatal("Error while reading %s, aborting" %
                                    build_config_path)
                     else:
                         contents = f.read()
                         config = json.loads(contents)
                         env.update(config.get('env', {}))
 
-        make = self.query_exe('make', return_type="list")
-        self.run_command(make,
+        self.info('Sending environment as make vars because of bug 1028816')
+
+        cmd = self.query_exe('make', return_type="list")
+        for key, value in env.iteritems():
+            cmd.append('%s=%s' % (key, value))
+        self.run_command(cmd,
                          cwd=gaia_dir,
-                         env=env,
                          halt_on_failure=True)
 
     def make_node_modules(self):
         dirs = self.query_abs_dirs()
 
         self.run_command(['npm', 'cache', 'clean'])
 
         # run 'make node_modules' first, so we can separately handle
@@ -181,8 +256,19 @@ class GaiaMixin(object):
             # Dump npm-debug.log, if it exists
             npm_debug = os.path.join(dirs['abs_gaia_dir'], 'npm-debug.log')
             if os.access(npm_debug, os.F_OK):
                 self.info('dumping npm-debug.log')
                 self.run_command(['cat', npm_debug])
             else:
                 self.info('npm-debug.log doesn\'t exist, not dumping')
             self.fatal('Errors during \'npm install\'', exit_code=code)
+
+        cmd = ['make',
+               'update-common']
+        kwargs = {
+            'cwd': dirs['abs_gaia_dir'],
+            'output_timeout': 300
+        }
+        code = self.retry(self.run_command, attempts=3, good_statuses=(0,),
+                          args=[cmd], kwargs=kwargs)
+        if code:
+            self.fatal('Errors during make update-common')
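
Note: the pinned GIT_* variables in the pull-request merge above are what make the merge commit reproducible — a commit id hashes the tree, both parents, and the author/committer names, emails, and dates, so fixing all of those yields the same SHA-1 on any machine. A minimal sketch of the same trick outside mozharness (the repo path and revision arguments are placeholders):

import os
import subprocess

def deterministic_merge(repo_dir, pr_revision):
    # Pin every identity/date input to the merge commit; the values
    # mirror the ones hard-coded in gaia.py above.
    env = dict(os.environ)
    env.update({
        'GIT_COMMITTER_DATE': 'Wed Feb 16 14:00 2037 +0100',
        'GIT_AUTHOR_DATE': 'Wed Feb 16 14:00 2037 +0100',
        'GIT_AUTHOR_NAME': 'automation',
        'GIT_AUTHOR_EMAIL': 'auto@mati.on',
        'GIT_COMMITTER_NAME': 'automation',
        'GIT_COMMITTER_EMAIL': 'auto@mati.on',
    })
    subprocess.check_call(['git', 'merge', '--no-ff', pr_revision],
                          cwd=repo_dir, env=env)
    # Anyone repeating the merge with this env should see the same id.
    return subprocess.check_output(['git', 'rev-parse', 'HEAD'],
                                   cwd=repo_dir).strip()

# Usage (placeholder paths): deterministic_merge('/path/to/gaia', 'other/pr-head')
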
new file mode 100755
--- /dev/null
+++ b/scripts/hazard_build.py
@@ -0,0 +1,513 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+from datetime import datetime
+from functools import wraps
+import json
+import re
+
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.errors import MakefileErrorList
+from mozharness.mozilla.buildbot import TBPL_WARNING
+from mozharness.mozilla.building.buildb2gbase import B2GBuildBaseScript
+
+from b2g_build import B2GMakefileErrorList
+
+SUCCESS, WARNINGS, FAILURE, EXCEPTION, RETRY = xrange(5)
+
+nuisance_env_vars = ['TERMCAP', 'LS_COLORS', 'PWD', '_']
+
+
+def requires(*queries):
+    def make_wrapper(f):
+        @wraps(f)
+        def wrapper(self, *args, **kwargs):
+            for query in queries:
+                val = query(self)
+                assert (val is not None and "None" not in str(val)), "invalid " + query.__name__
+            return f(self, *args, **kwargs)
+        return wrapper
+    return make_wrapper
+
+
+class B2GHazardBuild(B2GBuildBaseScript):
+    all_actions = [
+        'checkout-tools',
+        'clobber',
+        'checkout-sources',
+        'get-blobs',
+        'update-source-manifest',
+        'clobber-shell',
+        'configure-shell',
+        'build-shell',
+        'clobber-analysis',
+        'setup-analysis',
+        'run-analysis',
+        'collect-analysis-output',
+        'upload-analysis',
+        'check-expectations',
+    ]
+
+    default_actions = [
+        'checkout-tools',
+        'checkout-sources',
+        'get-blobs',
+        'clobber-shell',
+        'configure-shell',
+        'build-shell',
+        'clobber-analysis',
+        'setup-analysis',
+        'run-analysis',
+        'collect-analysis-output',
+        'upload-analysis',
+        'check-expectations',
+    ]
+
+    def __init__(self):
+        super(B2GHazardBuild, self).__init__(
+            config_options=[],
+            config={
+                'default_vcs': 'hgtool',
+                'ccache': False,
+                'mozilla_dir': 'build/gecko',
+
+                'upload_ssh_server': None,
+                'upload_remote_basepath': None,
+                'enable_try_uploads': True,
+            },
+            all_actions=B2GHazardBuild.all_actions,
+            default_actions=B2GHazardBuild.default_actions,
+        )
+
+        self.buildtime = None
+
+    def _pre_config_lock(self, rw_config):
+        super(B2GHazardBuild, self)._pre_config_lock(rw_config)
+
+        if self.buildbot_config:
+            bb_props = [('tooltool_url_list', 'tooltool_servers', ['http://runtime-binaries.pvt.build.mozilla.org/tooltool']),
+                        ]
+            buildbot_props = self.buildbot_config.get('properties', {})
+            for bb_prop, cfg_prop, default in bb_props:
+                if not self.config.get(cfg_prop) and buildbot_props.get(bb_prop, default):
+                    self.config[cfg_prop] = buildbot_props.get(bb_prop, default)
+            self.config['is_automation'] = True
+        else:
+            self.config['is_automation'] = False
+
+        dirs = self.query_abs_dirs()
+        replacements = self.config['env_replacements'].copy()
+        for k, v in replacements.items():
+            replacements[k] = v % dirs
+
+        self.env = self.query_env(replace_dict=replacements,
+                                  partial_env=self.config['partial_env'],
+                                  purge_env=nuisance_env_vars)
+
+    def query_abs_dirs(self):
+        if self.abs_dirs:
+            return self.abs_dirs
+        abs_dirs = super(B2GHazardBuild, self).query_abs_dirs()
+
+        abs_work_dir = abs_dirs['abs_work_dir']
+        dirs = {
+            'b2g_src':
+                abs_work_dir,
+            'target_compiler_base':
+                os.path.join(abs_dirs['abs_work_dir'], 'target_compiler'),
+            'shell_objdir':
+                os.path.join(abs_work_dir, self.config['shell-objdir']),
+            'mozharness_scriptdir':
+                os.path.abspath(os.path.dirname(__file__)),
+            'abs_analysis_dir':
+                os.path.join(abs_work_dir, self.config['analysis-dir']),
+            'abs_analyzed_objdir':
+                os.path.join(abs_work_dir, self.config['srcdir'], self.config['analysis-objdir']),
+            'analysis_scriptdir':
+                os.path.join(abs_dirs['gecko_src'], self.config['analysis-scriptdir'])
+        }
+
+        abs_dirs.update(dirs)
+        self.abs_dirs = abs_dirs
+        return self.abs_dirs
+
+    def query_branch(self):
+        if self.buildbot_config and 'properties' in self.buildbot_config:
+            return self.buildbot_config['properties']['branch']
+        elif 'branch' in self.config:
+            # Used for locally testing try vs non-try
+            return self.config['branch']
+        else:
+            return os.path.basename(self.query_repo())
+
+    def query_compiler_manifest(self):
+        dirs = self.query_abs_dirs()
+        return os.path.join(dirs['analysis_scriptdir'], self.config['compiler_manifest'])
+
+    def query_b2g_compiler_manifest(self):
+        dirs = self.query_abs_dirs()
+        return os.path.join(dirs['analysis_scriptdir'], self.config['b2g_compiler_manifest'])
+
+    def query_sixgill_manifest(self):
+        dirs = self.query_abs_dirs()
+        return os.path.join(dirs['analysis_scriptdir'], self.config['sixgill_manifest'])
+
+    def query_buildtime(self):
+        if self.buildtime:
+            return self.buildtime
+        self.buildtime = datetime.now().strftime("%Y%m%d%H%M%S")
+        return self.buildtime
+
+    def query_upload_ssh_server(self):
+        if self.buildbot_config and 'properties' in self.buildbot_config:
+            return self.buildbot_config['properties']['upload_ssh_server']
+        else:
+            return self.config['upload_ssh_server']
+
+    def query_upload_ssh_key(self):
+        if self.buildbot_config and 'properties' in self.buildbot_config:
+            key = self.buildbot_config['properties']['upload_ssh_key']
+        else:
+            key = self.config['upload_ssh_key']
+        if self.mock_enabled and not key.startswith("/"):
+            key = "/home/mock_mozilla/.ssh/" + key
+        return key
+
+    def query_upload_ssh_user(self):
+        if self.buildbot_config and 'properties' in self.buildbot_config:
+            return self.buildbot_config['properties']['upload_ssh_user']
+        else:
+            return self.config['upload_ssh_user']
+
+    def query_product(self):
+        if self.buildbot_config and 'properties' in self.buildbot_config:
+            return self.buildbot_config['properties']['product']
+        else:
+            return self.config['product']
+
+    def query_upload_remote_basepath(self):
+        if self.config.get('upload_remote_basepath'):
+            return self.config['upload_remote_basepath']
+        else:
+            return "/pub/mozilla.org/{product}".format(
+                product=self.query_product(),
+            )
+
+    def query_upload_remote_baseuri(self):
+        baseuri = self.config.get('upload_remote_baseuri')
+        if self.buildbot_config and 'properties' in self.buildbot_config:
+            buildprops = self.buildbot_config['properties']
+            if 'upload_remote_baseuri' in buildprops:
+                baseuri = buildprops['upload_remote_baseuri']
+        return baseuri.strip("/") if baseuri else None
+
+    def query_upload_label(self):
+        if self.buildbot_config and 'properties' in self.buildbot_config:
+            return self.buildbot_config['properties']['platform']
+        else:
+            return self.config.get('upload_label')
+
+    def query_upload_path(self):
+        branch = self.query_branch()
+
+        common = {
+            'basepath': self.query_upload_remote_basepath(),
+            'branch': branch,
+            'target': self.query_upload_label(),
+        }
+
+        if branch == 'try':
+            if not self.config['enable_try_uploads']:
+                return None
+            try:
+                user = self.buildbot_config['sourcestamp']['changes'][0]['who']
+            except (KeyError, TypeError):
+                user = "unknown"
+            return "{basepath}/try-builds/{user}-{rev}/{branch}-{target}".format(
+                user=user,
+                rev=self.query_revision(),
+                **common
+            )
+        else:
+            return "{basepath}/tinderbox-builds/{branch}-{target}/{buildtime}".format(
+                buildtime=self.query_buildtime(),
+                **common
+            )
+
+    def query_do_upload(self):
+        if self.query_branch() == 'try':
+            return self.config.get('enable_try_uploads')
+        return True
+
+    def make_source_dir(self):
+        dirs = self.query_abs_dirs()
+        dest = dirs['b2g_src']
+        if not os.path.exists(dest):
+            self.mkdir_p(dest)
+
+    # Actions {{{2
+    def checkout_sources(self):
+        self.make_source_dir()
+        super(B2GHazardBuild, self).checkout_sources()
+
+    def get_blobs(self):
+        dirs = self.query_abs_dirs()
+        self.tooltool_fetch(self.query_compiler_manifest(), "sh " + self.config['compiler_setup'],
+                            dirs['abs_work_dir'])
+        self.tooltool_fetch(self.query_sixgill_manifest(), "sh " + self.config['sixgill_setup'],
+                            dirs['abs_work_dir'])
+        if not os.path.exists(dirs['target_compiler_base']):
+            self.mkdir_p(dirs['target_compiler_base'])
+        self.tooltool_fetch(self.query_b2g_compiler_manifest(), "sh " + self.config['compiler_setup'],
+                            dirs['target_compiler_base'])
+
+    def clobber_shell(self):
+        dirs = self.query_abs_dirs()
+        self.rmtree(dirs['shell_objdir'])
+
+    def configure_shell(self):
+        dirs = self.query_abs_dirs()
+
+        if not os.path.exists(dirs['shell_objdir']):
+            self.mkdir_p(dirs['shell_objdir'])
+
+        js_src_dir = os.path.join(dirs['gecko_src'], 'js', 'src')
+        rc = self.run_command(['autoconf-2.13'],
+                              cwd=js_src_dir,
+                              env=self.env,
+                              error_list=MakefileErrorList)
+        if rc != 0:
+            self.fatal("autoconf failed, can't continue.", exit_code=FAILURE)
+
+        rc = self.run_command([os.path.join(js_src_dir, 'configure'),
+                               '--enable-optimize',
+                               '--disable-debug',
+                               '--enable-ctypes',
+                               '--with-system-nspr',
+                               '--without-intl-api'],
+                              cwd=dirs['shell_objdir'],
+                              env=self.env,
+                              error_list=MakefileErrorList)
+        if rc != 0:
+            self.fatal("Configure failed, can't continue.", exit_code=FAILURE)
+
+    def build_shell(self):
+        dirs = self.query_abs_dirs()
+
+        rc = self.run_command(['make', '-j', str(self.config.get('concurrency', 4)), '-s'],
+                              cwd=dirs['shell_objdir'],
+                              env=self.env,
+                              error_list=MakefileErrorList)
+        if rc != 0:
+            self.fatal("Build failed, can't continue.", exit_code=FAILURE)
+
+    def clobber_analysis(self):
+        dirs = self.query_abs_dirs()
+        self.rmtree(dirs['abs_analysis_dir'])
+        self.rmtree(dirs['abs_analyzed_objdir'])
+
+    def setup_analysis(self):
+        dirs = self.query_abs_dirs()
+        analysis_dir = dirs['abs_analysis_dir']
+
+        if not os.path.exists(analysis_dir):
+            self.mkdir_p(analysis_dir)
+
+        values = {
+            'js': os.path.join(dirs['shell_objdir'], 'dist', 'bin', 'js'),
+            'analysis_scriptdir': dirs['analysis_scriptdir'],
+            'source_objdir': dirs['abs_analyzed_objdir'],
+            'source': os.path.join(dirs['abs_work_dir'], 'source'),
+            'sixgill': os.path.join(dirs['abs_work_dir'], self.config['sixgill']),
+            'sixgill_bin': os.path.join(dirs['abs_work_dir'], self.config['sixgill_bin']),
+        }
+        defaults = """
+js = '%(js)s'
+analysis_scriptdir = '%(analysis_scriptdir)s'
+objdir = '%(source_objdir)s'
+source = '%(source)s'
+sixgill = '%(sixgill)s'
+sixgill_bin = '%(sixgill_bin)s'
+jobs = 2
+""" % values
+
+        defaults_path = os.path.join(analysis_dir, 'defaults.py')
+        with open(defaults_path, "w") as fh:
+            fh.write(defaults)
+        self.log("Wrote analysis config file " + defaults_path)
+
+        build_command = self.config['build_command']
+        self.copyfile(os.path.join(dirs['mozharness_scriptdir'],
+                                   os.path.join('spidermonkey', build_command)),
+                      os.path.join(analysis_dir, build_command),
+                      copystat=True)
+
+    def run_analysis(self):
+        dirs = self.query_abs_dirs()
+        analysis_dir = dirs['abs_analysis_dir']
+        analysis_scriptdir = dirs['analysis_scriptdir']
+
+        gecko_config = self.load_gecko_config()
+        env = self.query_build_env().copy()
+        self.enable_mock()
+
+        build_command = self.config['build_command']
+        build_command = os.path.abspath(os.path.join(analysis_dir, build_command))
+        gonk_misc = os.path.join(dirs['b2g_src'], 'gonk-misc')
+        mozconfig = os.path.join(gonk_misc, 'hazard-analysis-config')
+        mozconfig_text = '. "%s/default-gecko-config"\n' % gonk_misc
+        basecc = os.path.join(dirs['abs_work_dir'], self.config['sixgill'], 'scripts', 'wrap_gcc', 'basecc')
+        mozconfig_text += "ac_add_options --with-compiler-wrapper=" + basecc + "\n"
+        mozconfig_text += "ac_add_options --without-ccache\n"
+        with open(mozconfig, "w") as fh:
+            fh.write(mozconfig_text)
+
+        # Stuff I set in my .userconfig for manual builds
+        env['B2G_SOURCE'] = dirs['b2g_src']
+        env['MOZCONFIG_PATH'] = mozconfig
+        env['GECKO_PATH'] = dirs['gecko_src']
+        env['TARGET_TOOLS_PREFIX'] = os.path.join(dirs['abs_work_dir'], self.config['b2g_target_compiler_prefix'])
+
+        cmd = [
+            self.config['python'],
+            os.path.join(analysis_scriptdir, 'analyze.py'),
+            "--source", dirs['gecko_src'],
+            "--buildcommand", build_command,
+        ]
+        retval = self.run_command(cmd,
+                                  cwd=analysis_dir,
+                                  env=env,
+                                  error_list=B2GMakefileErrorList)
+        if retval != 0:
+            self.fatal("failed to build", exit_code=2)
+
+        self.disable_mock()
+
+    def collect_analysis_output(self):
+        dirs = self.query_abs_dirs()
+        analysis_dir = dirs['abs_analysis_dir']
+        upload_dir = dirs['abs_upload_dir']
+        self.mkdir_p(upload_dir)
+        files = (('rootingHazards.txt',
+                  'rooting_hazards',
+                  'list of rooting hazards, unsafe references, and extra roots'),
+                 ('gcFunctions.txt',
+                  'gcFunctions',
+                  'list of functions that can gc, and why'),
+                 ('gcTypes.txt',
+                  'gcTypes',
+                  'list of types containing unrooted gc pointers'),
+                 ('unnecessary.txt',
+                  'extra',
+                  'list of extra roots (rooting with no GC function in scope)'),
+                 ('refs.txt',
+                  'refs',
+                  'list of unsafe references to unrooted pointers'),
+                 ('hazards.txt',
+                  'hazards',
+                  'list of just the hazards, together with gcFunction reason for each'))
+        for f, short, long in files:
+            self.copy_to_upload_dir(os.path.join(analysis_dir, f),
+                                    short_desc=short,
+                                    long_desc=long,
+                                    compress=True)
+
+    @requires(query_upload_path,
+              query_upload_ssh_key,
+              query_upload_ssh_user,
+              query_upload_ssh_server,
+              query_upload_remote_baseuri)
+    def upload_analysis(self):
+        if not self.query_do_upload():
+            self.info("Uploads disabled for this build. Skipping...")
+            return
+
+        dirs = self.query_abs_dirs()
+        upload_path = self.query_upload_path()
+
+        retval = self.rsync_upload_directory(
+            dirs['abs_upload_dir'],
+            self.query_upload_ssh_key(),
+            self.query_upload_ssh_user(),
+            self.query_upload_ssh_server(),
+            upload_path)
+
+        if retval is not None:
+            self.error("failed to upload")
+            self.return_code = WARNINGS
+        else:
+            upload_url = "{baseuri}{upload_path}".format(
+                baseuri=self.query_upload_remote_baseuri(),
+                upload_path=upload_path,
+            )
+            self.info("TinderboxPrint: upload <a title='hazards_results' href='%s'>results</a>: complete" % upload_url)
+
+    def check_expectations(self):
+        if 'expect_file' not in self.config:
+            self.info('No expect_file given; skipping comparison with expected hazard count')
+            return
+
+        dirs = self.query_abs_dirs()
+        analysis_dir = dirs['abs_analysis_dir']
+        analysis_scriptdir = os.path.join(dirs['gecko_src'], 'js', 'src', 'devtools', 'rootAnalysis')
+        expect_file = os.path.join(analysis_scriptdir, self.config['expect_file'])
+        expect = self.read_from_file(expect_file)
+        if expect is None:
+            self.fatal("could not load expectation file")
+        data = json.loads(expect)
+
+        num_hazards = 0
+        num_refs = 0
+        with self.opened(os.path.join(analysis_dir, "rootingHazards.txt")) as (hazards_fh, err):
+            if err:
+                self.fatal("hazards file required")
+            for line in hazards_fh:
+                m = re.match(r"^Function.*has unrooted.*live across GC call", line)
+                if m:
+                    num_hazards += 1
+
+                m = re.match(r'^Function.*takes unsafe address of unrooted', line)
+                if m:
+                    num_refs += 1
+
+        expect_hazards = data.get('expect-hazards')
+        status = []
+        if expect_hazards is None:
+            status.append("%d hazards" % num_hazards)
+        else:
+            status.append("%d/%d hazards allowed" % (num_hazards, expect_hazards))
+
+        if expect_hazards is not None and expect_hazards != num_hazards:
+            if expect_hazards < num_hazards:
+                self.warning("TEST-UNEXPECTED-FAIL %d more hazards than expected (expected %d, saw %d)" %
+                             (num_hazards - expect_hazards, expect_hazards, num_hazards))
+                self.buildbot_status(TBPL_WARNING)
+            else:
+                self.info("%d fewer hazards than expected! (expected %d, saw %d)" %
+                          (expect_hazards - num_hazards, expect_hazards, num_hazards))
+
+        expect_refs = data.get('expect-refs')
+        if expect_refs is None:
+            status.append("%d unsafe refs" % num_refs)
+        else:
+            status.append("%d/%d unsafe refs allowed" % (num_refs, expect_refs))
+
+        if expect_refs is not None and expect_refs != num_refs:
+            if expect_refs < num_refs:
+                self.warning("TEST-UNEXPECTED-FAIL %d more unsafe refs than expected (expected %d, saw %d)" %
+                             (num_refs - expect_refs, expect_refs, num_refs))
+                self.buildbot_status(TBPL_WARNING)
+            else:
+                self.info("%d fewer unsafe refs than expected! (expected %d, saw %d)" %
+                          (expect_refs - num_refs, expect_refs, num_refs))
+
+        self.info("TinderboxPrint: " + ", ".join(status))
+
+# main {{{1
+if __name__ == '__main__':
+    myScript = B2GHazardBuild()
+    myScript.run_and_exit()
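
Note: check_expectations() implies the shape of the expect file referenced by 'expect_file' (e.g. expect.b2g.json): a JSON object whose optional integer keys are expect-hazards and expect-refs; a missing key just reports the count without enforcing a bound. A sketch with invented counts:

import json

# Hypothetical expect.b2g.json contents; the key names come from
# check_expectations(), the numbers are invented.
expect = json.loads('{"expect-hazards": 0, "expect-refs": 7}')

num_hazards, num_refs = 0, 5  # counts parsed out of rootingHazards.txt
for label, seen, allowed in [
        ("hazards", num_hazards, expect.get("expect-hazards")),
        ("unsafe refs", num_refs, expect.get("expect-refs"))]:
    if allowed is None:
        print("%d %s" % (seen, label))
    elif seen > allowed:
        print("TEST-UNEXPECTED-FAIL %d more %s than expected" % (seen - allowed, label))
    else:
        print("%d/%d %s allowed" % (seen, allowed, label))
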
new file mode 100755
--- /dev/null
+++ b/scripts/spidermonkey/build.b2g
@@ -0,0 +1,4 @@
+#!/bin/bash -e
+
+cd "$B2G_SOURCE"
+exec ./build.sh MOZCONFIG_PATH="$MOZCONFIG_PATH" gecko
--- a/scripts/spidermonkey_build.py
+++ b/scripts/spidermonkey_build.py
@@ -67,23 +67,21 @@ class SpidermonkeyBuild(MockMixin,
     ]
 
     def __init__(self):
         BaseScript.__init__(self,
                             config_options=self.config_options,
                             # other stuff
                             all_actions=[
                                 'purge',
-                                'setup-mock',
-                                'reuse-mock',
                                 'checkout-tools',
 
                                 # First, build an optimized JS shell for running the analysis
                                 'checkout-source',
-                                'checkout-tooltool',
+                                'get-blobs',
                                 'clobber-shell',
                                 'configure-shell',
                                 'build-shell',
 
                                 # Next, build a tree with the analysis plugin
                                 # active. Note that we are using the same
                                 # checkout for the JS shell build and the build
                                 # of the source to be analyzed, which is a
@@ -94,21 +92,19 @@ class SpidermonkeyBuild(MockMixin,
                                 'setup-analysis',
                                 'run-analysis',
                                 'collect-analysis-output',
                                 'upload-analysis',
                                 'check-expectations',
                             ],
                             default_actions=[
                                 'purge',
-                                #'reuse-mock',
-                                'setup-mock',
                                 'checkout-tools',
                                 'checkout-source',
-                                'checkout-tooltool',
+                                'get-blobs',
                                 'clobber-shell',
                                 'configure-shell',
                                 'build-shell',
                                 'clobber-analysis',
                                 'setup-analysis',
                                 'run-analysis',
                                 'collect-analysis-output',
                                 'upload-analysis',
@@ -123,18 +119,16 @@ class SpidermonkeyBuild(MockMixin,
                                 'tools_repo': 'https://hg.mozilla.org/build/tools',
 
                                 'upload_ssh_server': None,
                                 'upload_remote_basepath': None,
                                 'enable_try_uploads': True,
                             },
         )
 
-        self.env = self.nonmock_env
-
         self.buildtime = None
 
     def _pre_config_lock(self, rw_config):
         super(SpidermonkeyBuild, self)._pre_config_lock(rw_config)
 
         if self.buildbot_config is None:
             self.info("Reading buildbot build properties...")
             self.read_buildbot_config()
@@ -159,51 +153,52 @@ class SpidermonkeyBuild(MockMixin,
         else:
             self.config['is_automation'] = False
 
         dirs = self.query_abs_dirs()
         replacements = self.config['env_replacements'].copy()
         for k,v in replacements.items():
             replacements[k] = v % dirs
 
-        self.mock_env = self.query_env(replace_dict=replacements,
-                                       partial_env=self.config['partial_env'],
-                                       purge_env=nuisance_env_vars)
-        self.nonmock_env = self.query_env(replace_dict=replacements,
-                                          partial_env=self.config['partial_env'],
-                                          purge_env=nuisance_env_vars)
+        self.env = self.query_env(replace_dict=replacements,
+                                  partial_env=self.config['partial_env'],
+                                  purge_env=nuisance_env_vars)
 
     def query_abs_dirs(self):
         if self.abs_dirs:
             return self.abs_dirs
         abs_dirs = BaseScript.query_abs_dirs(self)
 
         abs_work_dir = abs_dirs['abs_work_dir']
         dirs = {
             'shell_objdir':
                 os.path.join(abs_work_dir, self.config['shell-objdir']),
             'mozharness_scriptdir':
                 os.path.abspath(os.path.dirname(__file__)),
             'abs_analysis_dir':
                 os.path.join(abs_work_dir, self.config['analysis-dir']),
             'abs_analyzed_objdir':
                 os.path.join(abs_work_dir, self.config['srcdir'], self.config['analysis-objdir']),
+            'analysis_scriptdir':
+                os.path.join(self.config['srcdir'], self.config['analysis-scriptdir']),
             'abs_tools_dir':
                 os.path.join(abs_dirs['base_work_dir'], 'tools'),
         }
 
         abs_dirs.update(dirs)
         self.abs_dirs = abs_dirs
         return self.abs_dirs
 
     def query_repo(self):
-        if self.buildbot_config and 'properties' in self.buildbot_config:
+        if self.config.get('repo'):
+            return self.config['repo']
+        elif self.buildbot_config and 'properties' in self.buildbot_config:
             return self.config['hgurl'] + self.buildbot_config['properties']['repo_path']
         else:
-            return self.config['repo']
+            return None
 
     def query_revision(self):
         if 'revision' in self.buildbot_properties:
             return self.buildbot_properties['revision']
 
         if self.buildbot_config and 'sourcestamp' in self.buildbot_config:
             return self.buildbot_config['sourcestamp']['revision']
 
@@ -216,20 +211,26 @@ class SpidermonkeyBuild(MockMixin,
         elif 'branch' in self.config:
             # Used for locally testing try vs non-try
             return self.config['branch']
         else:
             return os.path.basename(self.query_repo())
 
     def query_compiler_manifest(self):
         dirs = self.query_abs_dirs()
+        manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'], self.config['compiler_manifest'])
+        if os.path.exists(manifest):
+            return manifest
         return os.path.join(dirs['abs_work_dir'], self.config['compiler_manifest'])
 
     def query_sixgill_manifest(self):
         dirs = self.query_abs_dirs()
+        manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'], self.config['sixgill_manifest'])
+        if os.path.exists(manifest):
+            return manifest
         return os.path.join(dirs['abs_work_dir'], self.config['sixgill_manifest'])
 
     def query_buildtime(self):
         if self.buildtime:
             return self.buildtime
         self.buildtime = datetime.now().strftime("%Y%m%d%H%M%S")
         return self.buildtime
 
@@ -309,35 +310,17 @@ class SpidermonkeyBuild(MockMixin,
                 **common
             )
 
     def query_do_upload(self):
         if self.query_branch() == 'try':
             return self.config.get('enable_try_uploads')
         return True
 
-    def enable_mock(self):
-        self.env = self.mock_env
-        super(SpidermonkeyBuild, self).enable_mock()
-
-    def disable_mock(self):
-        self.env = self.nonmock_env
-        super(SpidermonkeyBuild, self).disable_mock()
-
     # Actions {{{2
-    def setup_mock(self):
-        MockMixin.setup_mock(self)
-        self.enable_mock()
-
-    def reuse_mock(self):
-        """Reuse a mock environment without waiting for it to
-        reinitialize."""
-        self.enable_mock()
-        self.done_mock_setup = True
-
     def purge(self):
         dirs = self.query_abs_dirs()
         PurgeMixin.clobber(
             self,
             always_clobber_dirs=[
                 dirs['abs_upload_dir'],
             ],
         )
@@ -369,31 +352,31 @@ class SpidermonkeyBuild(MockMixin,
 
     @requires(query_repo)
     def checkout_source(self):
         try:
             self.do_checkout_source()
         except Exception as e:
             self.fatal("checkout failed: " + str(e), exit_code=RETRY)
 
-    def checkout_tooltool(self):
+    def get_blobs(self):
         dirs = self.query_abs_dirs()
-        source_dir = os.path.join(dirs['abs_work_dir'], 'source')
         self.tooltool_fetch(self.query_compiler_manifest(), "sh " + self.config['compiler_setup'],
-                            source_dir)
+                            dirs['abs_work_dir'])
         self.tooltool_fetch(self.query_sixgill_manifest(), "sh " + self.config['sixgill_setup'],
-                            source_dir)
+                            dirs['abs_work_dir'])
 
     def clobber_shell(self):
         dirs = self.query_abs_dirs()
         self.rmtree(dirs['shell_objdir'])
 
     def configure_shell(self):
+        self.enable_mock()
+
         dirs = self.query_abs_dirs()
-
         if not os.path.exists(dirs['shell_objdir']):
             self.mkdir_p(dirs['shell_objdir'])
 
         rc = self.run_command(['autoconf-2.13'],
                               cwd=dirs['abs_work_dir'] + '/source/js/src',
                               env=self.env,
                               error_list=MakefileErrorList)
         if rc != 0:
@@ -406,26 +389,31 @@ class SpidermonkeyBuild(MockMixin,
                                '--with-system-nspr',
                                '--without-intl-api'],
                               cwd=dirs['shell_objdir'],
                               env=self.env,
                               error_list=MakefileErrorList)
         if rc != 0:
             self.fatal("Configure failed, can't continue.", exit_code=FAILURE)
 
+        self.disable_mock()
+
     def build_shell(self):
+        self.enable_mock()
+
         dirs = self.query_abs_dirs()
-
         rc = self.run_command(['make', '-j', str(self.config['concurrency']), '-s'],
                               cwd=dirs['shell_objdir'],
                               env=self.env,
                               error_list=MakefileErrorList)
         if rc != 0:
             self.fatal("Build failed, can't continue.", exit_code=FAILURE)
 
+        self.disable_mock()
+
     def clobber_analysis(self):
         dirs = self.query_abs_dirs()
         self.rmtree(dirs['abs_analysis_dir'])
         self.rmtree(dirs['abs_analyzed_objdir'])
 
     def setup_analysis(self):
         dirs = self.query_abs_dirs()
         analysis_dir = dirs['abs_analysis_dir']
@@ -454,16 +442,18 @@ jobs = 2
 
         build_command = self.config['build_command']
         self.copyfile(os.path.join(dirs['mozharness_scriptdir'],
                                    os.path.join('spidermonkey', build_command)),
                       os.path.join(analysis_dir, build_command),
                       copystat=True)
 
     def run_analysis(self):
+        self.enable_mock()
+
         dirs = self.query_abs_dirs()
         analysis_dir = dirs['abs_analysis_dir']
         analysis_scriptdir = os.path.join(dirs['abs_work_dir'], 'source/js/src/devtools/rootAnalysis')
 
         # The build for the analysis is always a clobber build,
         # because the analysis needs to see every compile to work
         self.rmtree(dirs['abs_analyzed_objdir'])
 
@@ -475,16 +465,18 @@ jobs = 2
                 "--buildcommand=%s" % build_command,
             ],
             cwd=analysis_dir,
             env=self.env,
             error_list=MakefileErrorList)
         if rc != 0:
             self.fatal("analysis failed, can't continue.", exit_code=FAILURE)
 
+        self.disable_mock()
+
     def collect_analysis_output(self):
         dirs = self.query_abs_dirs()
         analysis_dir = dirs['abs_analysis_dir']
         upload_dir = dirs['abs_upload_dir']
         self.mkdir_p(upload_dir)
         files = (('rootingHazards.txt',
                   'rooting_hazards',
                   'list of rooting hazards, unsafe references, and extra roots'),
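
Note: the manifest queries above now look in two places because, during the transition the merged log calls "fallback manifests", the gcc and sixgill manifests may live either in the in-tree analysis script directory or at the old flat location under the work dir. The lookup logic, restated as a standalone helper (the helper name is illustrative):

import os

def resolve_manifest(abs_work_dir, analysis_scriptdir, manifest_name):
    # Prefer the manifest inside the checkout's analysis script
    # directory; fall back to the historical flat location.
    candidate = os.path.join(abs_work_dir, analysis_scriptdir, manifest_name)
    if os.path.exists(candidate):
        return candidate
    return os.path.join(abs_work_dir, manifest_name)
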
--- a/test/test_mozilla_blob_upload.py
+++ b/test/test_mozilla_blob_upload.py
@@ -84,14 +84,18 @@ class TestBlobUploadMechanism(unittest.T
         file_name = os.path.join(parent_dir, 'test_mock_blob_file')
         self.s.write_to_file(file_name, content)
         self.s.upload_blobber_files()
 
         expected_result = ['/path/to/python', '/path/to/blobberc', '-u',
                            'http://blob_server.me', '-a',
                            os.path.abspath(__file__), '-b', 'test-branch', '-d']
         expected_result.append(self.s.query_abs_dirs()['abs_blob_upload_dir'])
+        expected_result += [
+            '--output-manifest-url',
+            os.path.join(self.s.query_abs_dirs()['abs_work_dir'], "blobber_manifest.txt")
+        ]
         self.assertEqual(expected_result, self.s.command)
 
 
 # main {{{1
 if __name__ == '__main__':
     unittest.main()
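
Note: assembling the test's expected_result, the blobberc command the mixin now builds looks roughly like this (the python/blobberc paths, server, branch, and directories are the test's stand-in values; the auth file in the test is the test module itself):

upload = ['/path/to/python', '/path/to/blobberc']
cmd = (upload
       + ['-u', 'http://blob_server.me']      # one -u per upload server
       + ['-a', '/path/to/oauth.txt']         # auth file (stand-in value)
       + ['-b', 'test-branch']                # blob upload branch
       + ['-d', '/path/to/blob_upload_dir']   # directory to upload
       + ['--output-manifest-url',            # new in this change
          '/path/to/work_dir/blobber_manifest.txt'])
print(' '.join(cmd))
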