Bug 1338651: Change docker image home dir to /build. r=dustin,mshal a=hopefullyavoidingmergebustagelater
☠☠ backed out by 587daa4bdc4b ☠ ☠
authorWander Lairson Costa <wcosta@mozilla.com>
Thu, 29 Jun 2017 15:45:01 -0700
changeset 406014 f8a62747c51c862b6e2f5c0af543ef21d34abf3b
parent 406006 57b57379c60c6b938881f80cd58a62479358465a
child 406015 7cc250ff4f6eea2e264906f78dbd9f5be67f3dbe
push id58
push userfmarier@mozilla.com
push dateThu, 06 Jul 2017 00:08:44 +0000
reviewersdustin, mshal, hopefullyavoidingmergebustagelater
bugs1338651
milestone56.0a1
Bug 1338651: Change docker image home dir to /build. r=dustin,mshal a=hopefullyavoidingmergebustagelater Using /home/worker as the build directory has a 30% talos performance loss, because test machines have a /home mount directory. MozReview-Commit-ID: zehcGJrUQX
CLOBBER
build/build-clang/clang-static-analysis-linux64.json
build/build-clang/clang-static-analysis-macosx64.json
build/build-clang/clang-tidy-linux64.json
build/build-clang/clang-tidy-macosx64.json
taskcluster/actions/registry.py
taskcluster/ci/android-stuff/kind.yml
taskcluster/ci/build/android.yml
taskcluster/ci/hazard/kind.yml
taskcluster/ci/nightly-fennec/docker_build.yml
taskcluster/ci/source-test/doc.yml
taskcluster/ci/source-test/mozlint.yml
taskcluster/ci/source-test/python-tests.yml
taskcluster/docker/android-gradle-build/Dockerfile
taskcluster/docker/android-gradle-build/VERSION
taskcluster/docker/android-gradle-build/bin/after.sh
taskcluster/docker/android-gradle-build/bin/build.sh
taskcluster/docker/android-gradle-build/bin/checkout-sources.sh
taskcluster/docker/centos6-build-upd/Dockerfile
taskcluster/docker/centos6-build-upd/VERSION
taskcluster/docker/centos6-build/Dockerfile
taskcluster/docker/centos6-build/VERSION
taskcluster/docker/centos6-build/system-setup.sh
taskcluster/docker/desktop-build/Dockerfile
taskcluster/docker/desktop-build/bin/build.sh
taskcluster/docker/desktop-build/bin/checkout-sources.sh
taskcluster/docker/desktop-test/Dockerfile
taskcluster/docker/desktop1604-test/Dockerfile
taskcluster/docker/lint/Dockerfile
taskcluster/scripts/builder/build-l10n.sh
taskcluster/scripts/builder/build-linux.sh
taskcluster/scripts/builder/repackage.sh
taskcluster/scripts/misc/build-clang-windows-helper32.sh
taskcluster/scripts/misc/build-clang-windows-helper64.sh
taskcluster/scripts/misc/tooltool-download.sh
taskcluster/taskgraph/action.yml
taskcluster/taskgraph/transforms/android_stuff.py
taskcluster/taskgraph/transforms/job/common.py
taskcluster/taskgraph/transforms/job/hazard.py
taskcluster/taskgraph/transforms/job/mozharness.py
taskcluster/taskgraph/transforms/job/mozharness_test.py
taskcluster/taskgraph/transforms/job/run_task.py
taskcluster/taskgraph/transforms/job/spidermonkey.py
taskcluster/taskgraph/transforms/job/toolchain.py
taskcluster/taskgraph/transforms/marionette_harness.py
taskcluster/taskgraph/transforms/repackage.py
testing/mozharness/configs/android/androidarm_4_3.py
testing/mozharness/configs/android/androidx86.py
testing/mozharness/configs/builds/build_pool_specifics.py
testing/mozharness/configs/builds/releng_base_android_64_builds.py
testing/mozharness/configs/builds/releng_base_linux_32_builds.py
testing/mozharness/configs/builds/releng_base_linux_64_builds.py
testing/mozharness/configs/builds/releng_base_mac_64_cross_builds.py
testing/mozharness/configs/builds/releng_sub_linux_configs/64_add-on-devel.py
testing/mozharness/configs/builds/releng_sub_linux_configs/64_artifact.py
testing/mozharness/configs/builds/releng_sub_linux_configs/64_stat_and_debug.py
testing/mozharness/configs/builds/releng_sub_linux_configs/64_stat_and_opt.py
testing/mozharness/configs/firefox_ui_tests/taskcluster.py
testing/mozharness/configs/marionette/prod_config.py
testing/mozharness/configs/single_locale/tc_android-api-15.py
testing/mozharness/configs/single_locale/tc_linux32.py
testing/mozharness/configs/single_locale/tc_linux64.py
testing/mozharness/configs/single_locale/tc_macosx64.py
testing/mozharness/configs/unittests/linux_unittest.py
testing/mozharness/mozharness/mozilla/testing/codecoverage.py
--- a/CLOBBER
+++ b/CLOBBER
@@ -17,9 +17,9 @@
 #
 # Modifying this file will now automatically clobber the buildbot machines \o/
 #
 
 # Are you updating CLOBBER because you think it's needed for your WebIDL
 # changes to stick? As of bug 928195, this shouldn't be necessary! Please
 # don't change CLOBBER for WebIDL changes any more.
 
-Bug 1353650 - Update to ICU 59 requires clobber
+Bug 1338651 - Update OSX Taskcluster docker image
--- a/build/build-clang/clang-static-analysis-linux64.json
+++ b/build/build-clang/clang-static-analysis-linux64.json
@@ -5,18 +5,18 @@
     "build_type": "Release",
     "assertions": false,
     "llvm_repo": "https://llvm.org/svn/llvm-project/llvm/tags/RELEASE_390/final",
     "clang_repo": "https://llvm.org/svn/llvm-project/cfe/tags/RELEASE_390/final",
     "compiler_repo": "https://llvm.org/svn/llvm-project/compiler-rt/tags/RELEASE_390/final",
     "libcxx_repo": "https://llvm.org/svn/llvm-project/libcxx/tags/RELEASE_390/final",
     "libcxxabi_repo": "https://llvm.org/svn/llvm-project/libcxxabi/tags/RELEASE_390/final",
     "python_path": "/usr/bin/python2.7",
-    "gcc_dir": "/home/worker/workspace/build/src/gcc",
-    "cc": "/home/worker/workspace/build/src/gcc/bin/gcc",
-    "cxx": "/home/worker/workspace/build/src/gcc/bin/g++",
-    "as": "/home/worker/workspace/build/src/gcc/bin/gcc",
+    "gcc_dir": "/builds/worker/workspace/build/src/gcc",
+    "cc": "/builds/worker/workspace/build/src/gcc/bin/gcc",
+    "cxx": "/builds/worker/workspace/build/src/gcc/bin/g++",
+    "as": "/builds/worker/workspace/build/src/gcc/bin/gcc",
     "patches": [
       "llvm-debug-frame.patch",
       "r277806.patch",
       "r285657.patch"
     ]
 }
--- a/build/build-clang/clang-static-analysis-macosx64.json
+++ b/build/build-clang/clang-static-analysis-macosx64.json
@@ -6,24 +6,24 @@
     "assertions": false,
     "osx_cross_compile": true,
     "llvm_repo": "https://llvm.org/svn/llvm-project/llvm/tags/RELEASE_390/final",
     "clang_repo": "https://llvm.org/svn/llvm-project/cfe/tags/RELEASE_390/final",
     "compiler_repo": "https://llvm.org/svn/llvm-project/compiler-rt/tags/RELEASE_390/final",
     "libcxx_repo": "https://llvm.org/svn/llvm-project/libcxx/tags/RELEASE_390/final",
     "libcxxabi_repo": "https://llvm.org/svn/llvm-project/libcxxabi/tags/RELEASE_390/final",
     "python_path": "/usr/bin/python2.7",
-    "gcc_dir": "/home/worker/workspace/build/src/gcc",
-    "cc": "/home/worker/workspace/build/src/clang/bin/clang",
-    "cxx": "/home/worker/workspace/build/src/clang/bin/clang++",
-    "as": "/home/worker/workspace/build/src/clang/bin/clang",
-    "ar": "/home/worker/workspace/build/src/cctools/bin/x86_64-apple-darwin11-ar",
-    "ranlib": "/home/worker/workspace/build/src/cctools/bin/x86_64-apple-darwin11-ranlib",
-    "libtool": "/home/worker/workspace/build/src/cctools/bin/x86_64-apple-darwin11-libtool",
-    "ld": "/home/worker/workspace/build/src/clang/bin/clang",
+    "gcc_dir": "/builds/worker/workspace/build/src/gcc",
+    "cc": "/builds/worker/workspace/build/src/clang/bin/clang",
+    "cxx": "/builds/worker/workspace/build/src/clang/bin/clang++",
+    "as": "/builds/worker/workspace/build/src/clang/bin/clang",
+    "ar": "/builds/worker/workspace/build/src/cctools/bin/x86_64-apple-darwin11-ar",
+    "ranlib": "/builds/worker/workspace/build/src/cctools/bin/x86_64-apple-darwin11-ranlib",
+    "libtool": "/builds/worker/workspace/build/src/cctools/bin/x86_64-apple-darwin11-libtool",
+    "ld": "/builds/worker/workspace/build/src/clang/bin/clang",
     "patches":[
       "llvm-debug-frame.patch",
       "compiler-rt-cross-compile.patch",
       "pr28831-r280042.patch",
       "r277806.patch",
       "r285657.patch"
     ]
 }
--- a/build/build-clang/clang-tidy-linux64.json
+++ b/build/build-clang/clang-tidy-linux64.json
@@ -7,13 +7,13 @@
     "build_clang_tidy": true,
     "llvm_repo": "https://llvm.org/svn/llvm-project/llvm/trunk",
     "clang_repo": "https://llvm.org/svn/llvm-project/cfe/trunk",
     "extra_repo": "https://llvm.org/svn/llvm-project/clang-tools-extra/trunk",
     "compiler_repo": "https://llvm.org/svn/llvm-project/compiler-rt/trunk",
     "libcxx_repo": "https://llvm.org/svn/llvm-project/libcxx/trunk",
     "libcxxabi_repo": "https://llvm.org/svn/llvm-project/libcxxabi/trunk",
     "python_path": "/usr/bin/python2.7",
-    "gcc_dir": "/home/worker/workspace/build/src/gcc",
-    "cc": "/home/worker/workspace/build/src/gcc/bin/gcc",
-    "cxx": "/home/worker/workspace/build/src/gcc/bin/g++",
-    "as": "/home/worker/workspace/build/src/gcc/bin/gcc"
+    "gcc_dir": "/builds/worker/workspace/build/src/gcc",
+    "cc": "/builds/worker/workspace/build/src/gcc/bin/gcc",
+    "cxx": "/builds/worker/workspace/build/src/gcc/bin/g++",
+    "as": "/builds/worker/workspace/build/src/gcc/bin/gcc"
 }
--- a/build/build-clang/clang-tidy-macosx64.json
+++ b/build/build-clang/clang-tidy-macosx64.json
@@ -8,20 +8,20 @@
     "osx_cross_compile": true,
     "llvm_repo": "https://llvm.org/svn/llvm-project/llvm/trunk",
     "clang_repo": "https://llvm.org/svn/llvm-project/cfe/trunk",
     "extra_repo": "https://llvm.org/svn/llvm-project/clang-tools-extra/trunk",
     "compiler_repo": "https://llvm.org/svn/llvm-project/compiler-rt/trunk",
     "libcxx_repo": "https://llvm.org/svn/llvm-project/libcxx/trunk",
     "libcxxabi_repo": "https://llvm.org/svn/llvm-project/libcxxabi/trunk",
     "python_path": "/usr/bin/python2.7",
-    "gcc_dir": "/home/worker/workspace/build/src/gcc",
-    "cc": "/home/worker/workspace/build/src/clang/bin/clang",
-    "cxx": "/home/worker/workspace/build/src/clang/bin/clang++",
-    "as": "/home/worker/workspace/build/src/clang/bin/clang",
-    "ar": "/home/worker/workspace/build/src/cctools/bin/x86_64-apple-darwin11-ar",
-    "ranlib": "/home/worker/workspace/build/src/cctools/bin/x86_64-apple-darwin11-ranlib",
-    "ld": "/home/worker/workspace/build/src/clang/bin/clang",
+    "gcc_dir": "/builds/worker/workspace/build/src/gcc",
+    "cc": "/builds/worker/workspace/build/src/clang/bin/clang",
+    "cxx": "/builds/worker/workspace/build/src/clang/bin/clang++",
+    "as": "/builds/worker/workspace/build/src/clang/bin/clang",
+    "ar": "/builds/worker/workspace/build/src/cctools/bin/x86_64-apple-darwin11-ar",
+    "ranlib": "/builds/worker/workspace/build/src/cctools/bin/x86_64-apple-darwin11-ranlib",
+    "ld": "/builds/worker/workspace/build/src/clang/bin/clang",
     "patches": [
       "llvm-debug-frame.patch",
       "compiler-rt-cross-compile.patch"
     ]
 }
--- a/taskcluster/actions/registry.py
+++ b/taskcluster/actions/registry.py
@@ -195,40 +195,41 @@ def register_callback_action(title, symb
                         parameters['project'], parameters['head_rev'], parameters['pushlog_id']),
                 ],
                 'payload': {
                     'env': {
                         'GECKO_BASE_REPOSITORY': 'https://hg.mozilla.org/mozilla-unified',
                         'GECKO_HEAD_REPOSITORY': parameters['head_repository'],
                         'GECKO_HEAD_REF': parameters['head_ref'],
                         'GECKO_HEAD_REV': parameters['head_rev'],
-                        'HG_STORE_PATH': '/home/worker/checkouts/hg-store',
+                        'HG_STORE_PATH': '/builds/worker/checkouts/hg-store',
                         'ACTION_TASK_GROUP_ID': {'$eval': 'taskGroupId'},
                         'ACTION_TASK_ID': {'$dumps': {'$eval': 'taskId'}},
                         'ACTION_TASK': {'$dumps': {'$eval': 'task'}},
                         'ACTION_INPUT': {'$dumps': {'$eval': 'input'}},
                         'ACTION_CALLBACK': cb.__name__,
                         'ACTION_PARAMETERS': {'$dumps': {'$eval': 'parameters'}},
                     },
                     'cache': {
                         'level-{}-checkouts'.format(parameters['level']):
-                            '/home/worker/checkouts',
+                            '/builds/worker/checkouts',
                     },
                     'features': {
                         'taskclusterProxy': True,
                         'chainOfTrust': True,
                     },
                     'image': docker_image('decision'),
                     'maxRunTime': 1800,
                     'command': [
-                        '/home/worker/bin/run-task', '--vcs-checkout=/home/worker/checkouts/gecko',
+                        '/builds/worker/bin/run-task',
+                        '--vcs-checkout=/builds/worker/checkouts/gecko',
                         '--', 'bash', '-cx',
                         """\
-cd /home/worker/checkouts/gecko &&
-ln -s /home/worker/artifacts artifacts &&
+cd /builds/worker/checkouts/gecko &&
+ln -s /builds/worker/artifacts artifacts &&
 ./mach --log-no-times taskgraph action-callback""",
                     ],
                 },
                 'extra': {
                       'treeherder': {
                         'groupName': 'action-callback',
                         'groupSymbol': 'AC',
                         'symbol': symbol,
--- a/taskcluster/ci/android-stuff/kind.yml
+++ b/taskcluster/ci/android-stuff/kind.yml
@@ -25,40 +25,40 @@ jobs:
             tier: 2
             symbol: tc(Deps)
         worker-type: aws-provisioner-v1/gecko-{level}-b-android
         worker:
             implementation: docker-worker
             os: linux
             docker-image: {in-tree: android-gradle-build}
             env:
-                GRADLE_USER_HOME: "/home/worker/workspace/build/src/dotgradle-online"
+                GRADLE_USER_HOME: "/builds/worker/workspace/build/src/dotgradle-online"
                 MH_BUILD_POOL: "taskcluster"
                 MH_CUSTOM_BUILD_VARIANT_CFG: "api-15-gradle-dependencies"
                 MOZHARNESS_ACTIONS: "get-secrets build multi-l10n update"
                 MOZHARNESS_CONFIG: >
                     builds/releng_base_android_64_builds.py
                     disable_signing.py
                     platform_supports_post_upload_to_latest.py
                 MOZHARNESS_SCRIPT: "mozharness/scripts/fx_desktop_build.py"
-                TOOLTOOL_CACHE: "/home/worker/tooltool-cache"
+                TOOLTOOL_CACHE: "/builds/worker/tooltool-cache"
                 TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android-gradle-dependencies/releng.manifest"
             artifacts:
               - name: public/build
-                path: /home/worker/artifacts/
+                path: /builds/worker/artifacts/
                 type: directory
             caches:
               - name: tooltool-cache
-                mount-point: /home/worker/tooltool-cache
+                mount-point: /builds/worker/tooltool-cache
                 type: persistent
             relengapi-proxy: true
             command:
               - "/bin/bash"
               - "-c"
-              - "/home/worker/bin/before.sh && /home/worker/bin/build.sh && /home/worker/bin/after.sh && true\n"
+              - "/builds/worker/bin/before.sh && /builds/worker/bin/build.sh && /builds/worker/bin/after.sh && true\n"
             max-run-time: 36000
         scopes:
           - docker-worker:relengapi-proxy:tooltool.download.internal
           - docker-worker:relengapi-proxy:tooltool.download.public
         optimizations:
           - - skip-unless-changed
             - - "mobile/android/config/**"
               - "testing/mozharness/configs/builds/releng_sub_android_configs/*gradle_dependencies.py"
@@ -72,36 +72,36 @@ jobs:
             tier: 2
             symbol: tc(test)
         worker-type: aws-provisioner-v1/gecko-{level}-b-android
         worker:
             implementation: docker-worker
             os: linux
             docker-image: {in-tree: desktop-build}
             env:
-                GRADLE_USER_HOME: "/home/worker/workspace/build/src/dotgradle"
+                GRADLE_USER_HOME: "/builds/worker/workspace/build/src/dotgradle"
                 MH_BUILD_POOL: "taskcluster"
                 MH_CUSTOM_BUILD_VARIANT_CFG: "android-test"
                 MOZHARNESS_ACTIONS: "get-secrets build multi-l10n update"
                 MOZHARNESS_CONFIG: >
                     builds/releng_base_android_64_builds.py
                     disable_signing.py
                     platform_supports_post_upload_to_latest.py
                 MOZHARNESS_SCRIPT: "mozharness/scripts/fx_desktop_build.py"
                 TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android-frontend/releng.manifest"
             artifacts:
               - name: public/android/unittest
-                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/reports/tests
+                path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/reports/tests
                 type: directory
               - name: public/build
-                path: /home/worker/artifacts/
+                path: /builds/worker/artifacts/
                 type: directory
             caches:
               - name: tooltool-cache
-                mount-point: /home/worker/tooltool-cache
+                mount-point: /builds/worker/tooltool-cache
                 type: persistent
             relengapi-proxy: true
             command:
               # NOTE: this could probably be a job description with run.using = 'mozharness'
               - "/bin/bash"
               - "bin/build.sh"
             max-run-time: 36000
         scopes:
@@ -120,51 +120,51 @@ jobs:
             tier: 2
             symbol: tc(lint)
         worker-type: aws-provisioner-v1/gecko-{level}-b-android
         worker:
             implementation: docker-worker
             os: linux
             docker-image: {in-tree: desktop-build}
             env:
-                GRADLE_USER_HOME: "/home/worker/workspace/build/src/dotgradle"
+                GRADLE_USER_HOME: "/builds/worker/workspace/build/src/dotgradle"
                 MH_BUILD_POOL: "taskcluster"
                 MH_CUSTOM_BUILD_VARIANT_CFG: "android-lint"
                 MOZHARNESS_ACTIONS: "get-secrets build multi-l10n update"
                 MOZHARNESS_CONFIG: >
                     builds/releng_base_android_64_builds.py
                     disable_signing.py
                     platform_supports_post_upload_to_latest.py
                 MOZHARNESS_SCRIPT: "mozharness/scripts/fx_desktop_build.py"
                 TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android-frontend/releng.manifest"
             artifacts:
               - name: public/android/lint/lint-results-officialAustralisDebug.html
-                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialAustralisDebug.html
+                path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialAustralisDebug.html
                 type: file
               - name: public/android/lint/lint-results-officialAustralisDebug.xml
-                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialAustralisDebug.xml
+                path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialAustralisDebug.xml
                 type: file
               - name: public/android/lint/lint-results-officialAustralisDebug_files
-                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialAustralisDebug_files
+                path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialAustralisDebug_files
                 type: directory
               - name: public/android/lint/lint-results-officialPhotonDebug.html
-                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialPhotonDebug.html
+                path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialPhotonDebug.html
                 type: file
               - name: public/android/lint/lint-results-officialPhotonDebug.xml
-                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialPhotonDebug.xml
+                path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialPhotonDebug.xml
                 type: file
               - name: public/android/lint/lint-results-officialPhotonDebug_files
-                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialPhotonDebug_files
+                path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialPhotonDebug_files
                 type: directory
               - name: public/build
-                path: /home/worker/artifacts/
+                path: /builds/worker/artifacts/
                 type: directory
             caches:
               - name: tooltool-cache
-                mount-point: /home/worker/tooltool-cache
+                mount-point: /builds/worker/tooltool-cache
                 type: persistent
             relengapi-proxy: true
             command:
               # NOTE: this could probably be a job description with run.using = 'mozharness'
               - "/bin/bash"
               - "bin/build.sh"
             max-run-time: 36000
         scopes:
@@ -190,36 +190,36 @@ jobs:
             tier: 2
             symbol: tc(checkstyle)
         worker-type: aws-provisioner-v1/gecko-{level}-b-android
         worker:
             implementation: docker-worker
             os: linux
             docker-image: {in-tree: desktop-build}
             env:
-                GRADLE_USER_HOME: "/home/worker/workspace/build/src/dotgradle"
+                GRADLE_USER_HOME: "/builds/worker/workspace/build/src/dotgradle"
                 MH_BUILD_POOL: "taskcluster"
                 MH_CUSTOM_BUILD_VARIANT_CFG: "android-checkstyle"
                 MOZHARNESS_ACTIONS: "get-secrets build multi-l10n update"
                 MOZHARNESS_CONFIG: >
                     builds/releng_base_android_64_builds.py
                     disable_signing.py
                     platform_supports_post_upload_to_latest.py
                 MOZHARNESS_SCRIPT: "mozharness/scripts/fx_desktop_build.py"
                 TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android-frontend/releng.manifest"
             artifacts:
               - name: public/android/checkstyle/checkstyle.xml
-                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/reports/checkstyle/checkstyle.xml
+                path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/reports/checkstyle/checkstyle.xml
                 type: file
               - name: public/build
-                path: /home/worker/artifacts/
+                path: /builds/worker/artifacts/
                 type: directory
             caches:
               - name: tooltool-cache
-                mount-point: /home/worker/tooltool-cache
+                mount-point: /builds/worker/tooltool-cache
                 type: persistent
             relengapi-proxy: true
             command:
               # NOTE: this could probably be a job description with run.using = 'mozharness'
               - "/bin/bash"
               - "bin/build.sh"
             max-run-time: 36000
         scopes:
@@ -241,39 +241,39 @@ jobs:
             tier: 2
             symbol: tc(findbugs)
         worker-type: aws-provisioner-v1/gecko-{level}-b-android
         worker:
             implementation: docker-worker
             os: linux
             docker-image: {in-tree: desktop-build}
             env:
-                GRADLE_USER_HOME: "/home/worker/workspace/build/src/dotgradle"
+                GRADLE_USER_HOME: "/builds/worker/workspace/build/src/dotgradle"
                 MH_BUILD_POOL: "taskcluster"
                 MH_CUSTOM_BUILD_VARIANT_CFG: "android-findbugs"
                 MOZHARNESS_ACTIONS: "get-secrets build multi-l10n update"
                 MOZHARNESS_CONFIG: >
                     builds/releng_base_android_64_builds.py
                     disable_signing.py
                     platform_supports_post_upload_to_latest.py
                 MOZHARNESS_SCRIPT: "mozharness/scripts/fx_desktop_build.py"
                 TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android-frontend/releng.manifest"
             artifacts:
               - name: public/android/findbugs/findbugs-officialAustralisDebug-output.html
-                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/findbugs/findbugs-officialAustralisDebug-output.html
+                path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/findbugs/findbugs-officialAustralisDebug-output.html
                 type: file
               - name: public/android/findbugs/findbugs-officialPhotonDebug-output.html
-                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/findbugs/findbugs-officialPhotonDebug-output.html
+                path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/findbugs/findbugs-officialPhotonDebug-output.html
                 type: file
               - name: public/build
-                path: /home/worker/artifacts/
+                path: /builds/worker/artifacts/
                 type: directory
             caches:
               - name: tooltool-cache
-                mount-point: /home/worker/tooltool-cache
+                mount-point: /builds/worker/tooltool-cache
                 type: persistent
             relengapi-proxy: true
             command:
               # NOTE: this could probably be a job description with run.using = 'mozharness'
               - "/bin/bash"
               - "bin/build.sh"
             max-run-time: 36000
         scopes:
--- a/taskcluster/ci/build/android.yml
+++ b/taskcluster/ci/build/android.yml
@@ -252,27 +252,27 @@ android-api-15-gradle/opt:
         platform: android-api-15-gradle/opt
         symbol: tc(Bg)
         tier: 2
     worker-type: aws-provisioner-v1/gecko-{level}-b-android
     worker:
         max-run-time: 7200
         env:
             # Bug 1292762 - Set GRADLE_USER_HOME to avoid sdk-manager-plugin intermittent
-            GRADLE_USER_HOME: /home/worker/workspace/build/src/dotgradle
+            GRADLE_USER_HOME: /builds/worker/workspace/build/src/dotgradle
             TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android/releng.manifest"
         artifacts:
           - name: public/android/maven
-            path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/geckoview/maven/
+            path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/geckoview/maven/
             type: directory
           - name: public/build/geckoview_example.apk
-            path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/geckoview_example/outputs/apk/geckoview_example-withGeckoBinaries.apk
+            path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/geckoview_example/outputs/apk/geckoview_example-withGeckoBinaries.apk
             type: file
           - name: public/build
-            path: /home/worker/artifacts/
+            path: /builds/worker/artifacts/
             type: directory
     run:
         using: mozharness
         actions: [get-secrets build multi-l10n update]
         config:
             - builds/releng_base_android_64_builds.py
             - disable_signing.py
             - platform_supports_post_upload_to_latest.py
--- a/taskcluster/ci/hazard/kind.yml
+++ b/taskcluster/ci/hazard/kind.yml
@@ -28,17 +28,17 @@ jobs:
             platform: linux64/debug
             symbol: SM-tc(H)
         worker:
             env:
                 TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/hazard.manifest"
         run:
             using: hazard
             command: >
-                cd /home/worker/checkouts/gecko/taskcluster/scripts/builder
+                cd /builds/worker/checkouts/gecko/taskcluster/scripts/builder
                 && ./build-haz-linux.sh --project shell $HOME/workspace
         when:
             files-changed:
                 - js/public/**
                 - js/src/**
 
     linux64-haz/debug:
         description: "Browser Hazard Analysis Linux"
@@ -50,10 +50,10 @@ jobs:
             symbol: tc(H)
         worker:
             env:
                 TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/hazard.manifest"
         run:
             using: hazard
             mozconfig: "browser/config/mozconfigs/linux64/hazards"
             command: >
-                cd /home/worker/checkouts/gecko/taskcluster/scripts/builder
+                cd /builds/worker/checkouts/gecko/taskcluster/scripts/builder
                 && ./build-haz-linux.sh --project browser $HOME/workspace
--- a/taskcluster/ci/nightly-fennec/docker_build.yml
+++ b/taskcluster/ci/nightly-fennec/docker_build.yml
@@ -7,17 +7,17 @@ task:
 
   scopes:
     # docker build tasks use tc-vcs so include the scope.
     - 'docker-worker:cache:level-{{level}}-{{project}}-tc-vcs'
 
   payload:
 
     cache:
-      level-{{level}}-{{project}}-tc-vcs: '/home/worker/.tc-vcs'
+      level-{{level}}-{{project}}-tc-vcs: '/builds/worker/.tc-vcs'
 
     # All docker builds share a common artifact directory for ease of uploading.
     artifacts:
       'public/build':
         type: directory
-        path: '/home/worker/artifacts/'
+        path: '/builds/worker/artifacts/'
         expires:
           relative-datestamp: '1 year'
--- a/taskcluster/ci/source-test/doc.yml
+++ b/taskcluster/ci/source-test/doc.yml
@@ -7,21 +7,21 @@ sphinx:
         tier: 1
     worker-type: aws-provisioner-v1/gecko-t-linux-xlarge
     worker:
         docker-image: {in-tree: "lint"}
         max-run-time: 1800
         artifacts:
             - type: file
               name: public/docs.tar.gz
-              path: /home/worker/checkouts/gecko/docs.tar.gz
+              path: /builds/worker/checkouts/gecko/docs.tar.gz
     run:
         using: run-task
         command: >
-            cd /home/worker/checkouts/gecko &&
+            cd /builds/worker/checkouts/gecko &&
             ./mach doc --outdir docs-out --no-open &&
             rm -rf docs-out/html/Mozilla_Source_Tree_Docs/_venv &&
             mv docs-out/html/Mozilla_Source_Tree_Docs docs &&
             tar -czf docs.tar.gz docs
     when:
         files-changed:
             - '**/*.py'
             - '**/*.rst'
--- a/taskcluster/ci/source-test/mozlint.yml
+++ b/taskcluster/ci/source-test/mozlint.yml
@@ -7,17 +7,17 @@ mozlint-eslint:
         tier: 1
     worker-type: aws-provisioner-v1/gecko-t-linux-xlarge
     worker:
         docker-image: {in-tree: "lint"}
         max-run-time: 1800
     run:
         using: run-task
         command: >
-            cd /home/worker/checkouts/gecko/ &&
+            cd /builds/worker/checkouts/gecko/ &&
             /build/tooltool.py fetch -m tools/lint/eslint/manifest.tt &&
             tar xvfz eslint.tar.gz &&
             rm eslint.tar.gz &&
             ln -s ../tools/lint/eslint/eslint-plugin-mozilla node_modules &&
             ln -s ../tools/lint/eslint/eslint-plugin-spidermonkey-js node_modules &&
             ./mach lint -l eslint -f treeherder --quiet
     when:
         files-changed:
--- a/taskcluster/ci/source-test/python-tests.yml
+++ b/taskcluster/ci/source-test/python-tests.yml
@@ -58,19 +58,19 @@ mochitest-harness:
     worker:
         by-platform:
             linux64.*:
                 docker-image: {in-tree: "desktop1604-test"}
                 max-run-time: 3600
     run:
         using: run-task
         command: >
-            source /home/worker/scripts/xvfb.sh &&
+            source /builds/worker/scripts/xvfb.sh &&
             start_xvfb '1600x1200x24' 0 &&
-            cd /home/worker/checkouts/gecko &&
+            cd /builds/worker/checkouts/gecko &&
             ./mach python-test --subsuite mochitest
     when:
         files-changed:
             - 'config/mozunit.py'
             - 'python/mach_commands.py'
             - 'testing/mochitest/**'
             - 'testing/mozharness/mozharness/base/log.py'
             - 'testing/mozharness/mozharness/mozilla/structuredlog.py'
@@ -112,17 +112,17 @@ mozharness:
     worker-type: aws-provisioner-v1/gecko-t-linux-xlarge
     worker:
         docker-image: {in-tree: "lint"}
         max-run-time: 1800
     run:
         using: run-task
         cache-dotcache: true
         command: >
-            cd /home/worker/checkouts/gecko/testing/mozharness &&
+            cd /builds/worker/checkouts/gecko/testing/mozharness &&
             /usr/local/bin/tox -e py27-hg4.1
     when:
         files-changed:
             - 'testing/mozharness/**'
 
 mozlint:
     description: python/mozlint unit tests
     platform: linux64/opt
--- a/taskcluster/docker/android-gradle-build/Dockerfile
+++ b/taskcluster/docker/android-gradle-build/Dockerfile
@@ -1,43 +1,43 @@
 # TODO remove VOLUME below when the base image is updated next.
-FROM          taskcluster/centos6-build-upd:0.1.6.20160329195300
+FROM          taskcluster/centos6-build-upd:0.1.7.201706291036
 MAINTAINER    Nick Alexander <nalexander@mozilla.com>
 
 # BEGIN ../desktop-build/Dockerfile
 
 # TODO remove when base image is updated
-VOLUME /home/worker/workspace
-VOLUME /home/worker/tooltool-cache
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
 
 # Add build scripts; these are the entry points from the taskcluster worker, and
 # operate on environment variables
-ADD             bin /home/worker/bin
-RUN             chmod +x /home/worker/bin/*
+ADD             bin /builds/worker/bin
+RUN             chmod +x /builds/worker/bin/*
 
 # Add wrapper scripts for xvfb allowing tasks to easily retry starting up xvfb
 # %include taskcluster/docker/recipes/xvfb.sh
-ADD topsrcdir/taskcluster/docker/recipes/xvfb.sh /home/worker/scripts/xvfb.sh
+ADD topsrcdir/taskcluster/docker/recipes/xvfb.sh /builds/worker/scripts/xvfb.sh
 
 # Add configuration
-COPY            dot-config                    /home/worker/.config
+COPY            dot-config                    /builds/worker/.config
 
 # Generate machine uuid file
 RUN dbus-uuidgen --ensure=/var/lib/dbus/machine-id
 
 # Stubbed out credentials; mozharness looks for this file an issues a WARNING
 # if it's not found, which causes the build to fail.  Note that this needs to
 # be in the parent of the workspace directory and in the directory where
 # mozharness is run (not its --work-dir).  See Bug 1169652.
-ADD           oauth.txt /home/worker/
+ADD           oauth.txt /builds/worker/
 
 # stubbed out buildprops, which keeps mozharness from choking
 # Note that this needs to be in the parent of the workspace directory and in
 # the directory where mozharness is run (not its --work-dir)
-ADD           buildprops.json /home/worker/
+ADD           buildprops.json /builds/worker/
 
 # install tooltool directly from github where tooltool_wrapper.sh et al. expect
 # to find it
 RUN wget -O /builds/tooltool.py https://raw.githubusercontent.com/mozilla/build-tooltool/master/tooltool.py
 RUN chmod +x /builds/tooltool.py
 
 # END ../desktop-build/Dockerfile
 
@@ -81,17 +81,16 @@ RUN echo "${NEXUS_SHA1SUM}  nexus-${NEXU
 RUN sha1sum --check nexus-${NEXUS_VERSION}-bundle.tar.gz.sha1
 
 RUN tar zxf nexus-${NEXUS_VERSION}-bundle.tar.gz \
   && mv /tmp/nexus-${NEXUS_VERSION}/* /opt/sonatype/nexus/ \
   && rm -rf /tmp/nexus-${NEXUS_VERSION} \
   && rm -rf /tmp/nexus-${NEXUS_VERSION}-bundle.tar.gz
 
 # Install tooltool directly from github.
-RUN mkdir /build
 ADD https://raw.githubusercontent.com/mozilla/build-tooltool/master/tooltool.py /build/tooltool.py
 RUN chmod +rx /build/tooltool.py
 
 # Back to the centos6-build workdir, matching desktop-build.
-WORKDIR /home/worker
+WORKDIR /builds/worker
 
 # Set a default command useful for debugging
 CMD ["/bin/bash", "--login"]
--- a/taskcluster/docker/android-gradle-build/VERSION
+++ b/taskcluster/docker/android-gradle-build/VERSION
@@ -1,1 +1,1 @@
-0.0.1
+0.0.2
--- a/taskcluster/docker/android-gradle-build/bin/after.sh
+++ b/taskcluster/docker/android-gradle-build/bin/after.sh
@@ -21,25 +21,25 @@ tar cJf jcentral.tar.xz jcentral
 # ~/.gradle/wrapper/dists/gradle-2.7-all/$PROJECT_HASH/gradle-2.7-all.zip.  We
 # want to remove the version from the internal directory for use via tooltool in
 # a mozconfig.
 cp $GRADLE_USER_HOME/wrapper/dists/gradle-${GRADLE_VERSION}-all/*/gradle-${GRADLE_VERSION}-all.zip gradle-${GRADLE_VERSION}-all.zip
 unzip -q gradle-${GRADLE_VERSION}-all.zip
 mv gradle-${GRADLE_VERSION} gradle-dist
 tar cJf gradle-dist.tar.xz gradle-dist
 
-mkdir -p /home/worker/artifacts
+mkdir -p /builds/worker/artifacts
 # We can't redistribute the Android SDK publicly just yet.  We'll
 # upload to (internal) tooltool eventually.  mv
-# android-sdk-linux.tar.xz /home/worker/artifacts
-mv jcentral.tar.xz /home/worker/artifacts
-mv gradle-dist.tar.xz /home/worker/artifacts
+# android-sdk-linux.tar.xz /builds/worker/artifacts
+mv jcentral.tar.xz /builds/worker/artifacts
+mv gradle-dist.tar.xz /builds/worker/artifacts
 popd
 
 # Bug 1245170: at some point in the future, we'll be able to upload
 # things directly to tooltool.
-# pushd /home/worker/artifacts
+# pushd /builds/worker/artifacts
 # /build/tooltool.py add --visibility=public jcentral.tar.xz
 # /build/tooltool.py add --visibility=public gradle-dist.tar.xz
 # /build/tooltool.py add --visibility=internal android-sdk-linux.tar.xz
 # /build/tooltool.py upload -v --url=http://relengapi/tooltool/ \
 #   --message="No message - Gradle and jcentral archives uploaded from taskcluster."
 # popd
--- a/taskcluster/docker/android-gradle-build/bin/build.sh
+++ b/taskcluster/docker/android-gradle-build/bin/build.sh
@@ -2,26 +2,26 @@
 
 set -x -e -v
 
 # TODO: when bug 1093833 is solved and tasks can run as non-root, reduce this
 # to a simple fail-if-root check
 if [ $(id -u) = 0 ]; then
     # each of the caches we have mounted are owned by root, so update that ownership
     # to 'worker'
-    for cache in /home/worker/.tc-vcs /home/worker/workspace /home/worker/tooltool-cache; do
+    for cache in /builds/worker/.tc-vcs /builds/worker/workspace /builds/worker/tooltool-cache; do
         if [ -d $cache ]; then
             # -R probably isn't necessary forever, but it fixes some poisoned
             # caches for now
             chown -R worker:worker $cache
         fi
     done
 
     # ..then drop privileges by re-running this script
-    exec su worker /home/worker/bin/build.sh
+    exec su worker /builds/worker/bin/build.sh
 fi
 
 ####
 # The default build works for any fx_desktop_build based mozharness job:
 # via linux-build.sh
 ####
 
 . $HOME/bin/checkout-sources.sh
--- a/taskcluster/docker/android-gradle-build/bin/checkout-sources.sh
+++ b/taskcluster/docker/android-gradle-build/bin/checkout-sources.sh
@@ -20,17 +20,17 @@ set -x -e
 
 : TOOLS_REPOSITORY              ${TOOLS_REPOSITORY:=https://hg.mozilla.org/build/tools}
 : TOOLS_BASE_REPOSITORY         ${TOOLS_BASE_REPOSITORY:=${TOOLS_REPOSITORY}}
 : TOOLS_HEAD_REPOSITORY         ${TOOLS_HEAD_REPOSITORY:=${TOOLS_REPOSITORY}}
 : TOOLS_HEAD_REV                ${TOOLS_HEAD_REV:=default}
 : TOOLS_HEAD_REF                ${TOOLS_HEAD_REF:=${TOOLS_HEAD_REV}}
 : TOOLS_DISABLE                 ${TOOLS_DISABLE:=false}
 
-: WORKSPACE                     ${WORKSPACE:=/home/worker/workspace}
+: WORKSPACE                     ${WORKSPACE:=/builds/worker/workspace}
 
 set -v
 
 # check out tools where mozharness expects it to be ($PWD/build/tools and $WORKSPACE/build/tools)
 if [ ! "$TOOLS_DISABLE" = true ]
 then
     tc-vcs checkout $WORKSPACE/build/tools $TOOLS_BASE_REPOSITORY $TOOLS_HEAD_REPOSITORY $TOOLS_HEAD_REV $TOOLS_HEAD_REF
 
--- a/taskcluster/docker/centos6-build-upd/Dockerfile
+++ b/taskcluster/docker/centos6-build-upd/Dockerfile
@@ -1,9 +1,9 @@
-FROM          taskcluster/centos6-build:0.1.6
+FROM          taskcluster/centos6-build:0.1.7
 MAINTAINER    Dustin J. Mitchell <dustin@mozilla.com>
 
 ### update to latest from upstream repositories
 # if this becomes a long list of packages, consider bumping the
 # centos6-build version
 RUN yum update -y
 
 # Set a default command useful for debugging
--- a/taskcluster/docker/centos6-build-upd/VERSION
+++ b/taskcluster/docker/centos6-build-upd/VERSION
@@ -1,1 +1,1 @@
-0.1.6.20160329195300
+0.1.7.201706291036
--- a/taskcluster/docker/centos6-build/Dockerfile
+++ b/taskcluster/docker/centos6-build/Dockerfile
@@ -1,32 +1,34 @@
 FROM          centos:6
 MAINTAINER    Dustin J. Mitchell <dustin@mozilla.com>
 
+RUN mkdir /builds
+
 ### add worker user and setup its workspace
-RUN useradd -d /home/worker -s /bin/bash -m worker
+RUN useradd -d /builds/worker -s /bin/bash -m worker
 # Declare default working folder
-WORKDIR       /home/worker
+WORKDIR       /builds/worker
 
 # This will create a host mounted filesystem when the cache is stripped
 # on Try. This cancels out some of the performance losses of aufs. See
 # bug 1291940.
-VOLUME /home/worker/workspace
-VOLUME /home/worker/tooltool-cache
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
 
 # install non-build specific dependencies in a single layer
 ADD           system-setup.sh   /tmp/system-setup.sh
 RUN           bash /tmp/system-setup.sh
 
 # Builds need the share module enabled
-ADD           hgrc /home/worker/.hgrc
-RUN chown -R worker:worker /home/worker/.hgrc
+ADD           hgrc /builds/worker/.hgrc
+RUN chown -R worker:worker /builds/worker/.hgrc
 
 # Set variable normally configured at login, by the shells parent process, these
 # are taken from GNU su manual
-ENV           HOME          /home/worker
+ENV           HOME          /builds/worker
 ENV           SHELL         /bin/bash
 ENV           USER          worker
 ENV           LOGNAME       worker
 ENV           HOSTNAME      taskcluster-worker
 
 # Set a default command useful for debugging
 CMD ["/bin/bash", "--login"]
--- a/taskcluster/docker/centos6-build/VERSION
+++ b/taskcluster/docker/centos6-build/VERSION
@@ -1,1 +1,1 @@
-0.1.6
+0.1.7
--- a/taskcluster/docker/centos6-build/system-setup.sh
+++ b/taskcluster/docker/centos6-build/system-setup.sh
@@ -445,18 +445,18 @@ EOF
 cd ninja-1.6.0
 ./configure.py --bootstrap
 cp ninja /usr/local/bin/ninja
 # Old versions of Cmake can only find ninja in this location!
 ln -s /usr/local/bin/ninja /usr/local/bin/ninja-build
 
 # note that TC will replace workspace with a cache mount; there's no sense
 # creating anything inside there
-mkdir -p /home/worker/workspace
-chown worker:worker /home/worker/workspace
+mkdir -p /builds/worker/workspace
+chown worker:worker /builds/worker/workspace
 
 # /builds is *not* replaced with a mount in the docker container. The worker
 # user writes to lots of subdirectories, though, so it's owned by that user
 mkdir -p /builds
 chown worker:worker /builds
 
 # remove packages installed for the builds above
 yum shell -y <<'EOF'
--- a/taskcluster/docker/desktop-build/Dockerfile
+++ b/taskcluster/docker/desktop-build/Dockerfile
@@ -1,20 +1,20 @@
 # TODO remove VOLUME below when the base image is updated next.
-FROM          taskcluster/centos6-build-upd:0.1.6.20160329195300
+FROM          taskcluster/centos6-build-upd:0.1.7.201706291036
 MAINTAINER    Dustin J. Mitchell <dustin@mozilla.com>
 
 # TODO remove when base image is updated
-VOLUME /home/worker/workspace
-VOLUME /home/worker/tooltool-cache
+VOLUME /builds/worker/workspace
+VOLUME /builds/worker/tooltool-cache
 
 # Add build scripts; these are the entry points from the taskcluster worker, and
 # operate on environment variables
-ADD             bin /home/worker/bin
-RUN             chmod +x /home/worker/bin/*
+ADD             bin /builds/worker/bin
+RUN             chmod +x /builds/worker/bin/*
 
 # %include python/mozbuild/mozbuild/action/tooltool.py
 ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /builds/tooltool.py
 ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /setup/tooltool.py
 
 # %include testing/mozharness/external_tools/robustcheckout.py
 ADD topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py
 
@@ -35,37 +35,37 @@ ADD topsrcdir/taskcluster/docker/recipes
 
 # TODO remove once base image doesn't install Mercurial
 RUN pip uninstall -y Mercurial
 
 RUN bash /setup/system-setup.sh
 
 # Add wrapper scripts for xvfb allowing tasks to easily retry starting up xvfb
 # %include taskcluster/docker/recipes/xvfb.sh
-ADD topsrcdir/taskcluster/docker/recipes/xvfb.sh /home/worker/scripts/xvfb.sh
+ADD topsrcdir/taskcluster/docker/recipes/xvfb.sh /builds/worker/scripts/xvfb.sh
 
 # %include taskcluster/docker/recipes/run-task
-ADD topsrcdir/taskcluster/docker/recipes/run-task /home/worker/bin/run-task
+ADD topsrcdir/taskcluster/docker/recipes/run-task /builds/worker/bin/run-task
 
 # Add configuration
-COPY            dot-config                    /home/worker/.config
+COPY            dot-config                    /builds/worker/.config
 
 # Generate machine uuid file
 RUN dbus-uuidgen --ensure=/var/lib/dbus/machine-id
 
 # Stubbed out credentials; mozharness looks for this file an issues a WARNING
 # if it's not found, which causes the build to fail.  Note that this needs to
 # be in the parent of the workspace directory and in the directory where
 # mozharness is run (not its --work-dir).  See Bug 1169652.
-ADD           oauth.txt /home/worker/
+ADD           oauth.txt /builds/worker/
 
 # stubbed out buildprops, which keeps mozharness from choking
 # Note that this needs to be in the parent of the workspace directory and in
 # the directory where mozharness is run (not its --work-dir)
-ADD           buildprops.json /home/worker/
+ADD           buildprops.json /builds/worker/
 
 # Move installation to base centos6-build image once Bug 1272629 is fixed
 # Install the screen package here to use with xvfb.
 # Install bison to build binutils.
 RUN yum install -y bison screen
 
 # Install libtool.
 RUN yum install -y libtool
--- a/taskcluster/docker/desktop-build/bin/build.sh
+++ b/taskcluster/docker/desktop-build/bin/build.sh
@@ -7,26 +7,26 @@ set -x -e -v
 
 script_args="${@}"
 
 # TODO: when bug 1093833 is solved and tasks can run as non-root, reduce this
 # to a simple fail-if-root check
 if [ $(id -u) = 0 ]; then
     # each of the caches we have mounted are owned by root, so update that ownership
     # to 'worker'
-    for cache in /home/worker/.tc-vcs /home/worker/workspace /home/worker/tooltool-cache; do
+    for cache in /builds/worker/.tc-vcs /builds/worker/workspace /builds/worker/tooltool-cache; do
         if [ -d $cache ]; then
             # -R probably isn't necessary forever, but it fixes some poisoned
             # caches for now
             chown -R worker:worker $cache
         fi
     done
 
     # ..then drop privileges by re-running this script
-    exec su worker -c "/home/worker/bin/build.sh $script_args"
+    exec su worker -c "/builds/worker/bin/build.sh $script_args"
 fi
 
 ####
 # The default build works for any fx_desktop_build based mozharness job:
 # via build-linux.sh
 ####
 
 . $HOME/bin/checkout-sources.sh
--- a/taskcluster/docker/desktop-build/bin/checkout-sources.sh
+++ b/taskcluster/docker/desktop-build/bin/checkout-sources.sh
@@ -20,17 +20,17 @@ set -x -e
 
 : TOOLS_REPOSITORY              ${TOOLS_REPOSITORY:=https://hg.mozilla.org/build/tools}
 : TOOLS_BASE_REPOSITORY         ${TOOLS_BASE_REPOSITORY:=${TOOLS_REPOSITORY}}
 : TOOLS_HEAD_REPOSITORY         ${TOOLS_HEAD_REPOSITORY:=${TOOLS_REPOSITORY}}
 : TOOLS_HEAD_REV                ${TOOLS_HEAD_REV:=default}
 : TOOLS_HEAD_REF                ${TOOLS_HEAD_REF:=${TOOLS_HEAD_REV}}
 : TOOLS_DISABLE                 ${TOOLS_DISABLE:=false}
 
-: WORKSPACE                     ${WORKSPACE:=/home/worker/workspace}
+: WORKSPACE                     ${WORKSPACE:=/builds/worker/workspace}
 
 set -v
 
 # check out tools where mozharness expects it to be ($PWD/build/tools and $WORKSPACE/build/tools)
 if [ ! "$TOOLS_DISABLE" = true ]
 then
     tc-vcs checkout $WORKSPACE/build/tools $TOOLS_BASE_REPOSITORY $TOOLS_HEAD_REPOSITORY $TOOLS_HEAD_REV $TOOLS_HEAD_REF
 
--- a/taskcluster/docker/desktop-test/Dockerfile
+++ b/taskcluster/docker/desktop-test/Dockerfile
@@ -1,79 +1,80 @@
 FROM          ubuntu:12.04
 MAINTAINER    Jonas Finnemann Jensen <jopsen@gmail.com>
 
-RUN useradd -d /home/worker -s /bin/bash -m worker
-WORKDIR /home/worker
+RUN mkdir /builds
+RUN useradd -d /builds/worker -s /bin/bash -m worker
+WORKDIR /builds/worker
 
 # %include python/mozbuild/mozbuild/action/tooltool.py
 ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /setup/tooltool.py
 
 # %include testing/mozharness/external_tools/robustcheckout.py
 ADD topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py
 
 # %include taskcluster/docker/recipes/install-mercurial.sh
 ADD topsrcdir/taskcluster/docker/recipes/install-mercurial.sh /tmp/install-mercurial.sh
 
 # Add wrapper scripts for xvfb allowing tasks to easily retry starting up xvfb
 # %include taskcluster/docker/recipes/xvfb.sh
-ADD topsrcdir/taskcluster/docker/recipes/xvfb.sh /home/worker/scripts/xvfb.sh
+ADD topsrcdir/taskcluster/docker/recipes/xvfb.sh /builds/worker/scripts/xvfb.sh
 
 # Add the tooltool manifest containing the minidump_stackwalk binary.
 # %include testing/config/tooltool-manifests/linux64/releng.manifest
 ADD topsrcdir/testing/config/tooltool-manifests/linux64/releng.manifest /tmp/minidump_stackwalk.manifest
 
 # %include taskcluster/docker/recipes/ubuntu1204-test-system-setup.sh
 ADD topsrcdir/taskcluster/docker/recipes/ubuntu1204-test-system-setup.sh /setup/system-setup.sh
 RUN bash /setup/system-setup.sh
 
 # %include taskcluster/docker/recipes/run-task
-ADD topsrcdir/taskcluster/docker/recipes/run-task /home/worker/bin/run-task
+ADD topsrcdir/taskcluster/docker/recipes/run-task /builds/worker/bin/run-task
 
 # %include taskcluster/scripts/tester/test-linux.sh
-ADD topsrcdir/taskcluster/scripts/tester/test-linux.sh /home/worker/bin/test-linux.sh
+ADD topsrcdir/taskcluster/scripts/tester/test-linux.sh /builds/worker/bin/test-linux.sh
 
 # This will create a host mounted filesystem when the cache is stripped
 # on Try. This cancels out some of the performance losses of aufs. See
 # bug 1291940.
-VOLUME /home/worker/checkouts
-VOLUME /home/worker/workspace
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
 
 # Set variable normally configured at login, by the shells parent process, these
 # are taken from GNU su manual
-ENV           HOME          /home/worker
+ENV           HOME          /builds/worker
 ENV           SHELL         /bin/bash
 ENV           USER          worker
 ENV           LOGNAME       worker
 ENV           HOSTNAME      taskcluster-worker
 ENV           LANG          en_US.UTF-8
 ENV           LC_ALL        en_US.UTF-8
 
 # Add utilities and configuration
-COPY           dot-files/config              /home/worker/.config
-COPY           dot-files/pulse               /home/worker/.pulse
+COPY           dot-files/config              /builds/worker/.config
+COPY           dot-files/pulse               /builds/worker/.pulse
 RUN            chmod +x bin/*
 # TODO: remove this when buildbot is gone
-COPY           buildprops.json               /home/worker/buildprops.json
+COPY           buildprops.json               /builds/worker/buildprops.json
 COPY           tc-vcs-config.yml /etc/taskcluster-vcs.yml
 
 # TODO: remove
-ADD            https://raw.githubusercontent.com/taskcluster/buildbot-step/master/buildbot_step /home/worker/bin/buildbot_step
-RUN chmod u+x /home/worker/bin/buildbot_step
+ADD            https://raw.githubusercontent.com/taskcluster/buildbot-step/master/buildbot_step /builds/worker/bin/buildbot_step
+RUN chmod u+x /builds/worker/bin/buildbot_step
 
 # allow the worker user to access video devices
 RUN usermod -a -G video worker
 
 RUN mkdir Documents; mkdir Pictures; mkdir Music; mkdir Videos; mkdir artifacts
 
 # install tc-vcs and tc-npm-cache
 RUN npm install -g taskcluster-vcs@2.3.12 \
  && npm install -g taskcluster-npm-cache@1.1.14 \
  && rm -rf ~/.npm
-ENV PATH $PATH:/home/worker/bin
+ENV PATH $PATH:/builds/worker/bin
 
 # TODO Re-enable worker when bug 1093833 lands
 #USER          worker
 
 # clean up
 RUN rm -Rf .cache && mkdir -p .cache
 
 # Disable Ubuntu update prompt
@@ -90,19 +91,19 @@ EXPOSE 5900
 # This helps not forgetting setting DISPLAY=:0 when running
 # tests outside of test.sh
 ENV DISPLAY :0
 
 # Disable apport (Ubuntu app crash reporter) to avoid stealing focus from test runs
 ADD apport /etc/default/apport
 
 # Disable font antialiasing for now to match releng's setup
-ADD fonts.conf /home/worker/.fonts.conf
+ADD fonts.conf /builds/worker/.fonts.conf
 
 # Set up first-run experience for interactive mode
 ADD motd /etc/taskcluster-motd
 ADD taskcluster-interactive-shell /bin/taskcluster-interactive-shell
 RUN chmod +x /bin/taskcluster-interactive-shell
 
-RUN chown -R worker:worker /home/worker
+RUN chown -R worker:worker /builds/worker
 
 # Set a default command useful for debugging
 CMD ["/bin/bash", "--login"]
--- a/taskcluster/docker/desktop1604-test/Dockerfile
+++ b/taskcluster/docker/desktop1604-test/Dockerfile
@@ -1,13 +1,14 @@
 FROM          ubuntu:16.04
 MAINTAINER    Joel Maher <joel.maher@gmail.com>
 
-RUN useradd -d /home/worker -s /bin/bash -m worker
-WORKDIR /home/worker
+RUN mkdir /builds
+RUN useradd -d /builds/worker -s /bin/bash -m worker
+WORKDIR /builds/worker
 
 # %include python/mozbuild/mozbuild/action/tooltool.py
 ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /setup/tooltool.py
 
 # %include testing/mozharness/external_tools/robustcheckout.py
 ADD topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py
 
 # %include taskcluster/docker/recipes/common.sh
@@ -24,62 +25,62 @@ ADD topsrcdir/taskcluster/docker/recipes
 ADD topsrcdir/testing/config/tooltool-manifests/linux64/releng.manifest /tmp/minidump_stackwalk.manifest
 
 # %include taskcluster/docker/recipes/ubuntu1604-test-system-setup.sh
 ADD topsrcdir/taskcluster/docker/recipes/ubuntu1604-test-system-setup.sh /setup/system-setup.sh
 RUN           bash /setup/system-setup.sh
 
 # Add wrapper scripts for xvfb allowing tasks to easily retry starting up xvfb
 # %include taskcluster/docker/recipes/xvfb.sh
-ADD topsrcdir/taskcluster/docker/recipes/xvfb.sh /home/worker/scripts/xvfb.sh
+ADD topsrcdir/taskcluster/docker/recipes/xvfb.sh /builds/worker/scripts/xvfb.sh
 
 # %include taskcluster/docker/recipes/run-task
-ADD topsrcdir/taskcluster/docker/recipes/run-task /home/worker/bin/run-task
+ADD topsrcdir/taskcluster/docker/recipes/run-task /builds/worker/bin/run-task
 
 # %include taskcluster/scripts/tester/test-linux.sh
-ADD topsrcdir/taskcluster/scripts/tester/test-linux.sh /home/worker/bin/test-linux.sh
+ADD topsrcdir/taskcluster/scripts/tester/test-linux.sh /builds/worker/bin/test-linux.sh
 
 # This will create a host mounted filesystem when the cache is stripped
 # on Try. This cancels out some of the performance losses of aufs. See
 # bug 1291940.
-VOLUME /home/worker/checkouts
-VOLUME /home/worker/workspace
+VOLUME /builds/worker/checkouts
+VOLUME /builds/worker/workspace
 
 # Set variable normally configured at login, by the shells parent process, these
 # are taken from GNU su manual
-ENV           HOME          /home/worker
+ENV           HOME          /builds/worker
 ENV           SHELL         /bin/bash
 ENV           USER          worker
 ENV           LOGNAME       worker
 ENV           HOSTNAME      taskcluster-worker
 ENV           LANG          en_US.UTF-8
 ENV           LC_ALL        en_US.UTF-8
 
 # Add utilities and configuration
-COPY           dot-files/config              /home/worker/.config
-COPY           dot-files/pulse               /home/worker/.pulse
+COPY           dot-files/config              /builds/worker/.config
+COPY           dot-files/pulse               /builds/worker/.pulse
 RUN            chmod +x bin/*
 # TODO: remove this when buildbot is gone
-COPY           buildprops.json               /home/worker/buildprops.json
+COPY           buildprops.json               /builds/worker/buildprops.json
 COPY           tc-vcs-config.yml /etc/taskcluster-vcs.yml
 
 # TODO: remove
-ADD            https://raw.githubusercontent.com/taskcluster/buildbot-step/master/buildbot_step /home/worker/bin/buildbot_step
-RUN chmod u+x /home/worker/bin/buildbot_step
+ADD            https://raw.githubusercontent.com/taskcluster/buildbot-step/master/buildbot_step /builds/worker/bin/buildbot_step
+RUN chmod u+x /builds/worker/bin/buildbot_step
 
 # allow the worker user to access video devices
 RUN usermod -a -G video worker
 
 RUN mkdir Documents; mkdir Pictures; mkdir Music; mkdir Videos; mkdir artifacts
 
 # install tc-vcs and tc-npm-cache
 RUN npm install -g taskcluster-vcs@2.3.12 \
  && npm install -g taskcluster-npm-cache@1.1.14 \
  && rm -rf ~/.npm
-ENV PATH $PATH:/home/worker/bin
+ENV PATH $PATH:/builds/worker/bin
 
 # TODO Re-enable worker when bug 1093833 lands
 #USER          worker
 
 # clean up
 RUN rm -Rf .cache && mkdir -p .cache
 
 # Disable Ubuntu update prompt
@@ -99,19 +100,19 @@ EXPOSE 5900
 # This helps not forgetting setting DISPLAY=:0 when running
 # tests outside of test.sh
 ENV DISPLAY :0
 
 # Disable apport (Ubuntu app crash reporter) to avoid stealing focus from test runs
 ADD apport /etc/default/apport
 
 # Disable font antialiasing for now to match releng's setup
-ADD fonts.conf /home/worker/.fonts.conf
+ADD fonts.conf /builds/worker/.fonts.conf
 
 # Set up first-run experience for interactive mode
 ADD motd /etc/taskcluster-motd
 ADD taskcluster-interactive-shell /bin/taskcluster-interactive-shell
 RUN chmod +x /bin/taskcluster-interactive-shell
 
-RUN chown -R worker:worker /home/worker
+RUN chown -R worker:worker /builds/worker
 
 # Set a default command useful for debugging
 CMD ["/bin/bash", "--login"]
--- a/taskcluster/docker/lint/Dockerfile
+++ b/taskcluster/docker/lint/Dockerfile
@@ -1,15 +1,15 @@
 FROM          ubuntu:16.04
 MAINTAINER    Andrew Halberstadt <ahalberstadt@mozilla.com>
 
-RUN useradd -d /home/worker -s /bin/bash -m worker
-WORKDIR /home/worker
+RUN mkdir /builds
+RUN useradd -d /builds/worker -s /bin/bash -m worker
+WORKDIR /builds/worker
 
-RUN mkdir /build
 # %include python/mozbuild/mozbuild/action/tooltool.py
 ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /build/tooltool.py
 
 # %include testing/mozharness/external_tools/robustcheckout.py
 ADD topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py
 
 # %include taskcluster/docker/recipes/install-node.sh
 ADD topsrcdir/taskcluster/docker/recipes/install-node.sh /build/install-node.sh
@@ -19,22 +19,22 @@ ADD topsrcdir/taskcluster/docker/recipes
 ADD system-setup.sh /tmp/system-setup.sh
 # %include tools/lint/flake8_/flake8_requirements.txt
 ADD topsrcdir/tools/lint/flake8_/flake8_requirements.txt /tmp/flake8_requirements.txt
 # %include tools/lint/tox/tox_requirements.txt
 ADD topsrcdir/tools/lint/tox/tox_requirements.txt /tmp/tox_requirements.txt
 RUN bash /tmp/system-setup.sh
 
 # %include taskcluster/docker/recipes/run-task
-ADD topsrcdir/taskcluster/docker/recipes/run-task /home/worker/bin/run-task
-RUN chown -R worker:worker /home/worker/bin && chmod 755 /home/worker/bin/*
+ADD topsrcdir/taskcluster/docker/recipes/run-task /builds/worker/bin/run-task
+RUN chown -R worker:worker /builds/worker/bin && chmod 755 /builds/worker/bin/*
 
 # Set variable normally configured at login, by the shells parent process, these
 # are taken from GNU su manual
-ENV           HOME          /home/worker
+ENV           HOME          /builds/worker
 ENV           SHELL         /bin/bash
 ENV           USER          worker
 ENV           LOGNAME       worker
 ENV           HOSTNAME      taskcluster-worker
 ENV           LANG          en_US.UTF-8
 ENV           LC_ALL        en_US.UTF-8
 
 # Set a default command useful for debugging
--- a/taskcluster/scripts/builder/build-l10n.sh
+++ b/taskcluster/scripts/builder/build-l10n.sh
@@ -1,33 +1,33 @@
 #! /bin/bash -vex
 
 set -x -e
 
 echo "running as" $(id)
 
-. /home/worker/scripts/xvfb.sh
+. /builds/worker/scripts/xvfb.sh
 
 ####
 # Taskcluster friendly wrapper for performing fx desktop l10n repacks via mozharness.
 # Based on ./build-linux.sh
 ####
 
 # Inputs, with defaults
 
 : MOZHARNESS_SCRIPT             ${MOZHARNESS_SCRIPT}
 : MOZHARNESS_CONFIG             ${MOZHARNESS_CONFIG}
 : MOZHARNESS_ACTIONS            ${MOZHARNESS_ACTIONS}
 : MOZHARNESS_OPTIONS            ${MOZHARNESS_OPTIONS}
 
-: TOOLTOOL_CACHE                ${TOOLTOOL_CACHE:=/home/worker/tooltool-cache}
+: TOOLTOOL_CACHE                ${TOOLTOOL_CACHE:=/builds/worker/tooltool-cache}
 
 : NEED_XVFB                     ${NEED_XVFB:=false}
 
-: WORKSPACE                     ${WORKSPACE:=/home/worker/workspace}
+: WORKSPACE                     ${WORKSPACE:=/builds/worker/workspace}
 
 set -v
 
 fail() {
     echo # make sure error message is on a new line
     echo "[build-l10n.sh:error]" "${@}"
     exit 1
 }
@@ -81,17 +81,17 @@ fi
 # e.g. enable-pgo
 if [ -n "$MOZHARNESS_OPTIONS" ]; then
     options=""
     for option in $MOZHARNESS_OPTIONS; do
         options="$options --$option"
     done
 fi
 
-cd /home/worker
+cd /builds/worker
 
 python2.7 $WORKSPACE/build/src/testing/${MOZHARNESS_SCRIPT} \
   --disable-mock \
   --revision ${GECKO_HEAD_REV} \
   $actions \
   $options \
   ${config_cmds} \
   --log-level=debug \
--- a/taskcluster/scripts/builder/build-linux.sh
+++ b/taskcluster/scripts/builder/build-linux.sh
@@ -1,37 +1,37 @@
 #! /bin/bash -vex
 
 set -x -e
 
 echo "running as" $(id)
 
-. /home/worker/scripts/xvfb.sh
+. /builds/worker/scripts/xvfb.sh
 
 ####
 # Taskcluster friendly wrapper for performing fx desktop builds via mozharness.
 ####
 
 # Inputs, with defaults
 
 : MOZHARNESS_SCRIPT             ${MOZHARNESS_SCRIPT}
 : MOZHARNESS_CONFIG             ${MOZHARNESS_CONFIG}
 : MOZHARNESS_ACTIONS            ${MOZHARNESS_ACTIONS}
 : MOZHARNESS_OPTIONS            ${MOZHARNESS_OPTIONS}
 
-: TOOLTOOL_CACHE                ${TOOLTOOL_CACHE:=/home/worker/tooltool-cache}
+: TOOLTOOL_CACHE                ${TOOLTOOL_CACHE:=/builds/worker/tooltool-cache}
 
 : NEED_XVFB                     ${NEED_XVFB:=false}
 
 : MH_CUSTOM_BUILD_VARIANT_CFG   ${MH_CUSTOM_BUILD_VARIANT_CFG}
 : MH_BRANCH                     ${MH_BRANCH:=mozilla-central}
 : MH_BUILD_POOL                 ${MH_BUILD_POOL:=staging}
 : MOZ_SCM_LEVEL                 ${MOZ_SCM_LEVEL:=1}
 
-: WORKSPACE                     ${WORKSPACE:=/home/worker/workspace}
+: WORKSPACE                     ${WORKSPACE:=/builds/worker/workspace}
 
 set -v
 
 fail() {
     echo # make sure error message is on a new line
     echo "[build-linux.sh:error]" "${@}"
     exit 1
 }
@@ -107,17 +107,17 @@ fi
 # e.g. enable-pgo
 if [ -n "$MOZHARNESS_OPTIONS" ]; then
     options=""
     for option in $MOZHARNESS_OPTIONS; do
         options="$options --$option"
     done
 fi
 
-cd /home/worker
+cd /builds/worker
 
 python2.7 $WORKSPACE/build/src/testing/${MOZHARNESS_SCRIPT} ${config_cmds} \
   $debug_flag \
   $custom_build_variant_cfg_flag \
   --disable-mock \
   $actions \
   $options \
   --log-level=debug \
--- a/taskcluster/scripts/builder/repackage.sh
+++ b/taskcluster/scripts/builder/repackage.sh
@@ -1,30 +1,30 @@
 #! /bin/bash -vex
 
 set -x -e
 
 echo "running as" $(id)
 
-. /home/worker/scripts/xvfb.sh
+. /builds/worker/scripts/xvfb.sh
 
 ####
 # Taskcluster friendly wrapper for performing fx desktop builds via mozharness.
 ####
 
 # Inputs, with defaults
 
 : MOZHARNESS_SCRIPT             ${MOZHARNESS_SCRIPT}
 : MOZHARNESS_CONFIG             ${MOZHARNESS_CONFIG}
 : MOZHARNESS_ACTIONS            ${MOZHARNESS_ACTIONS}
 : MOZHARNESS_OPTIONS            ${MOZHARNESS_OPTIONS}
 
-: TOOLTOOL_CACHE                ${TOOLTOOL_CACHE:=/home/worker/tooltool-cache}
+: TOOLTOOL_CACHE                ${TOOLTOOL_CACHE:=/builds/worker/tooltool-cache}
 
-: WORKSPACE                     ${WORKSPACE:=/home/worker/workspace}
+: WORKSPACE                     ${WORKSPACE:=/builds/worker/workspace}
 
 set -v
 
 fail() {
     echo # make sure error message is on a new line
     echo "[build-linux.sh:error]" "${@}"
     exit 1
 }
@@ -79,15 +79,15 @@ fi
 # e.g. enable-pgo
 if [ -n "$MOZHARNESS_OPTIONS" ]; then
     options=""
     for option in $MOZHARNESS_OPTIONS; do
         options="$options --$option"
     done
 fi
 
-cd /home/worker
+cd /builds/worker
 
 python2.7 $WORKSPACE/build/src/testing/${MOZHARNESS_SCRIPT} ${config_cmds} \
   $actions \
   $options \
   --log-level=debug \
   --work-dir=$WORKSPACE/build \
--- a/taskcluster/scripts/misc/build-clang-windows-helper32.sh
+++ b/taskcluster/scripts/misc/build-clang-windows-helper32.sh
@@ -1,15 +1,15 @@
 #!/bin/bash
 
 set -x -e -v
 
 # This script is for building clang-cl on Windows.
 
-: TOOLTOOL_CACHE                ${TOOLTOOL_CACHE:=/home/worker/tooltool-cache}
+: TOOLTOOL_CACHE                ${TOOLTOOL_CACHE:=/builds/worker/tooltool-cache}
 export TOOLTOOL_CACHE
 
 TOOLTOOL_AUTH_FILE=/c/builds/relengapi.tok
 if [ ! -e ${TOOLTOOL_AUTH_FILE} ]; then
     echo cannot find ${TOOLTOOL_AUTH_FILE}
     exit 1
 fi
 
--- a/taskcluster/scripts/misc/build-clang-windows-helper64.sh
+++ b/taskcluster/scripts/misc/build-clang-windows-helper64.sh
@@ -1,15 +1,15 @@
 #!/bin/bash
 
 set -x -e -v
 
 # This script is for building clang-cl on Windows.
 
-: TOOLTOOL_CACHE                ${TOOLTOOL_CACHE:=/home/worker/tooltool-cache}
+: TOOLTOOL_CACHE                ${TOOLTOOL_CACHE:=/builds/worker/tooltool-cache}
 export TOOLTOOL_CACHE
 
 TOOLTOOL_AUTH_FILE=/c/builds/relengapi.tok
 if [ ! -e ${TOOLTOOL_AUTH_FILE} ]; then
     echo cannot find ${TOOLTOOL_AUTH_FILE}
     exit 1
 fi
 
--- a/taskcluster/scripts/misc/tooltool-download.sh
+++ b/taskcluster/scripts/misc/tooltool-download.sh
@@ -1,10 +1,10 @@
 # Fetch a tooltool manifest.
 
 cd $HOME/workspace/build/src
 
-: TOOLTOOL_CACHE                ${TOOLTOOL_CACHE:=/home/worker/tooltool-cache}
+: TOOLTOOL_CACHE                ${TOOLTOOL_CACHE:=/builds/worker/tooltool-cache}
 export TOOLTOOL_CACHE
 
 ./mach artifact toolchain -v --tooltool-url=http://relengapi/tooltool/ --tooltool-manifest "${TOOLTOOL_MANIFEST}"${TOOLTOOL_CACHE:+ --cache-dir ${TOOLTOOL_CACHE}}
 
 cd $OLDPWD
--- a/taskcluster/taskgraph/action.yml
+++ b/taskcluster/taskgraph/action.yml
@@ -23,46 +23,46 @@ routes:
   - "tc-treeherder-stage.v2.{{project}}.{{head_rev}}.{{pushlog_id}}"
 
 payload:
   env:
     GECKO_BASE_REPOSITORY: 'https://hg.mozilla.org/mozilla-unified'
     GECKO_HEAD_REPOSITORY: '{{{head_repository}}}'
     GECKO_HEAD_REF: '{{head_ref}}'
     GECKO_HEAD_REV: '{{head_rev}}'
-    HG_STORE_PATH: /home/worker/checkouts/hg-store
+    HG_STORE_PATH: /builds/worker/checkouts/hg-store
 
   cache:
-    level-{{level}}-checkouts: /home/worker/checkouts
+    level-{{level}}-checkouts: /builds/worker/checkouts
 
   features:
     taskclusterProxy: true
 
   # Note: This task is built server side without the context or tooling that
   # exist in tree so we must hard code the version
   image: 'taskcluster/decision:0.1.7'
 
   # Virtually no network or other potentially risky operations happen as part
   # of the task timeout aside from the initial clone. We intentionally have
   # set this to a lower value _all_ decision tasks should use a root
   # repository which is cached.
   maxRunTime: 1800
 
   command:
-    - /home/worker/bin/run-task
-    - '--vcs-checkout=/home/worker/checkouts/gecko'
+    - /builds/worker/bin/run-task
+    - '--vcs-checkout=/builds/worker/checkouts/gecko'
     - '--'
     - bash
     - -cx
     - >
-        cd /home/worker/checkouts/gecko &&
-        ln -s /home/worker/artifacts artifacts &&
+        cd /builds/worker/checkouts/gecko &&
+        ln -s /builds/worker/artifacts artifacts &&
         ./mach --log-no-times taskgraph {{action}} {{action_args}}
 
   artifacts:
     'public':
       type: 'directory'
-      path: '/home/worker/artifacts'
+      path: '/builds/worker/artifacts'
       expires: '{{#from_now}}7 days{{/from_now}}'
 
 extra:
   treeherder:
     symbol: A
--- a/taskcluster/taskgraph/transforms/android_stuff.py
+++ b/taskcluster/taskgraph/transforms/android_stuff.py
@@ -27,25 +27,25 @@ def setup_task(config, tasks):
             'MOZ_SCM_LEVEL': config.params['level'],
             'MH_BRANCH': config.params['project'],
         })
 
         task['worker'].setdefault('caches', []).append({
             'type': 'persistent',
             'name': 'level-{}-{}-tc-vcs'.format(
                 config.params['level'], config.params['project']),
-            'mount-point': "/home/worker/.tc-vcs",
+            'mount-point': "/builds/worker/.tc-vcs",
         })
 
         if int(config.params['level']) > 1:
             task['worker'].setdefault('caches', []).append({
                 'type': 'persistent',
                 'name': 'level-{}-{}-build-{}-workspace'.format(
                     config.params['level'], config.params['project'], task['name']),
-                'mount-point': "/home/worker/workspace",
+                'mount-point': "/builds/worker/workspace",
             })
 
         # Need appropriate scopes for secrets, from the 'build' section
         task['worker']['taskcluster-proxy'] = True
         task['scopes'].append(SECRET_SCOPE.format(
             'build', config.params['level'], '*'))
 
         del task['name']
--- a/taskcluster/taskgraph/transforms/job/common.py
+++ b/taskcluster/taskgraph/transforms/job/common.py
@@ -23,37 +23,37 @@ def docker_worker_add_workspace_cache(co
 
     taskdesc['worker'].setdefault('caches', []).append({
         'type': 'persistent',
         'name': 'level-{}-{}-build-{}-{}-workspace'.format(
             config.params['level'], config.params['project'],
             taskdesc['attributes']['build_platform'],
             taskdesc['attributes']['build_type'],
         ),
-        'mount-point': "/home/worker/workspace",
+        'mount-point': "/builds/worker/workspace",
     })
     if extra:
         taskdesc['worker']['caches'][-1]['name'] += '-{}'.format(
             extra
         )
 
 
 def docker_worker_add_tc_vcs_cache(config, job, taskdesc):
     taskdesc['worker'].setdefault('caches', []).append({
         'type': 'persistent',
         'name': 'level-{}-{}-tc-vcs'.format(
             config.params['level'], config.params['project']),
-        'mount-point': "/home/worker/.tc-vcs",
+        'mount-point': "/builds/worker/.tc-vcs",
     })
 
 
 def docker_worker_add_public_artifacts(config, job, taskdesc):
     taskdesc['worker'].setdefault('artifacts', []).append({
         'name': 'public/build',
-        'path': '/home/worker/artifacts/',
+        'path': '/builds/worker/artifacts/',
         'type': 'directory',
     })
 
 
 def docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc):
     """Add the GECKO_BASE_* and GECKO_HEAD_* env vars to the worker."""
     env = taskdesc['worker'].setdefault('env', {})
     env.update({
@@ -81,17 +81,17 @@ def support_vcs_checkout(config, job, ta
             #
             # ``level-%s-checkouts`` was initially used and contained a number
             # of backwards incompatible changes, such as moving HG_STORE_PATH
             # from a separate cache to this cache.
             #
             # ``v1`` was introduced to provide a clean break from the unversioned
             # cache.
             'name': 'level-%s-checkouts-v1' % level,
-            'mount-point': '/home/worker/checkouts',
+            'mount-point': '/builds/worker/checkouts',
         })
 
     taskdesc['worker'].setdefault('env', {}).update({
         'GECKO_BASE_REPOSITORY': config.params['base_repository'],
         'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
         'GECKO_HEAD_REV': config.params['head_rev'],
         'HG_STORE_PATH': '~/checkouts/hg-store',
     })
--- a/taskcluster/taskgraph/transforms/job/hazard.py
+++ b/taskcluster/taskgraph/transforms/job/hazard.py
@@ -58,28 +58,28 @@ def docker_worker_hazard(config, job, ta
     # script parameters
     if run.get('mozconfig'):
         env['MOZCONFIG'] = run['mozconfig']
 
     # tooltool downloads
     worker['caches'].append({
         'type': 'persistent',
         'name': 'tooltool-cache',
-        'mount-point': '/home/worker/tooltool-cache',
+        'mount-point': '/builds/worker/tooltool-cache',
     })
     worker['relengapi-proxy'] = True
     taskdesc['scopes'].extend([
         'docker-worker:relengapi-proxy:tooltool.download.public',
     ])
-    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
+    env['TOOLTOOL_CACHE'] = '/builds/worker/tooltool-cache'
 
     # build-haz-linux.sh needs this otherwise it assumes the checkout is in
     # the workspace.
-    env['GECKO_DIR'] = '/home/worker/checkouts/gecko'
+    env['GECKO_DIR'] = '/builds/worker/checkouts/gecko'
 
     worker['command'] = [
-        '/home/worker/bin/run-task',
-        '--chown-recursive', '/home/worker/tooltool-cache',
-        '--chown-recursive', '/home/worker/workspace',
-        '--vcs-checkout', '/home/worker/checkouts/gecko',
+        '/builds/worker/bin/run-task',
+        '--chown-recursive', '/builds/worker/tooltool-cache',
+        '--chown-recursive', '/builds/worker/workspace',
+        '--vcs-checkout', '/builds/worker/checkouts/gecko',
         '--',
         '/bin/bash', '-c', run['command']
     ]
--- a/taskcluster/taskgraph/transforms/job/mozharness.py
+++ b/taskcluster/taskgraph/transforms/job/mozharness.py
@@ -138,41 +138,41 @@ def mozharness_on_docker_worker_setup(co
         env['NEED_XVFB'] = 'true'
 
     # tooltool downloads
     if run['tooltool-downloads']:
         worker['relengapi-proxy'] = True
         worker['caches'].append({
             'type': 'persistent',
             'name': 'tooltool-cache',
-            'mount-point': '/home/worker/tooltool-cache',
+            'mount-point': '/builds/worker/tooltool-cache',
         })
         taskdesc['scopes'].extend([
             'docker-worker:relengapi-proxy:tooltool.download.public',
         ])
         if run['tooltool-downloads'] == 'internal':
             taskdesc['scopes'].append(
                 'docker-worker:relengapi-proxy:tooltool.download.internal')
-        env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
+        env['TOOLTOOL_CACHE'] = '/builds/worker/tooltool-cache'
 
     # Retry if mozharness returns TBPL_RETRY
     worker['retry-exit-status'] = 4
 
     docker_worker_setup_secrets(config, job, taskdesc)
 
     command = [
-        '/home/worker/bin/run-task',
+        '/builds/worker/bin/run-task',
         # Various caches/volumes are default owned by root:root.
-        '--chown-recursive', '/home/worker/workspace',
-        '--chown-recursive', '/home/worker/tooltool-cache',
-        '--vcs-checkout', '/home/worker/workspace/build/src',
-        '--tools-checkout', '/home/worker/workspace/build/tools',
+        '--chown-recursive', '/builds/worker/workspace',
+        '--chown-recursive', '/builds/worker/tooltool-cache',
+        '--vcs-checkout', '/builds/worker/workspace/build/src',
+        '--tools-checkout', '/builds/worker/workspace/build/tools',
         '--',
     ]
-    command.append("/home/worker/workspace/build/src/{}".format(
+    command.append("/builds/worker/workspace/build/src/{}".format(
         run.get('job-script',
                 "taskcluster/scripts/builder/build-linux.sh"
                 )))
 
     worker['command'] = command
 
 
 @run_job_using("generic-worker", "mozharness", schema=mozharness_run_schema)
--- a/taskcluster/taskgraph/transforms/job/mozharness_test.py
+++ b/taskcluster/taskgraph/transforms/job/mozharness_test.py
@@ -59,36 +59,36 @@ def mozharness_test_on_docker(config, jo
     worker['allow-ptrace'] = True  # required for all tests, for crashreporter
     worker['loopback-video'] = test['loopback-video']
     worker['loopback-audio'] = test['loopback-audio']
     worker['max-run-time'] = test['max-run-time']
     worker['retry-exit-status'] = test['retry-exit-status']
 
     artifacts = [
         # (artifact name prefix, in-image path)
-        ("public/logs/", "/home/worker/workspace/build/upload/logs/"),
-        ("public/test", "/home/worker/artifacts/"),
-        ("public/test_info/", "/home/worker/workspace/build/blobber_upload_dir/"),
+        ("public/logs/", "/builds/worker/workspace/build/upload/logs/"),
+        ("public/test", "/builds/worker/artifacts/"),
+        ("public/test_info/", "/builds/worker/workspace/build/blobber_upload_dir/"),
     ]
 
     installer_url = get_artifact_url('<build>', mozharness['build-artifact-name'])
     mozharness_url = get_artifact_url('<build>',
                                       'public/build/mozharness.zip')
 
     worker['artifacts'] = [{
         'name': prefix,
-        'path': os.path.join('/home/worker/workspace', path),
+        'path': os.path.join('/builds/worker/workspace', path),
         'type': 'directory',
     } for (prefix, path) in artifacts]
 
     worker['caches'] = [{
         'type': 'persistent',
         'name': 'level-{}-{}-test-workspace'.format(
             config.params['level'], config.params['project']),
-        'mount-point': "/home/worker/workspace",
+        'mount-point': "/builds/worker/workspace",
     }]
 
     env = worker['env'] = {
         'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
         'MOZHARNESS_SCRIPT': mozharness['script'],
         'MOZILLA_BUILD_URL': {'task-reference': installer_url},
         'NEED_PULSEAUDIO': 'true',
         'NEED_WINDOW_MANAGER': 'true',
@@ -110,48 +110,48 @@ def mozharness_test_on_docker(config, jo
 
     # handle some of the mozharness-specific options
 
     if mozharness['tooltool-downloads']:
         worker['relengapi-proxy'] = True
         worker['caches'].append({
             'type': 'persistent',
             'name': 'tooltool-cache',
-            'mount-point': '/home/worker/tooltool-cache',
+            'mount-point': '/builds/worker/tooltool-cache',
         })
         taskdesc['scopes'].extend([
             'docker-worker:relengapi-proxy:tooltool.download.internal',
             'docker-worker:relengapi-proxy:tooltool.download.public',
         ])
 
     if test['reboot']:
         raise Exception('reboot: {} not supported on generic-worker'.format(test['reboot']))
 
     # assemble the command line
     command = [
-        '/home/worker/bin/run-task',
+        '/builds/worker/bin/run-task',
         # The workspace cache/volume is default owned by root:root.
-        '--chown', '/home/worker/workspace',
+        '--chown', '/builds/worker/workspace',
     ]
 
     # Support vcs checkouts regardless of whether the task runs from
     # source or not in case it is needed on an interactive loaner.
     support_vcs_checkout(config, job, taskdesc)
 
     # If we have a source checkout, run mozharness from it instead of
     # downloading a zip file with the same content.
     if test['checkout']:
-        command.extend(['--vcs-checkout', '/home/worker/checkouts/gecko'])
-        env['MOZHARNESS_PATH'] = '/home/worker/checkouts/gecko/testing/mozharness'
+        command.extend(['--vcs-checkout', '/builds/worker/checkouts/gecko'])
+        env['MOZHARNESS_PATH'] = '/builds/worker/checkouts/gecko/testing/mozharness'
     else:
         env['MOZHARNESS_URL'] = {'task-reference': mozharness_url}
 
     command.extend([
         '--',
-        '/home/worker/bin/test-linux.sh',
+        '/builds/worker/bin/test-linux.sh',
     ])
 
     if mozharness.get('no-read-buildbot-config'):
         command.append("--no-read-buildbot-config")
     command.extend([
         {"task-reference": "--installer-url=" + installer_url},
         {"task-reference": "--test-packages-url=" + test_packages_url(taskdesc)},
     ])
--- a/taskcluster/taskgraph/transforms/job/run_task.py
+++ b/taskcluster/taskgraph/transforms/job/run_task.py
@@ -14,17 +14,17 @@ from voluptuous import Required, Any
 
 run_task_schema = Schema({
     Required('using'): 'run-task',
 
     # if true, add a cache at ~worker/.cache, which is where things like pip
     # tend to hide their caches.  This cache is never added for level-1 jobs.
     Required('cache-dotcache', default=False): bool,
 
-    # if true (the default), perform a checkout in /home/worker/checkouts/gecko
+    # if true (the default), perform a checkout in /builds/worker/checkouts/gecko
     Required('checkout', default=True): bool,
 
     # The command arguments to pass to the `run-task` script, after the
     # checkout arguments.  If a list, it will be passed directly; otherwise
     # it will be included in a single argument to `bash -cx`.
     Required('command'): Any([basestring], basestring),
 })
 
@@ -40,23 +40,23 @@ def docker_worker_run_task(config, job, 
     run = job['run']
     worker = taskdesc['worker'] = job['worker']
     common_setup(config, job, taskdesc)
 
     if run.get('cache-dotcache') and int(config.params['level']) > 1:
         worker['caches'].append({
             'type': 'persistent',
             'name': 'level-{level}-{project}-dotcache'.format(**config.params),
-            'mount-point': '/home/worker/.cache',
+            'mount-point': '/builds/worker/.cache',
         })
 
     run_command = run['command']
     if isinstance(run_command, basestring):
         run_command = ['bash', '-cx', run_command]
-    command = ['/home/worker/bin/run-task']
+    command = ['/builds/worker/bin/run-task']
     if run['checkout']:
         command.append('--vcs-checkout=~/checkouts/gecko')
     command.append('--fetch-hgfingerprint')
     command.append('--')
     command.extend(run_command)
     worker['command'] = command
 
 
--- a/taskcluster/taskgraph/transforms/job/spidermonkey.py
+++ b/taskcluster/taskgraph/transforms/job/spidermonkey.py
@@ -35,17 +35,17 @@ def docker_worker_spidermonkey(config, j
     worker['artifacts'] = []
     worker['caches'] = []
 
     if int(config.params['level']) > 1:
         worker['caches'].append({
             'type': 'persistent',
             'name': 'level-{}-{}-build-spidermonkey-workspace'.format(
                 config.params['level'], config.params['project']),
-            'mount-point': "/home/worker/workspace",
+            'mount-point': "/builds/worker/workspace",
         })
 
     docker_worker_add_public_artifacts(config, job, taskdesc)
 
     env = worker.setdefault('env', {})
     env.update({
         'MOZHARNESS_DISABLE': 'true',
         'SPIDERMONKEY_VARIANT': run['spidermonkey-variant'],
@@ -53,30 +53,30 @@ def docker_worker_spidermonkey(config, j
         'MOZ_SCM_LEVEL': config.params['level'],
     })
 
     # tooltool downloads; note that this script downloads using the API
     # endpoiint directly, rather than via relengapi-proxy
     worker['caches'].append({
         'type': 'persistent',
         'name': 'tooltool-cache',
-        'mount-point': '/home/worker/tooltool-cache',
+        'mount-point': '/builds/worker/tooltool-cache',
     })
-    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
+    env['TOOLTOOL_CACHE'] = '/builds/worker/tooltool-cache'
 
     support_vcs_checkout(config, job, taskdesc)
 
     script = "build-sm.sh"
     if run['using'] == 'spidermonkey-package':
         script = "build-sm-package.sh"
     elif run['using'] == 'spidermonkey-mozjs-crate':
         script = "build-sm-mozjs-crate.sh"
 
     worker['command'] = [
-        '/home/worker/bin/run-task',
-        '--chown-recursive', '/home/worker/workspace',
-        '--chown-recursive', '/home/worker/tooltool-cache',
-        '--vcs-checkout', '/home/worker/workspace/build/src',
+        '/builds/worker/bin/run-task',
+        '--chown-recursive', '/builds/worker/workspace',
+        '--chown-recursive', '/builds/worker/tooltool-cache',
+        '--vcs-checkout', '/builds/worker/workspace/build/src',
         '--',
         '/bin/bash',
         '-c',
-        'cd /home/worker && workspace/build/src/taskcluster/scripts/builder/%s' % script
+        'cd /builds/worker && workspace/build/src/taskcluster/scripts/builder/%s' % script
     ]
--- a/taskcluster/taskgraph/transforms/job/toolchain.py
+++ b/taskcluster/taskgraph/transforms/job/toolchain.py
@@ -74,17 +74,17 @@ def docker_worker_toolchain(config, job,
     taskdesc['run-on-projects'] = ['trunk', 'try']
 
     worker = taskdesc['worker']
     worker['artifacts'] = []
     worker['caches'] = []
 
     worker['artifacts'].append({
         'name': 'public',
-        'path': '/home/worker/workspace/artifacts/',
+        'path': '/builds/worker/workspace/artifacts/',
         'type': 'directory',
     })
 
     docker_worker_add_tc_vcs_cache(config, job, taskdesc)
     docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
     support_vcs_checkout(config, job, taskdesc)
 
     env = worker['env']
@@ -96,41 +96,41 @@ def docker_worker_toolchain(config, job,
     })
 
     # tooltool downloads.  By default we download using the API endpoint, but
     # the job can optionally request relengapi-proxy (for example when downloading
     # internal tooltool resources.  So we define the tooltool cache unconditionally.
     worker['caches'].append({
         'type': 'persistent',
         'name': 'tooltool-cache',
-        'mount-point': '/home/worker/tooltool-cache',
+        'mount-point': '/builds/worker/tooltool-cache',
     })
-    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
+    env['TOOLTOOL_CACHE'] = '/builds/worker/tooltool-cache'
 
     # tooltool downloads
     worker['relengapi-proxy'] = False  # but maybe enabled for tooltool below
     if run['tooltool-downloads']:
         worker['relengapi-proxy'] = True
         taskdesc['scopes'].extend([
             'docker-worker:relengapi-proxy:tooltool.download.public',
         ])
         if run['tooltool-downloads'] == 'internal':
             taskdesc['scopes'].append(
                 'docker-worker:relengapi-proxy:tooltool.download.internal')
 
     worker['command'] = [
-        '/home/worker/bin/run-task',
+        '/builds/worker/bin/run-task',
         # Various caches/volumes are default owned by root:root.
-        '--chown-recursive', '/home/worker/workspace',
-        '--chown-recursive', '/home/worker/tooltool-cache',
-        '--vcs-checkout=/home/worker/workspace/build/src',
+        '--chown-recursive', '/builds/worker/workspace',
+        '--chown-recursive', '/builds/worker/tooltool-cache',
+        '--vcs-checkout=/builds/worker/workspace/build/src',
         '--',
         'bash',
         '-c',
-        'cd /home/worker && '
+        'cd /builds/worker && '
         './workspace/build/src/taskcluster/scripts/misc/{}'.format(
             run['script'])
     ]
 
     add_optimizations(config, run, taskdesc)
 
 
 @run_job_using("generic-worker", "toolchain-script", schema=toolchain_run_schema)
--- a/taskcluster/taskgraph/transforms/marionette_harness.py
+++ b/taskcluster/taskgraph/transforms/marionette_harness.py
@@ -26,12 +26,12 @@ def setup_task(config, tasks):
             'MOZ_BUILD_DATE': config.params['moz_build_date'],
             'MOZ_SCM_LEVEL': config.params['level'],
         })
 
         task['worker']['caches'] = [{
             'type': 'persistent',
             'name': 'level-{}-{}-tc-vcs'.format(
                 config.params['level'], config.params['project']),
-            'mount-point': "/home/worker/.tc-vcs",
+            'mount-point': "/builds/worker/.tc-vcs",
         }]
 
         yield task
--- a/taskcluster/taskgraph/transforms/repackage.py
+++ b/taskcluster/taskgraph/transforms/repackage.py
@@ -134,21 +134,21 @@ def make_job_description(config, jobs):
                     '{}/artifacts/public/build/target.tar.gz'.format(signing_task_ref)
             task_env.update(
                 SIGNED_INPUT={'task-reference': input_string},
                 UNSIGNED_MAR={'task-reference': "{}mar".format(mar_prefix)},
             )
             mozharness_config = ['repackage/osx_signed.py']
             output_files = [{
                 'type': 'file',
-                'path': '/home/worker/workspace/build/artifacts/target.dmg',
+                'path': '/builds/worker/workspace/build/artifacts/target.dmg',
                 'name': 'public/build/{}target.dmg'.format(locale_output_path),
             }, {
                 'type': 'file',
-                'path': '/home/worker/workspace/build/artifacts/target.complete.mar',
+                'path': '/builds/worker/workspace/build/artifacts/target.complete.mar',
                 'name': 'public/build/{}target.complete.mar'.format(locale_output_path),
             }]
         else:
             raise Exception("Unexpected build platform for repackage")
 
         run = {
             'using': 'mozharness',
             'script': 'mozharness/scripts/repackage.py',
--- a/testing/mozharness/configs/android/androidarm_4_3.py
+++ b/testing/mozharness/configs/android/androidarm_4_3.py
@@ -4,18 +4,18 @@ config = {
     "buildbot_json_path": "buildprops.json",
     "hostutils_manifest_path": "testing/config/tooltool-manifests/linux64/hostutils.manifest",
     "robocop_package_name": "org.mozilla.roboexample.test",
     "marionette_address": "localhost:2828",
     "marionette_test_manifest": "unit-tests.ini",
     "download_tooltool": True,
     "tooltool_servers": ['http://relengapi/tooltool/'],
     "tooltool_manifest_path": "testing/config/tooltool-manifests/androidarm_4_3/releng.manifest",
-    "tooltool_cache": "/home/worker/tooltool_cache",
-    "avds_dir": "/home/worker/workspace/build/.android",
+    "tooltool_cache": "/builds/worker/tooltool_cache",
+    "avds_dir": "/builds/worker/workspace/build/.android",
     "emulator_manifest": """
         [
         {
         "size": 140097024,
         "digest": "51781032335c09103e8509b1a558bf22a7119392cf1ea301c49c01bdf21ff0ceb37d260bc1c322cd9b903252429fb01830fc27e4632be30cd345c95bf4b1a39b",
         "algorithm": "sha512",
         "filename": "android-sdk_r24.0.2-linux.tgz",
         "unpack": "True"
--- a/testing/mozharness/configs/android/androidx86.py
+++ b/testing/mozharness/configs/android/androidx86.py
@@ -1,18 +1,18 @@
 import os
 
 config = {
     "buildbot_json_path": "buildprops.json",
     "hostutils_manifest_path": "testing/config/tooltool-manifests/linux64/hostutils.manifest",
     "tooltool_manifest_path": "testing/config/tooltool-manifests/androidx86/releng.manifest",
-    "tooltool_cache": "/home/worker/tooltool_cache",
+    "tooltool_cache": "/builds/worker/tooltool_cache",
     "download_tooltool": True,
     "tooltool_servers": ['http://relengapi/tooltool/'],
-    "avds_dir": "/home/worker/workspace/build/.android",
+    "avds_dir": "/builds/worker/workspace/build/.android",
     "emulator_manifest": """
         [
         {
         "size": 193383673,
         "digest": "6609e8b95db59c6a3ad60fc3dcfc358b2c8ec8b4dda4c2780eb439e1c5dcc5d550f2e47ce56ba14309363070078d09b5287e372f6e95686110ff8a2ef1838221",
         "algorithm": "sha512",
         "filename": "android-sdk18_0.r18moz1.orig.tar.gz",
         "unpack": "True"
--- a/testing/mozharness/configs/builds/build_pool_specifics.py
+++ b/testing/mozharness/configs/builds/build_pool_specifics.py
@@ -33,12 +33,12 @@ config = {
     "taskcluster": {
         'graph_server': 'graphs.mozilla.org',
         'stage_server': 'ignored',
         # use the relengapi proxy to talk to tooltool
         "tooltool_servers": ['http://relengapi/tooltool/'],
         "tooltool_url": 'http://relengapi/tooltool/',
         'upload_env': {
             'UPLOAD_HOST': 'localhost',
-            'UPLOAD_PATH': '/home/worker/artifacts',
+            'UPLOAD_PATH': '/builds/worker/artifacts',
         },
     },
 }
--- a/testing/mozharness/configs/builds/releng_base_android_64_builds.py
+++ b/testing/mozharness/configs/builds/releng_base_android_64_builds.py
@@ -78,17 +78,17 @@ config = {
     'enable_max_vsize': False,
     'use_package_as_marfile': True,
     'env': {
         'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
         'DISPLAY': ':2',
         'HG_SHARE_BASE_DIR': '/builds/hg-shared',
         'MOZ_OBJDIR': 'obj-firefox',
         'TINDERBOX_OUTPUT': '1',
-        'TOOLTOOL_CACHE': '/home/worker/tooltool-cache',
+        'TOOLTOOL_CACHE': '/builds/worker/tooltool-cache',
         'TOOLTOOL_HOME': '/builds',
         'CCACHE_DIR': '/builds/ccache',
         'CCACHE_COMPRESS': '1',
         'CCACHE_UMASK': '002',
         'LC_ALL': 'C',
         'PATH': '/tools/buildbot/bin:/usr/local/bin:/bin:/usr/bin',
         'SHIP_LICENSED_FONTS': '1',
     },
--- a/testing/mozharness/configs/builds/releng_base_linux_32_builds.py
+++ b/testing/mozharness/configs/builds/releng_base_linux_32_builds.py
@@ -74,17 +74,17 @@ config = {
     'stage_platform': 'linux',
     'publish_nightly_en_US_routes': True,
     'env': {
         'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
         'DISPLAY': ':2',
         'HG_SHARE_BASE_DIR': '/builds/hg-shared',
         'MOZ_OBJDIR': 'obj-firefox',
         'TINDERBOX_OUTPUT': '1',
-        'TOOLTOOL_CACHE': '/home/worker/tooltool-cache',
+        'TOOLTOOL_CACHE': '/builds/worker/tooltool-cache',
         'TOOLTOOL_HOME': '/builds',
         'MOZ_CRASHREPORTER_NO_REPORT': '1',
         'CCACHE_DIR': '/builds/ccache',
         'CCACHE_COMPRESS': '1',
         'CCACHE_UMASK': '002',
         'LC_ALL': 'C',
         # 32 bit specific
         'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib/ccache:\
--- a/testing/mozharness/configs/builds/releng_base_linux_64_builds.py
+++ b/testing/mozharness/configs/builds/releng_base_linux_64_builds.py
@@ -73,17 +73,17 @@ config = {
     'stage_platform': 'linux64',
     'publish_nightly_en_US_routes': True,
     'env': {
         'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
         'DISPLAY': ':2',
         'HG_SHARE_BASE_DIR': '/builds/hg-shared',
         'MOZ_OBJDIR': 'obj-firefox',
         'TINDERBOX_OUTPUT': '1',
-        'TOOLTOOL_CACHE': '/home/worker/tooltool-cache',
+        'TOOLTOOL_CACHE': '/builds/worker/tooltool-cache',
         'TOOLTOOL_HOME': '/builds',
         'MOZ_CRASHREPORTER_NO_REPORT': '1',
         'CCACHE_DIR': '/builds/ccache',
         'CCACHE_COMPRESS': '1',
         'CCACHE_UMASK': '002',
         'LC_ALL': 'C',
         ## 64 bit specific
         'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
--- a/testing/mozharness/configs/builds/releng_base_mac_64_cross_builds.py
+++ b/testing/mozharness/configs/builds/releng_base_mac_64_cross_builds.py
@@ -44,17 +44,17 @@ config = {
     'base_name': 'OS X 10.7 %(branch)s',
     'platform': 'macosx64',
     'stage_platform': 'macosx64',
     'env': {
         'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
         'HG_SHARE_BASE_DIR': '/builds/hg-shared',
         'MOZ_OBJDIR': 'obj-firefox',
         'TINDERBOX_OUTPUT': '1',
-        'TOOLTOOL_CACHE': '/home/worker/tooltool-cache',
+        'TOOLTOOL_CACHE': '/builds/worker/tooltool-cache',
         'TOOLTOOL_HOME': '/builds',
         'MOZ_CRASHREPORTER_NO_REPORT': '1',
         'CCACHE_DIR': '/builds/ccache',
         'CCACHE_COMPRESS': '1',
         'CCACHE_UMASK': '002',
         'LC_ALL': 'C',
         ## 64 bit specific
         'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
--- a/testing/mozharness/configs/builds/releng_sub_linux_configs/64_add-on-devel.py
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_add-on-devel.py
@@ -27,15 +27,15 @@ config = {
         'TOOLTOOL_CACHE': '/builds/tooltool_cache',
         'TOOLTOOL_HOME': '/builds',
         'MOZ_CRASHREPORTER_NO_REPORT': '1',
         'CCACHE_DIR': '/builds/ccache',
         'CCACHE_COMPRESS': '1',
         'CCACHE_UMASK': '002',
         'LC_ALL': 'C',
         ## 64 bit specific
-        'PATH': '/home/worker/workspace/build/src/gcc/bin:/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
+        'PATH': '/builds/worker/workspace/build/src/gcc/bin:/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
 /usr/bin:/usr/local/sbin:/usr/sbin:/sbin:/tools/git/bin:/tools/python27/bin:\
 /tools/python27-mercurial/bin:/home/cltbld/bin',
     },
     'src_mozconfig': 'browser/config/mozconfigs/linux64/add-on-devel',
     #######################
 }
--- a/testing/mozharness/configs/builds/releng_sub_linux_configs/64_artifact.py
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_artifact.py
@@ -44,17 +44,17 @@ config = {
     'stage_platform': 'linux64',
     'publish_nightly_en_US_routes': False,
     'env': {
         'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
         'DISPLAY': ':2',
         'HG_SHARE_BASE_DIR': '/builds/hg-shared',
         'MOZ_OBJDIR': 'obj-firefox',
         'TINDERBOX_OUTPUT': '1',
-        'TOOLTOOL_CACHE': '/home/worker/tooltool-cache',
+        'TOOLTOOL_CACHE': '/builds/worker/tooltool-cache',
         'TOOLTOOL_HOME': '/builds',
         'MOZ_CRASHREPORTER_NO_REPORT': '1',
         'CCACHE_DIR': '/builds/ccache',
         'CCACHE_COMPRESS': '1',
         'CCACHE_UMASK': '002',
         'LC_ALL': 'C',
         ## 64 bit specific
         'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
--- a/testing/mozharness/configs/builds/releng_sub_linux_configs/64_stat_and_debug.py
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_stat_and_debug.py
@@ -34,17 +34,17 @@ config = {
     'stage_platform': 'linux64-st-an-opt',
     'publish_nightly_en_US_routes': False,
     'env': {
         'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
         'DISPLAY': ':2',
         'HG_SHARE_BASE_DIR': '/builds/hg-shared',
         'MOZ_OBJDIR': 'obj-firefox',
         'TINDERBOX_OUTPUT': '1',
-        'TOOLTOOL_CACHE': '/home/worker/tooltool-cache',
+        'TOOLTOOL_CACHE': '/builds/worker/tooltool-cache',
         'TOOLTOOL_HOME': '/builds',
         'MOZ_CRASHREPORTER_NO_REPORT': '1',
         'CCACHE_DIR': '/builds/ccache',
         'CCACHE_COMPRESS': '1',
         'CCACHE_UMASK': '002',
         'LC_ALL': 'C',
         ## 64 bit specific
         'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
--- a/testing/mozharness/configs/builds/releng_sub_linux_configs/64_stat_and_opt.py
+++ b/testing/mozharness/configs/builds/releng_sub_linux_configs/64_stat_and_opt.py
@@ -34,17 +34,17 @@ config = {
     'stage_platform': 'linux64-st-an',
     'publish_nightly_en_US_routes': False,
     'env': {
         'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
         'DISPLAY': ':2',
         'HG_SHARE_BASE_DIR': '/builds/hg-shared',
         'MOZ_OBJDIR': 'obj-firefox',
         'TINDERBOX_OUTPUT': '1',
-        'TOOLTOOL_CACHE': '/home/worker/tooltool-cache',
+        'TOOLTOOL_CACHE': '/builds/worker/tooltool-cache',
         'TOOLTOOL_HOME': '/builds',
         'MOZ_CRASHREPORTER_NO_REPORT': '1',
         'CCACHE_DIR': '/builds/ccache',
         'CCACHE_COMPRESS': '1',
         'CCACHE_UMASK': '002',
         'LC_ALL': 'C',
         ## 64 bit specific
         'PATH': '/tools/buildbot/bin:/usr/local/bin:/usr/lib64/ccache:/bin:\
--- a/testing/mozharness/configs/firefox_ui_tests/taskcluster.py
+++ b/testing/mozharness/configs/firefox_ui_tests/taskcluster.py
@@ -11,10 +11,10 @@ config = {
     },
 
     "find_links": [
         "http://pypi.pub.build.mozilla.org/pub",
     ],
     "pip_index": False,
 
     "download_minidump_stackwalk": True,
-    "tooltool_cache": "/home/worker/tooltool-cache",
+    "tooltool_cache": "/builds/worker/tooltool-cache",
 }
--- a/testing/mozharness/configs/marionette/prod_config.py
+++ b/testing/mozharness/configs/marionette/prod_config.py
@@ -32,17 +32,17 @@ config = {
         'run-tests',
     ],
     "default_blob_upload_servers": [
          "https://blobupload.elasticbeanstalk.com",
     ],
     "blob_uploader_auth_file" : os.path.join(os.getcwd(), "oauth.txt"),
     "download_symbols": "ondemand",
     "download_minidump_stackwalk": True,
-    "tooltool_cache": "/home/worker/tooltool-cache",
+    "tooltool_cache": "/builds/worker/tooltool-cache",
     "suite_definitions": {
         "marionette_desktop": {
             "options": [
                 "-vv",
                 "--log-raw=%(raw_log_file)s",
                 "--log-errorsummary=%(error_summary_file)s",
                 "--log-html=%(html_report_file)s",
                 "--binary=%(binary)s",
--- a/testing/mozharness/configs/single_locale/tc_android-api-15.py
+++ b/testing/mozharness/configs/single_locale/tc_android-api-15.py
@@ -6,13 +6,13 @@ config = {
     "tooltool_config": {
         "manifest": "mobile/android/config/tooltool-manifests/android/releng.manifest",
         "output_dir": "%(abs_work_dir)s/src",
     },
     "tooltool_servers": ['http://relengapi/tooltool/'],
 
     "upload_env": {
         'UPLOAD_HOST': 'localhost',
-        'UPLOAD_PATH': '/home/worker/artifacts/',
+        'UPLOAD_PATH': '/builds/worker/artifacts/',
     },
     "mozilla_dir": "src/",
     "simple_name_move": True,
 }
--- a/testing/mozharness/configs/single_locale/tc_linux32.py
+++ b/testing/mozharness/configs/single_locale/tc_linux32.py
@@ -15,13 +15,13 @@ config = {
         "DIST": "%(abs_objdir)s",
         "LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
         "L10NBASEDIR": "../../l10n",
         "MOZ_MAKE_COMPLETE_MAR": "1",
         'TOOLTOOL_CACHE': os.environ.get('TOOLTOOL_CACHE'),
     },
     "upload_env": {
         'UPLOAD_HOST': 'localhost',
-        'UPLOAD_PATH': '/home/worker/artifacts/',
+        'UPLOAD_PATH': '/builds/worker/artifacts/',
     },
     "mozilla_dir": "src/",
     "simple_name_move": True,
 }
--- a/testing/mozharness/configs/single_locale/tc_linux64.py
+++ b/testing/mozharness/configs/single_locale/tc_linux64.py
@@ -15,13 +15,13 @@ config = {
         "DIST": "%(abs_objdir)s",
         "LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
         "L10NBASEDIR": "../../l10n",
         "MOZ_MAKE_COMPLETE_MAR": "1",
         'TOOLTOOL_CACHE': os.environ.get('TOOLTOOL_CACHE'),
     },
     "upload_env": {
         'UPLOAD_HOST': 'localhost',
-        'UPLOAD_PATH': '/home/worker/artifacts/',
+        'UPLOAD_PATH': '/builds/worker/artifacts/',
     },
     "mozilla_dir": "src/",
     "simple_name_move": True,
 }
--- a/testing/mozharness/configs/single_locale/tc_macosx64.py
+++ b/testing/mozharness/configs/single_locale/tc_macosx64.py
@@ -15,17 +15,17 @@ config = {
         "DIST": "%(abs_objdir)s",
         "LOCALE_MERGEDIR": "%(abs_merge_dir)s/",
         "L10NBASEDIR": "../../l10n",
         "MOZ_MAKE_COMPLETE_MAR": "1",
         'TOOLTOOL_CACHE': os.environ.get('TOOLTOOL_CACHE'),
     },
     "upload_env": {
         'UPLOAD_HOST': 'localhost',
-        'UPLOAD_PATH': '/home/worker/artifacts/',
+        'UPLOAD_PATH': '/builds/worker/artifacts/',
     },
 
     "tooltool_url": 'http://relengapi/tooltool/',
     'tooltool_manifest_src': "browser/config/tooltool-manifests/macosx64/cross-l10n.manifest",
     "mozilla_dir": "src/",
     "simple_name_move": True,
 }
 
--- a/testing/mozharness/configs/unittests/linux_unittest.py
+++ b/testing/mozharness/configs/unittests/linux_unittest.py
@@ -303,13 +303,13 @@ config = {
                             "cppunittest": [],
                             "jittest": [],
                             "mozbase": [],
                             },
     "blob_uploader_auth_file": os.path.join(os.getcwd(), "oauth.txt"),
     "download_minidump_stackwalk": True,
     "minidump_stackwalk_path": MINIDUMP_STACKWALK_PATH,
     "minidump_tooltool_manifest_path": TOOLTOOL_MANIFEST_PATH,
-    "tooltool_cache": "/home/worker/tooltool-cache",
+    "tooltool_cache": "/builds/worker/tooltool-cache",
     "download_nodejs": True,
     "nodejs_path": NODEJS_PATH,
     "nodejs_tooltool_manifest_path": NODEJS_TOOLTOOL_MANIFEST_PATH,
 }
--- a/testing/mozharness/mozharness/mozilla/testing/codecoverage.py
+++ b/testing/mozharness/mozharness/mozilla/testing/codecoverage.py
@@ -77,17 +77,17 @@ class CodeCoverageMixin(object):
         # Create the grcov directory, get the tooltool manifest, and finally
         # download and unpack the grcov binary.
         self.grcov_dir = tempfile.mkdtemp()
         manifest = os.path.join(dirs.get('abs_test_install_dir', os.path.join(dirs['abs_work_dir'], 'tests')), \
             'config/tooltool-manifests/linux64/ccov.manifest')
 
         tooltool_path = self._fetch_tooltool_py()
         cmd = [tooltool_path, '--url', 'https://api.pub.build.mozilla.org/tooltool/', 'fetch', \
-            '-m', manifest, '-o', '-c', '/home/worker/tooltool-cache']
+            '-m', manifest, '-o', '-c', '/builds/worker/tooltool-cache']
         self.run_command(cmd, cwd=self.grcov_dir)
         self.run_command(['tar', '-jxvf', os.path.join(self.grcov_dir, 'grcov-linux-standalone-x86_64.tar.bz2'), \
             '-C', self.grcov_dir], cwd=self.grcov_dir)
 
     @PostScriptAction('run-tests')
     def _package_coverage_data(self, action, success=None):
         if not self.code_coverage_enabled:
             return
@@ -126,17 +126,17 @@ class CodeCoverageMixin(object):
             self.run_command(command, cwd=self.jsvm_dir)
 
             # GRCOV post-processing
             # Download the gcno fom the build machine.
             self.download_file(self.url_to_gcno, file_name=None, parent_dir=self.grcov_dir)
 
             # Run grcov on the zipped .gcno and .gcda files.
             grcov_command = [os.path.join(self.grcov_dir, 'grcov'), '-t', 'lcov' , '-p', \
-                             '/home/worker/workspace/build/src/', '-z', \
+                             '/builds/worker/workspace/build/src/', '-z', \
                              os.path.join(self.grcov_dir, 'target.code-coverage-gcno.zip'), file_path_gcda]
 
             # 'grcov_output' will be a tuple, the first variable is the path to the lcov output,
             # the other is the path to the standard error output.
             grcov_output = self.get_output_from_command(grcov_command, cwd=self.grcov_dir, \
                 silent=True, tmpfile_base_path=os.path.join(self.grcov_dir, 'grcov_lcov_output'), \
                 save_tmpfiles=True, return_type='files')
             new_output_name = grcov_output[0] + '.info'