Backed out Bug 1333255, which consists of the following changesets:
authorJohan Lorenzo <jlorenzo@mozilla.com>
Thu, 16 Mar 2017 15:38:32 +0100
changeset 500001 2df483309b6c130a346c9f10042e538071501c17
parent 500000 54717fcafa7edd5996e69c4f4ce0cb781869844e
child 500086 eb2ae8e1aa932e5319d5dbd1fdb1d574cfc0a216
push id49603
push userbmo:jlorenzo@mozilla.com
push dateThu, 16 Mar 2017 15:05:10 +0000
bugs1333255
milestone54.0a1
Backed out Bug 1333255, which consists of the following changesets: ea72464d8e6b eded37d02c46 c94448aa6851 f30595d9d0a7 7d523705b7f0 6818bc4b6e4b 75d1c9a2832b b08ccd2c80c5 de68c1961b50 f53592e9d731 848332bc134a 4da50aa3f6cb 326514698b7c
taskcluster/ci/android-stuff/kind.yml
taskcluster/ci/artifact-build/kind.yml
taskcluster/ci/balrog/kind.yml
taskcluster/ci/beetmover-checksums/kind.yml
taskcluster/ci/beetmover-l10n/kind.yml
taskcluster/ci/beetmover/kind.yml
taskcluster/ci/build-signing/kind.yml
taskcluster/ci/build/kind.yml
taskcluster/ci/checksums-signing/kind.yml
taskcluster/ci/docker-image/image.yml
taskcluster/ci/docker-image/kind.yml
taskcluster/ci/hazard/kind.yml
taskcluster/ci/l10n/kind.yml
taskcluster/ci/nightly-l10n-signing/kind.yml
taskcluster/ci/nightly-l10n/kind.yml
taskcluster/ci/source-test/kind.yml
taskcluster/ci/spidermonkey/kind.yml
taskcluster/ci/static-analysis/kind.yml
taskcluster/ci/test/kind.yml
taskcluster/ci/toolchain/kind.yml
taskcluster/ci/upload-symbols/kind.yml
taskcluster/ci/valgrind/kind.yml
taskcluster/docker/index-task/Dockerfile
taskcluster/docker/index-task/README
taskcluster/docker/index-task/insert-indexes.js
taskcluster/docker/index-task/npm-shrinkwrap.json
taskcluster/docker/index-task/package.json
taskcluster/docs/index.rst
taskcluster/docs/loading.rst
taskcluster/docs/optimization.rst
taskcluster/docs/taskgraph.rst
taskcluster/mach_commands.py
taskcluster/taskgraph/decision.py
taskcluster/taskgraph/generator.py
taskcluster/taskgraph/loader/__init__.py
taskcluster/taskgraph/loader/balrog.py
taskcluster/taskgraph/loader/beetmover.py
taskcluster/taskgraph/loader/beetmover_checksums.py
taskcluster/taskgraph/loader/checksums_signing.py
taskcluster/taskgraph/loader/post_build.py
taskcluster/taskgraph/loader/repacks.py
taskcluster/taskgraph/loader/signing.py
taskcluster/taskgraph/loader/test.py
taskcluster/taskgraph/loader/transform.py
taskcluster/taskgraph/morph.py
taskcluster/taskgraph/optimize.py
taskcluster/taskgraph/task.py
taskcluster/taskgraph/task/__init__.py
taskcluster/taskgraph/task/balrog.py
taskcluster/taskgraph/task/base.py
taskcluster/taskgraph/task/beetmover.py
taskcluster/taskgraph/task/beetmover_checksums.py
taskcluster/taskgraph/task/checksums_signing.py
taskcluster/taskgraph/task/docker_image.py
taskcluster/taskgraph/task/post_build.py
taskcluster/taskgraph/task/repacks.py
taskcluster/taskgraph/task/signing.py
taskcluster/taskgraph/task/test.py
taskcluster/taskgraph/task/transform.py
taskcluster/taskgraph/taskgraph.py
taskcluster/taskgraph/test/test_create.py
taskcluster/taskgraph/test/test_decision.py
taskcluster/taskgraph/test/test_generator.py
taskcluster/taskgraph/test/test_optimize.py
taskcluster/taskgraph/test/test_target_tasks.py
taskcluster/taskgraph/test/test_task_docker_image.py
taskcluster/taskgraph/test/test_taskgraph.py
taskcluster/taskgraph/test/test_try_option_syntax.py
taskcluster/taskgraph/test/util.py
taskcluster/taskgraph/transforms/docker_image.py
taskcluster/taskgraph/transforms/job/__init__.py
taskcluster/taskgraph/transforms/job/toolchain.py
taskcluster/taskgraph/transforms/task.py
taskcluster/taskgraph/transforms/tests.py
--- a/taskcluster/ci/android-stuff/kind.yml
+++ b/taskcluster/ci/android-stuff/kind.yml
@@ -1,17 +1,17 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 # The name of this kind should suggest it's not meant to be permanent.  This is
 # a temporary place to generate these tasks in Bug 1286075 until they are
 # rewritten in a better way.
 
-loader: taskgraph.loader.transform:loader
+implementation: taskgraph.task.transform:TransformTask
 
 transforms:
    - taskgraph.transforms.android_stuff:transforms
    - taskgraph.transforms.task:transforms
 
 jobs:
     android-api-15-gradle-dependencies:
         description: "Android armv7 API 15+ gradle dependencies"
@@ -53,19 +53,19 @@ jobs:
             command:
               - "/bin/bash"
               - "-c"
               - "/home/worker/bin/before.sh && /home/worker/bin/build.sh && /home/worker/bin/after.sh && true\n"
             max-run-time: 36000
         scopes:
           - docker-worker:relengapi-proxy:tooltool.download.internal
           - docker-worker:relengapi-proxy:tooltool.download.public
-        optimizations:
-          - - files-changed
-            - - "mobile/android/config/**"
+        when:
+            files-changed:
+              - "mobile/android/config/**"
               - "testing/mozharness/configs/builds/releng_sub_android_configs/*gradle_dependencies.py"
               - "**/*.gradle"
 
     android-test:
         description: "Android armv7 unit tests"
         attributes:
             build_platform: android-test
             build_type: opt
@@ -103,19 +103,19 @@ jobs:
             command:
               # NOTE: this could probably be a job description with run.using = 'mozharness'
               - "/bin/bash"
               - "bin/build.sh"
             max-run-time: 36000
         scopes:
           - docker-worker:relengapi-proxy:tooltool.download.internal
           - docker-worker:relengapi-proxy:tooltool.download.public
-        optimizations:
-          - - files-changed
-            - - "mobile/android/base/**"
+        when:
+            files-changed:
+              - "mobile/android/base/**"
               - "mobile/android/tests/background/junit4/**"
 
     android-lint:
         description: "Android lint"
         attributes:
             build_platform: android-lint
             build_type: opt
         treeherder:
@@ -158,19 +158,19 @@ jobs:
             command:
               # NOTE: this could probably be a job description with run.using = 'mozharness'
               - "/bin/bash"
               - "bin/build.sh"
             max-run-time: 36000
         scopes:
           - docker-worker:relengapi-proxy:tooltool.download.internal
           - docker-worker:relengapi-proxy:tooltool.download.public
-        optimizations:
-          - - files-changed
-            - - "mobile/android/**/*.java"
+        when:
+            files-changed:
+              - "mobile/android/**/*.java"
               - "mobile/android/**/*.jpeg"
               - "mobile/android/**/*.jpg"
               - "mobile/android/**/*.png"
               - "mobile/android/**/*.svg"
               - "mobile/android/**/*.xml" # Manifest & android resources
               - "mobile/android/**/build.gradle"
 
     android-checkstyle:
@@ -212,19 +212,19 @@ jobs:
             command:
               # NOTE: this could probably be a job description with run.using = 'mozharness'
               - "/bin/bash"
               - "bin/build.sh"
             max-run-time: 36000
         scopes:
           - docker-worker:relengapi-proxy:tooltool.download.internal
           - docker-worker:relengapi-proxy:tooltool.download.public
-        optimizations:
-          - - files-changed
-            - - "mobile/android/**/checkstyle.xml"
+        when:
+            files-changed:
+              - "mobile/android/**/checkstyle.xml"
               - "mobile/android/**/*.gradle"
               - "mobile/android/**/*.java"
 
     android-findbugs:
         description: "Android findbugs"
         attributes:
             build_platform: android-findbugs
             build_type: opt
@@ -262,12 +262,12 @@ jobs:
             command:
               # NOTE: this could probably be a job description with run.using = 'mozharness'
               - "/bin/bash"
               - "bin/build.sh"
             max-run-time: 36000
         scopes:
           - docker-worker:relengapi-proxy:tooltool.download.internal
           - docker-worker:relengapi-proxy:tooltool.download.public
-        optimizations:
-          - - files-changed
-            - - "mobile/android/**/*.gradle"
+        when:
+            files-changed:
+              - "mobile/android/**/*.gradle"
               - "mobile/android/**/*.java"
--- a/taskcluster/ci/artifact-build/kind.yml
+++ b/taskcluster/ci/artifact-build/kind.yml
@@ -1,13 +1,13 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-loader: taskgraph.loader.transform:loader
+implementation: taskgraph.task.transform:TransformTask
 
 transforms:
    - taskgraph.transforms.build_attrs:transforms
    - taskgraph.transforms.job:transforms
    - taskgraph.transforms.task:transforms
 
 jobs:
     linux64-artifact/opt:
--- a/taskcluster/ci/balrog/kind.yml
+++ b/taskcluster/ci/balrog/kind.yml
@@ -1,13 +1,13 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-loader: taskgraph.loader.balrog:loader
+implementation: taskgraph.task.balrog:BalrogTask
 
 transforms:
    - taskgraph.transforms.balrog:transforms
    - taskgraph.transforms.task:transforms
 
 kind-dependencies:
   - beetmover
   - beetmover-l10n
--- a/taskcluster/ci/beetmover-checksums/kind.yml
+++ b/taskcluster/ci/beetmover-checksums/kind.yml
@@ -1,12 +1,12 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-loader: taskgraph.loader.beetmover_checksums:loader
+implementation: taskgraph.task.beetmover_checksums:BeetmoverChecksumsTask
 
 transforms:
    - taskgraph.transforms.beetmover_checksums:transforms
    - taskgraph.transforms.task:transforms
 
 kind-dependencies:
   - checksums-signing
--- a/taskcluster/ci/beetmover-l10n/kind.yml
+++ b/taskcluster/ci/beetmover-l10n/kind.yml
@@ -1,13 +1,13 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-loader: taskgraph.loader.beetmover:loader
+implementation: taskgraph.task.beetmover:BeetmoverTask
 
 transforms:
    - taskgraph.transforms.beetmover_l10n:transforms
    - taskgraph.transforms.beetmover:transforms
    - taskgraph.transforms.task:transforms
 
 kind-dependencies:
   - nightly-l10n-signing
--- a/taskcluster/ci/beetmover/kind.yml
+++ b/taskcluster/ci/beetmover/kind.yml
@@ -1,12 +1,12 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-loader: taskgraph.loader.beetmover:loader
+implementation: taskgraph.task.beetmover:BeetmoverTask
 
 transforms:
    - taskgraph.transforms.beetmover:transforms
    - taskgraph.transforms.task:transforms
 
 kind-dependencies:
   - build-signing
--- a/taskcluster/ci/build-signing/kind.yml
+++ b/taskcluster/ci/build-signing/kind.yml
@@ -1,13 +1,13 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-loader: taskgraph.loader.signing:loader
+implementation: taskgraph.task.signing:SigningTask
 
 transforms:
    - taskgraph.transforms.build_signing:transforms
    - taskgraph.transforms.signing:transforms
    - taskgraph.transforms.task:transforms
 
 kind-dependencies:
   - build
--- a/taskcluster/ci/build/kind.yml
+++ b/taskcluster/ci/build/kind.yml
@@ -1,13 +1,13 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-loader: taskgraph.loader.transform:loader
+implementation: taskgraph.task.transform:TransformTask
 
 transforms:
    - taskgraph.transforms.build:transforms
    - taskgraph.transforms.build_attrs:transforms
    - taskgraph.transforms.job:transforms
    - taskgraph.transforms.task:transforms
 
 jobs-from:
--- a/taskcluster/ci/checksums-signing/kind.yml
+++ b/taskcluster/ci/checksums-signing/kind.yml
@@ -1,13 +1,13 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-loader: taskgraph.loader.checksums_signing:loader
+implementation: taskgraph.task.checksums_signing:ChecksumsSigningTask
 
 transforms:
    - taskgraph.transforms.checksums_signing:transforms
    - taskgraph.transforms.task:transforms
 
 kind-dependencies:
   - beetmover
   - beetmover-l10n
new file mode 100644
--- /dev/null
+++ b/taskcluster/ci/docker-image/image.yml
@@ -0,0 +1,68 @@
+---
+task:
+  created:
+    relative-datestamp: "0 seconds"
+  deadline:
+    relative-datestamp: "24 hours"
+  metadata:
+    name: 'Docker Image Build: {{image_name}}'
+    description: 'Build the docker image {{image_name}} for use by dependent tasks'
+    source: '{{source}}'
+    owner: mozilla-taskcluster-maintenance@mozilla.com
+  tags:
+    createdForUser: '{{owner}}'
+
+  workerType: gecko-images
+  provisionerId: aws-provisioner-v1
+  schedulerId: task-graph-scheduler
+
+  routes:
+      # Indexing routes to avoid building the same image twice
+      - index.{{index_image_prefix}}.level-{{level}}.{{image_name}}.latest
+      - index.{{index_image_prefix}}.level-{{level}}.{{image_name}}.pushdate.{{year}}.{{month}}-{{day}}-{{pushtime}}
+      - index.{{index_image_prefix}}.level-{{level}}.{{image_name}}.hash.{{context_hash}}
+      # Treeherder routes
+      - tc-treeherder.v2.{{project}}.{{head_rev}}.{{pushlog_id}}
+      - tc-treeherder-stage.v2.{{project}}.{{head_rev}}.{{pushlog_id}}
+
+  scopes:
+      - secrets:get:project/taskcluster/gecko/hgfingerprint
+      - docker-worker:cache:level-{{level}}-imagebuilder-v1
+
+  payload:
+    env:
+      HASH: '{{context_hash}}'
+      PROJECT: '{{project}}'
+      CONTEXT_URL: '{{context_url}}'
+      IMAGE_NAME: '{{image_name}}'
+      GECKO_BASE_REPOSITORY: '{{base_repository}}'
+      GECKO_HEAD_REPOSITORY: '{{head_repository}}'
+      GECKO_HEAD_REV: '{{head_rev}}'
+      HG_STORE_PATH: '/home/worker/checkouts/hg-store'
+    cache:
+      'level-{{level}}-imagebuilder-v1': '/home/worker/checkouts'
+    features:
+      dind: true
+      chainOfTrust: true
+      taskclusterProxy: true
+    image: '{{#docker_image}}image_builder{{/docker_image}}'
+    maxRunTime: 3600
+    artifacts:
+      '{{artifact_path}}':
+        type: 'file'
+        path: '/home/worker/workspace/artifacts/image.tar.zst'
+        expires:
+          relative-datestamp: "1 year"
+  extra:
+    imageMeta: # Useful when converting back from JSON in action tasks
+      level: '{{level}}'
+      contextHash: '{{context_hash}}'
+      imageName: '{{image_name}}'
+    treeherderEnv:
+      - staging
+      - production
+    treeherder:
+      jobKind: other
+      build:
+        platform: 'taskcluster-images'
+      groupSymbol: 'I'
--- a/taskcluster/ci/docker-image/kind.yml
+++ b/taskcluster/ci/docker-image/kind.yml
@@ -1,30 +1,19 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-loader: taskgraph.loader.transform:loader
-
-transforms:
-  - taskgraph.transforms.docker_image:transforms
-  - taskgraph.transforms.task:transforms
+implementation: 'taskgraph.task.docker_image:DockerImageTask'
+images_path: '../../../taskcluster/docker'
 
 # make a task for each docker-image we might want.  For the moment, since we
 # write artifacts for each, these are whitelisted, but ideally that will change
 # (to use subdirectory clones of the proper directory), at which point we can
 # generate tasks for every docker image in the directory, secure in the
 # knowledge that unnecessary images will be omitted from the target task graph
-jobs:
-  desktop-test:
-    symbol: I(dt)
-  desktop1604-test:
-    symbol: I(dt16t)
-  desktop-build:
-    symbol: I(db)
-  tester:
-    symbol: I(tst)
-  lint:
-    symbol: I(lnt)
-  android-gradle-build:
-    symbol: I(agb)
-  index-task:
-    symbol: I(idx)
+images:
+  desktop-test: dt
+  desktop1604-test: dt16t
+  desktop-build: db
+  tester: tst
+  lint: lnt
+  android-gradle-build: agb
--- a/taskcluster/ci/hazard/kind.yml
+++ b/taskcluster/ci/hazard/kind.yml
@@ -1,13 +1,13 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-loader: taskgraph.loader.transform:loader
+implementation: taskgraph.task.transform:TransformTask
 
 transforms:
    - taskgraph.transforms.build_attrs:transforms
    - taskgraph.transforms.job:transforms
    - taskgraph.transforms.task:transforms
 
 job-defaults:
     treeherder:
--- a/taskcluster/ci/l10n/kind.yml
+++ b/taskcluster/ci/l10n/kind.yml
@@ -1,13 +1,13 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-loader: taskgraph.loader.repacks:loader
+implementation: taskgraph.task.repacks:RepackTask
 
 
 transforms:
    - taskgraph.transforms.l10n:transforms
    - taskgraph.transforms.job:transforms
    - taskgraph.transforms.task:transforms
 
 kind-dependencies:
--- a/taskcluster/ci/nightly-l10n-signing/kind.yml
+++ b/taskcluster/ci/nightly-l10n-signing/kind.yml
@@ -1,13 +1,13 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-loader: taskgraph.loader.signing:loader
+implementation: taskgraph.task.signing:SigningTask
 
 transforms:
    - taskgraph.transforms.nightly_l10n_signing:transforms
    - taskgraph.transforms.signing:transforms
    - taskgraph.transforms.task:transforms
 
 kind-dependencies:
   - nightly-l10n
--- a/taskcluster/ci/nightly-l10n/kind.yml
+++ b/taskcluster/ci/nightly-l10n/kind.yml
@@ -1,13 +1,13 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-loader: taskgraph.loader.repacks:loader
+implementation: taskgraph.task.repacks:RepackTask
 
 transforms:
    - taskgraph.transforms.l10n:transforms
    - taskgraph.transforms.job:transforms
    - taskgraph.transforms.task:transforms
 
 kind-dependencies:
    - build
--- a/taskcluster/ci/source-test/kind.yml
+++ b/taskcluster/ci/source-test/kind.yml
@@ -1,13 +1,13 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-loader: taskgraph.loader.transform:loader
+implementation: taskgraph.task.transform:TransformTask
 
 transforms:
    - taskgraph.transforms.build_attrs:transforms
    - taskgraph.transforms.job:transforms
    - taskgraph.transforms.task:transforms
 
 jobs-from:
     - python-tests.yml
--- a/taskcluster/ci/spidermonkey/kind.yml
+++ b/taskcluster/ci/spidermonkey/kind.yml
@@ -1,13 +1,13 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-loader: taskgraph.loader.transform:loader
+implementation: taskgraph.task.transform:TransformTask
 
 transforms:
    - taskgraph.transforms.build_attrs:transforms
    - taskgraph.transforms.job:transforms
    - taskgraph.transforms.task:transforms
 
 job-defaults:
     treeherder:
--- a/taskcluster/ci/static-analysis/kind.yml
+++ b/taskcluster/ci/static-analysis/kind.yml
@@ -1,13 +1,13 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-loader: taskgraph.loader.transform:loader
+implementation: taskgraph.task.transform:TransformTask
 
 transforms:
    - taskgraph.transforms.build_attrs:transforms
    - taskgraph.transforms.job:transforms
    - taskgraph.transforms.task:transforms
 
 job-defaults:
     index:
--- a/taskcluster/ci/test/kind.yml
+++ b/taskcluster/ci/test/kind.yml
@@ -1,9 +1,9 @@
-loader: taskgraph.loader.test:loader
+implementation: taskgraph.task.test:TestTask
 
 kind-dependencies:
     - build
 
 transforms:
    - taskgraph.transforms.tests:transforms
    - taskgraph.transforms.job:transforms
    - taskgraph.transforms.task:transforms
--- a/taskcluster/ci/toolchain/kind.yml
+++ b/taskcluster/ci/toolchain/kind.yml
@@ -1,13 +1,13 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-loader: taskgraph.loader.transform:loader
+implementation: taskgraph.task.transform:TransformTask
 
 transforms:
    - taskgraph.transforms.build_attrs:transforms
    - taskgraph.transforms.job:transforms
    - taskgraph.transforms.task:transforms
 
 jobs-from:
    - linux.yml
--- a/taskcluster/ci/upload-symbols/kind.yml
+++ b/taskcluster/ci/upload-symbols/kind.yml
@@ -1,13 +1,13 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-loader: taskgraph.loader.post_build:loader
+implementation: taskgraph.task.post_build:PostBuildTask
 
 transforms:
    - taskgraph.transforms.upload_symbols:transforms
    - taskgraph.transforms.task:transforms
 
 kind-dependencies:
     - build
 
--- a/taskcluster/ci/valgrind/kind.yml
+++ b/taskcluster/ci/valgrind/kind.yml
@@ -1,13 +1,13 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-loader: taskgraph.loader.transform:loader
+implementation: taskgraph.task.transform:TransformTask
 
 transforms:
    - taskgraph.transforms.build_attrs:transforms
    - taskgraph.transforms.job:transforms
    - taskgraph.transforms.task:transforms
 
 jobs:
     linux64-valgrind/opt:
deleted file mode 100644
--- a/taskcluster/docker/index-task/Dockerfile
+++ /dev/null
@@ -1,11 +0,0 @@
-FROM node:6-alpine
-
-ENV       NODE_ENV        production
-RUN       mkdir /app
-ADD       insert-indexes.js   /app/
-ADD       package.json        /app/
-ADD       npm-shrinkwrap.json /app/
-WORKDIR   /app
-RUN       npm install && npm cache clean
-
-ENTRYPOINT ["node"]
deleted file mode 100644
--- a/taskcluster/docker/index-task/README
+++ /dev/null
@@ -1,36 +0,0 @@
-Index-Image
-===========
-
-This image is designed to be used for indexing other tasks. It takes a task
-definition as follows:
-```js
-{
-  ...,
-  scopes: [
-    'index:insert-task:my-index.namespace',
-    'index:insert-task:...',
-  ],
-  payload: {
-    image: '...',
-    env: {
-      TARGET_TASKID: '<taskId-to-be-indexed>',
-    },
-    command: [
-      'insert-indexes.js',
-      'my-index.namespace.one',
-      'my-index.namespace.two',
-      '....',
-    ],
-    features: {
-      taskclusterProxy: true,
-    },
-    maxRunTime: 600,
-  },
-}
-```
-
-As can be seen the `taskId` to be indexed is given by the environment variable
-`TARGET_TASKID` and the `command` arguments specifies namespaces that it must
-be index under. It is **important** to also include scopes on the form
-`index:insert-task:<...>` for all namespaces `<...>` given as `command`
-arguments.
deleted file mode 100644
--- a/taskcluster/docker/index-task/insert-indexes.js
+++ /dev/null
@@ -1,44 +0,0 @@
-let taskcluster = require('taskcluster-client');
-
-// Create instance of index client
-let index = new taskcluster.Index({
-  delayFactor:    750,  // Good solid delay for background process
-  retries:        8,    // A few extra retries for robustness
-  baseUrl:        'taskcluster/index/v1',
-});
-
-// Create queue instance for fetching taskId
-let queue = new taskcluster.Queue();
-
-// Load input
-let taskId = process.env.TARGET_TASKID;
-let namespaces = process.argv.slice(2);
-
-// Validate input
-if (!taskId) {
-  console.log('Expected target task as environment variable: TARGET_TASKID');
-  process.exit(1);
-}
-
-// Fetch task definition to get expiration and then insert into index
-queue.task(taskId).then(task => task.expires).then(expires => {
-  return Promise.all(namespaces.map(namespace => {
-    console.log('Inserting %s into index under: %s', taskId, namespace);
-    return index.insertTask(namespace, {
-      taskId,
-      rank: 0,
-      data: {},
-      expires,
-    });
-  }));
-}).then(() => {
-  console.log('indexing successfully completed.');
-  process.exit(0);
-}).catch(err => {
-  console.log('Error:\n%s', err);
-  if (err.stack) {
-    console.log('Stack:\n%s', err.stack());
-  }
-  console.log('Properties:\n%j', err);
-  throw err;
-}).catch(() => process.exit(1));
deleted file mode 100644
--- a/taskcluster/docker/index-task/npm-shrinkwrap.json
+++ /dev/null
@@ -1,309 +0,0 @@
-{
-  "dependencies": {
-    "amqplib": {
-      "version": "0.5.1",
-      "from": "amqplib@>=0.5.1 <0.6.0",
-      "resolved": "https://registry.npmjs.org/amqplib/-/amqplib-0.5.1.tgz"
-    },
-    "asap": {
-      "version": "1.0.0",
-      "from": "asap@>=1.0.0 <1.1.0",
-      "resolved": "https://registry.npmjs.org/asap/-/asap-1.0.0.tgz"
-    },
-    "async": {
-      "version": "0.9.2",
-      "from": "async@>=0.9.0 <0.10.0",
-      "resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz"
-    },
-    "bitsyntax": {
-      "version": "0.0.4",
-      "from": "bitsyntax@>=0.0.4 <0.1.0",
-      "resolved": "https://registry.npmjs.org/bitsyntax/-/bitsyntax-0.0.4.tgz"
-    },
-    "bluebird": {
-      "version": "3.4.7",
-      "from": "bluebird@>=3.4.6 <4.0.0",
-      "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.4.7.tgz"
-    },
-    "boom": {
-      "version": "2.10.1",
-      "from": "boom@>=2.0.0 <3.0.0",
-      "resolved": "https://registry.npmjs.org/boom/-/boom-2.10.1.tgz"
-    },
-    "buffer-more-ints": {
-      "version": "0.0.2",
-      "from": "buffer-more-ints@0.0.2",
-      "resolved": "https://registry.npmjs.org/buffer-more-ints/-/buffer-more-ints-0.0.2.tgz"
-    },
-    "combined-stream": {
-      "version": "0.0.7",
-      "from": "combined-stream@>=0.0.4 <0.1.0",
-      "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz"
-    },
-    "component-emitter": {
-      "version": "1.2.1",
-      "from": "component-emitter@>=1.2.0 <1.3.0",
-      "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.2.1.tgz"
-    },
-    "cookiejar": {
-      "version": "2.0.6",
-      "from": "cookiejar@2.0.6",
-      "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.0.6.tgz"
-    },
-    "core-util-is": {
-      "version": "1.0.2",
-      "from": "core-util-is@>=1.0.0 <1.1.0",
-      "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz"
-    },
-    "cryptiles": {
-      "version": "2.0.5",
-      "from": "cryptiles@>=2.0.0 <3.0.0",
-      "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-2.0.5.tgz"
-    },
-    "debug": {
-      "version": "2.6.0",
-      "from": "debug@>=2.1.3 <3.0.0",
-      "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.0.tgz"
-    },
-    "delayed-stream": {
-      "version": "0.0.5",
-      "from": "delayed-stream@0.0.5",
-      "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz"
-    },
-    "eventsource": {
-      "version": "0.1.6",
-      "from": "eventsource@0.1.6",
-      "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-0.1.6.tgz",
-      "optional": true
-    },
-    "extend": {
-      "version": "3.0.0",
-      "from": "extend@3.0.0",
-      "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.0.tgz"
-    },
-    "faye-websocket": {
-      "version": "0.11.1",
-      "from": "faye-websocket@>=0.11.0 <0.12.0",
-      "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.1.tgz",
-      "optional": true
-    },
-    "form-data": {
-      "version": "0.2.0",
-      "from": "form-data@0.2.0",
-      "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.2.0.tgz"
-    },
-    "formidable": {
-      "version": "1.0.17",
-      "from": "formidable@>=1.0.14 <1.1.0",
-      "resolved": "https://registry.npmjs.org/formidable/-/formidable-1.0.17.tgz"
-    },
-    "hawk": {
-      "version": "2.3.1",
-      "from": "hawk@>=2.3.1 <3.0.0",
-      "resolved": "https://registry.npmjs.org/hawk/-/hawk-2.3.1.tgz"
-    },
-    "hoek": {
-      "version": "2.16.3",
-      "from": "hoek@>=2.0.0 <3.0.0",
-      "resolved": "https://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz"
-    },
-    "inherits": {
-      "version": "2.0.3",
-      "from": "inherits@>=2.0.1 <2.1.0",
-      "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz"
-    },
-    "isarray": {
-      "version": "0.0.1",
-      "from": "isarray@0.0.1",
-      "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz"
-    },
-    "json3": {
-      "version": "3.3.2",
-      "from": "json3@>=3.3.2 <4.0.0",
-      "resolved": "https://registry.npmjs.org/json3/-/json3-3.3.2.tgz",
-      "optional": true
-    },
-    "lodash": {
-      "version": "3.10.1",
-      "from": "lodash@>=3.6.0 <4.0.0",
-      "resolved": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz"
-    },
-    "methods": {
-      "version": "1.1.2",
-      "from": "methods@>=1.1.1 <1.2.0",
-      "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz"
-    },
-    "mime": {
-      "version": "1.3.4",
-      "from": "mime@1.3.4",
-      "resolved": "https://registry.npmjs.org/mime/-/mime-1.3.4.tgz"
-    },
-    "mime-db": {
-      "version": "1.12.0",
-      "from": "mime-db@>=1.12.0 <1.13.0",
-      "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.12.0.tgz"
-    },
-    "mime-types": {
-      "version": "2.0.14",
-      "from": "mime-types@>=2.0.3 <2.1.0",
-      "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.0.14.tgz"
-    },
-    "ms": {
-      "version": "0.7.2",
-      "from": "ms@0.7.2",
-      "resolved": "https://registry.npmjs.org/ms/-/ms-0.7.2.tgz"
-    },
-    "original": {
-      "version": "1.0.0",
-      "from": "original@>=0.0.5",
-      "resolved": "https://registry.npmjs.org/original/-/original-1.0.0.tgz",
-      "optional": true,
-      "dependencies": {
-        "url-parse": {
-          "version": "1.0.5",
-          "from": "url-parse@>=1.0.0 <1.1.0",
-          "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.0.5.tgz",
-          "optional": true
-        }
-      }
-    },
-    "promise": {
-      "version": "6.1.0",
-      "from": "promise@>=6.1.0 <7.0.0",
-      "resolved": "https://registry.npmjs.org/promise/-/promise-6.1.0.tgz"
-    },
-    "qs": {
-      "version": "2.3.3",
-      "from": "qs@2.3.3",
-      "resolved": "https://registry.npmjs.org/qs/-/qs-2.3.3.tgz"
-    },
-    "querystringify": {
-      "version": "0.0.4",
-      "from": "querystringify@>=0.0.0 <0.1.0",
-      "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-0.0.4.tgz"
-    },
-    "readable-stream": {
-      "version": "1.1.14",
-      "from": "readable-stream@>=1.0.0 <2.0.0 >=1.1.9",
-      "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz"
-    },
-    "reduce-component": {
-      "version": "1.0.1",
-      "from": "reduce-component@1.0.1",
-      "resolved": "https://registry.npmjs.org/reduce-component/-/reduce-component-1.0.1.tgz"
-    },
-    "requires-port": {
-      "version": "1.0.0",
-      "from": "requires-port@>=1.0.0 <1.1.0",
-      "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz"
-    },
-    "slugid": {
-      "version": "1.1.0",
-      "from": "slugid@>=1.1.0 <2.0.0",
-      "resolved": "https://registry.npmjs.org/slugid/-/slugid-1.1.0.tgz"
-    },
-    "sntp": {
-      "version": "1.0.9",
-      "from": "sntp@>=1.0.0 <2.0.0",
-      "resolved": "https://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz"
-    },
-    "sockjs-client": {
-      "version": "1.1.2",
-      "from": "sockjs-client@>=1.0.3 <2.0.0",
-      "resolved": "https://registry.npmjs.org/sockjs-client/-/sockjs-client-1.1.2.tgz",
-      "optional": true
-    },
-    "string_decoder": {
-      "version": "0.10.31",
-      "from": "string_decoder@>=0.10.0 <0.11.0",
-      "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz"
-    },
-    "superagent": {
-      "version": "1.7.2",
-      "from": "superagent@>=1.7.0 <1.8.0",
-      "resolved": "https://registry.npmjs.org/superagent/-/superagent-1.7.2.tgz",
-      "dependencies": {
-        "readable-stream": {
-          "version": "1.0.27-1",
-          "from": "readable-stream@1.0.27-1",
-          "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.27-1.tgz"
-        }
-      }
-    },
-    "superagent-hawk": {
-      "version": "0.0.6",
-      "from": "superagent-hawk@>=0.0.6 <0.0.7",
-      "resolved": "https://registry.npmjs.org/superagent-hawk/-/superagent-hawk-0.0.6.tgz",
-      "dependencies": {
-        "boom": {
-          "version": "0.4.2",
-          "from": "boom@>=0.4.0 <0.5.0",
-          "resolved": "https://registry.npmjs.org/boom/-/boom-0.4.2.tgz"
-        },
-        "cryptiles": {
-          "version": "0.2.2",
-          "from": "cryptiles@>=0.2.0 <0.3.0",
-          "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.2.2.tgz"
-        },
-        "hawk": {
-          "version": "1.0.0",
-          "from": "hawk@>=1.0.0 <1.1.0",
-          "resolved": "https://registry.npmjs.org/hawk/-/hawk-1.0.0.tgz"
-        },
-        "hoek": {
-          "version": "0.9.1",
-          "from": "hoek@>=0.9.0 <0.10.0",
-          "resolved": "https://registry.npmjs.org/hoek/-/hoek-0.9.1.tgz"
-        },
-        "qs": {
-          "version": "0.6.6",
-          "from": "qs@>=0.6.6 <0.7.0",
-          "resolved": "https://registry.npmjs.org/qs/-/qs-0.6.6.tgz"
-        },
-        "sntp": {
-          "version": "0.2.4",
-          "from": "sntp@>=0.2.0 <0.3.0",
-          "resolved": "https://registry.npmjs.org/sntp/-/sntp-0.2.4.tgz"
-        }
-      }
-    },
-    "superagent-promise": {
-      "version": "0.2.0",
-      "from": "superagent-promise@>=0.2.0 <0.3.0",
-      "resolved": "https://registry.npmjs.org/superagent-promise/-/superagent-promise-0.2.0.tgz"
-    },
-    "taskcluster-client": {
-      "version": "1.6.3",
-      "from": "taskcluster-client@>=1.6.2 <2.0.0",
-      "resolved": "https://registry.npmjs.org/taskcluster-client/-/taskcluster-client-1.6.3.tgz"
-    },
-    "url-join": {
-      "version": "0.0.1",
-      "from": "url-join@>=0.0.1 <0.0.2",
-      "resolved": "https://registry.npmjs.org/url-join/-/url-join-0.0.1.tgz"
-    },
-    "url-parse": {
-      "version": "1.1.7",
-      "from": "url-parse@>=1.1.1 <2.0.0",
-      "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.1.7.tgz",
-      "optional": true
-    },
-    "uuid": {
-      "version": "2.0.3",
-      "from": "uuid@>=2.0.1 <3.0.0",
-      "resolved": "https://registry.npmjs.org/uuid/-/uuid-2.0.3.tgz"
-    },
-    "websocket-driver": {
-      "version": "0.6.5",
-      "from": "websocket-driver@>=0.5.1",
-      "resolved": "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.6.5.tgz",
-      "optional": true
-    },
-    "websocket-extensions": {
-      "version": "0.1.1",
-      "from": "websocket-extensions@>=0.1.1",
-      "resolved": "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.1.tgz",
-      "optional": true
-    }
-  }
-}
deleted file mode 100644
--- a/taskcluster/docker/index-task/package.json
+++ /dev/null
@@ -1,9 +0,0 @@
-{
-  "private": true,
-  "scripts":{
-    "start": "node index.js"
-  },
-  "dependencies": {
-    "taskcluster-client": "^1.6.2"
-  }
-}
--- a/taskcluster/docs/index.rst
+++ b/taskcluster/docs/index.rst
@@ -19,16 +19,15 @@ referring to the source where necessary.
 particular goal in mind and would rather avoid becoming a task-graph expert,
 check out the :doc:`how-to section <how-tos>`.
 
 .. toctree::
 
     taskgraph
     loading
     transforms
-    optimization
     yaml-templates
     docker-images
     cron
     how-tos
     in-tree-actions
     action-spec
     reference
--- a/taskcluster/docs/loading.rst
+++ b/taskcluster/docs/loading.rst
@@ -1,42 +1,39 @@
 Loading Tasks
 =============
 
 The full task graph generation involves creating tasks for each kind.  Kinds
-are ordered to satisfy ``kind-dependencies``, and then the ``loader`` specified
-in ``kind.yml`` is used to load the tasks for that kind. It should point to
-a Python function like::
+are ordered to satisfy ``kind-dependencies``, and then the ``implementation``
+specified in ``kind.yml`` is used to load the tasks for that kind.
+
+Specifically, the class's ``load_tasks`` class method is called, and returns a
+list of new ``Task`` instances.
 
-    def loader(cls, kind, path, config, parameters, loaded_tasks):
-        pass
+TransformTask
+-------------
 
-The ``kind`` is the name of the kind; the configuration for that kind
-named this class.
-
-The ``path`` is the path to the configuration directory for the kind. This
-can be used to load extra data, templates, etc.
+Most kinds generate their tasks by starting with a set of items describing the
+jobs that should be performed and transforming them into task definitions.
+This is the familiar ``transforms`` key in ``kind.yml`` and is further
+documented in :doc:`transforms`.
 
-The ``parameters`` give details on which to base the task generation. See
-:ref:`parameters` for details.
-
-At the time this method is called, all kinds on which this kind depends
-(that is, specified in the ``kind-dependencies`` key in ``config``)
-have already loaded their tasks, and those tasks are available in
-the list ``loaded_tasks``.
+Such kinds generally specify their tasks in a common format: either based on a
+``jobs`` property in ``kind.yml``, or on YAML files listed in ``jobs-from``.
+This is handled by the ``TransformTask`` class in
+``taskcluster/taskgraph/task/transform.py``.
 
-The return value is a list of inputs to the transforms listed in the kind's
-``transforms`` property. The specific format for the input depends on the first
-transform - whatever it expects. The final transform should be
-``taskgraph.transform.task:transforms``, which produces the output format the
-task-graph generation infrastructure expects.
+For kinds producing tasks that depend on other tasks -- for example, signing
+tasks depend on build tasks -- ``TransformTask`` has a ``get_inputs`` method
+that can be overridden in subclasses and written to return a set of items based
+on tasks that already exist.  You can see a nice example of this behavior in
+``taskcluster/taskgraph/task/post_build.py``.
 
-The ``transforms`` key in ``kind.yml`` is further documented in
-:doc:`transforms`.  For more information on how all of this works, consult the
-docstrings and comments in the source code itself.
+For more information on how all of this works, consult the docstrings and
+comments in the source code itself.
 
 Try option syntax
 -----------------
 
 The ``parse-commit`` optional field specified in ``kind.yml`` links to a
 function to parse the command line options in the ``--message`` mach parameter.
 Currently, the only valid value is ``taskgraph.try_option_syntax:parse_message``.
 The parsed arguments are stored in ``config.config['args']``, it corresponds
deleted file mode 100644
--- a/taskcluster/docs/optimization.rst
+++ /dev/null
@@ -1,44 +0,0 @@
-Optimization
-============
-
-The objective of optimization to remove as many tasks from the graph as
-possible, as efficiently as possible, thereby delivering useful results as
-quickly as possible.  For example, ideally if only a test script is modified in
-a push, then the resulting graph contains only the corresponding test suite
-task.
-
-A task is said to be "optimized" when it is either replaced with an equivalent,
-already-existing task, or dropped from the graph entirely.
-
-Optimization Functions
-----------------------
-
-During the optimization phase of task-graph generation, each task is optimized
-in post-order, meaning that each task's dependencies will be optimized before
-the task itself is optimized.
-
-Each task has a ``task.optimizations`` property describing the optimization
-methods that apply.  Each is specified as a list of method and arguments. For
-example::
-
-    task.optimizations = [
-        ['seta'],
-        ['files-changed', ['js/**', 'tests/**']],
-    ]
-
-These methods are defined in ``taskcluster/taskgraph/optimize.py``.  They are
-applied in order, and the first to return a success value causes the task to
-be optimized.
-
-Each method can return either a taskId (indicating that the given task can be
-replaced) or indicate that the task can be optimized away. If a task on which
-others depend is optimized away, task-graph generation will fail.
-
-Optimizing Target Tasks
------------------------
-
-In some cases, such as try pushes, tasks in the target task set have been
-explicitly requested and are thus excluded from optimization. In other cases,
-the target task set is almost the entire task graph, so targetted tasks are
-considered for optimization.  This behavior is controlled with the
-``optimize_target_tasks`` parameter.
--- a/taskcluster/docs/taskgraph.rst
+++ b/taskcluster/docs/taskgraph.rst
@@ -91,24 +91,20 @@ Graph generation, as run via ``mach task
 #. For all kinds, generate all tasks.  The result is the "full task set"
 #. Create dependency links between tasks using kind-specific mechanisms.  The
    result is the "full task graph".
 #. Filter the target tasks (based on a series of filters, such as try syntax,
    tree-specific specifications, etc). The result is the "target task set".
 #. Based on the full task graph, calculate the transitive closure of the target
    task set.  That is, the target tasks and all requirements of those tasks.
    The result is the "target task graph".
-#. Optimize the target task graph using task-specific optimization methods.
+#. Optimize the target task graph based on kind-specific optimization methods.
    The result is the "optimized task graph" with fewer nodes than the target
-   task graph.  See :ref:`optimization`.
-#. Morph the graph. Morphs are like syntactic sugar: they keep the same meaning,
-   but express it in a lower-level way. These generally work around limitations
-   in the TaskCluster platform, such as number of dependencies or routes in
-   a task.
-#. Create tasks for all tasks in the morphed task graph.
+   task graph.
+#. Create tasks for all tasks in the optimized task graph.
 
 Transitive Closure
 ..................
 
 Transitive closure is a fancy name for this sort of operation:
 
  * start with a set of tasks
  * add all tasks on which any of those tasks depend
@@ -122,16 +118,42 @@ Then repeat: the test docker image task 
 tasks, but those build tasks depend on the build docker image task.  So add
 that build docker image task.  Repeat again: this time, none of the tasks in
 the set depend on a task not in the set, so nothing changes and the process is
 complete.
 
 And as you can see, the graph we've built now includes everything we wanted
 (the test jobs) plus everything required to do that (docker images, builds).
 
+Optimization
+------------
+
+The objective of optimization to remove as many tasks from the graph as
+possible, as efficiently as possible, thereby delivering useful results as
+quickly as possible.  For example, ideally if only a test script is modified in
+a push, then the resulting graph contains only the corresponding test suite
+task.
+
+A task is said to be "optimized" when it is either replaced with an equivalent,
+already-existing task, or dropped from the graph entirely.
+
+A task can be optimized if all of its dependencies can be optimized and none of
+its inputs have changed.  For a task on which no other tasks depend (a "leaf
+task"), the optimizer can determine what has changed by looking at the
+version-control history of the push: if the relevant files are not modified in
+the push, then it considers the inputs unchanged.  For tasks on which other
+tasks depend ("non-leaf tasks"), the optimizer must replace the task with
+another, equivalent task, so it generates a hash of all of the inputs and uses
+that to search for a matching, existing task.
+
+In some cases, such as try pushes, tasks in the target task set have been
+explicitly requested and are thus excluded from optimization. In other cases,
+the target task set is almost the entire task graph, so targetted tasks are
+considered for optimization.  This behavior is controlled with the
+``optimize_target_tasks`` parameter.
 
 Action Tasks
 ------------
 
 Action Tasks are tasks which help you to schedule new jobs via Treeherder's
 "Add New Jobs" feature. The Decision Task creates a YAML file named
 ``action.yml`` which can be used to schedule Action Tasks after suitably replacing
 ``{{decision_task_id}}`` and ``{{task_labels}}``, which correspond to the decision
@@ -207,38 +229,36 @@ Task graphs -- both the graph artifacts 
 output by the ``--json`` option to the ``mach taskgraph`` commands -- are JSON
 objects, keyed by label, or for optimized task graphs, by taskId.  For
 convenience, the decision task also writes out ``label-to-taskid.json``
 containing a mapping from label to taskId.  Each task in the graph is
 represented as a JSON object.
 
 Each task has the following properties:
 
-``kind``
-   The name of this task's kind
-
 ``task_id``
    The task's taskId (only for optimized task graphs)
 
 ``label``
    The task's label
 
 ``attributes``
    The task's attributes
 
 ``dependencies``
    The task's in-graph dependencies, represented as an object mapping
    dependency name to label (or to taskId for optimized task graphs)
 
-``optimizations``
-   The optimizations to be applied to this task
-
 ``task``
    The task's TaskCluster task definition.
 
+``kind_implementation``
+   The module and the class name which was used to implement this particular task.
+   It is always of the form ``<module-path>:<object-path>``
+
 The results from each command are in the same format, but with some differences
 in the content:
 
 * The ``tasks`` and ``target`` subcommands both return graphs with no edges.
   That is, just collections of tasks without any dependencies indicated.
 
 * The ``optimized`` subcommand returns tasks that have been assigned taskIds.
   The dependencies array, too, contains taskIds instead of labels, with
--- a/taskcluster/mach_commands.py
+++ b/taskcluster/mach_commands.py
@@ -101,21 +101,16 @@ class MachCommands(MachCommandBase):
     def taskgraph_target_taskgraph(self, **options):
         return self.show_taskgraph('target_task_graph', options)
 
     @ShowTaskGraphSubCommand('taskgraph', 'optimized',
                              description="Show the optimized taskgraph")
     def taskgraph_optimized(self, **options):
         return self.show_taskgraph('optimized_task_graph', options)
 
-    @ShowTaskGraphSubCommand('taskgraph', 'morphed',
-                             description="Show the morphed taskgraph")
-    def taskgraph_morphed(self, **options):
-        return self.show_taskgraph('morphed_task_graph', options)
-
     @SubCommand('taskgraph', 'decision',
                 description="Run the decision task")
     @CommandArgument('--root', '-r',
                      default='taskcluster/ci',
                      help="root of the taskgraph definition relative to topsrcdir")
     @CommandArgument('--base-repository',
                      required=True,
                      help='URL for "base" repository to clone')
--- a/taskcluster/taskgraph/decision.py
+++ b/taskcluster/taskgraph/decision.py
@@ -112,21 +112,21 @@ def taskgraph_decision(options):
     # this is just a test to check whether the from_json() function is working
     _, _ = TaskGraph.from_json(full_task_json)
 
     # write out the target task set to allow reproducing this as input
     write_artifact('target-tasks.json', tgg.target_task_set.tasks.keys())
 
     # write out the optimized task graph to describe what will actually happen,
     # and the map of labels to taskids
-    write_artifact('task-graph.json', tgg.morphed_task_graph.to_json())
+    write_artifact('task-graph.json', tgg.optimized_task_graph.to_json())
     write_artifact('label-to-taskid.json', tgg.label_to_taskid)
 
     # actually create the graph
-    create_tasks(tgg.morphed_task_graph, tgg.label_to_taskid, parameters)
+    create_tasks(tgg.optimized_task_graph, tgg.label_to_taskid, parameters)
 
 
 def get_decision_parameters(options):
     """
     Load parameters from the command-line options for 'taskgraph decision'.
     This also applies per-project parameters, based on the given project.
 
     """
--- a/taskcluster/taskgraph/generator.py
+++ b/taskcluster/taskgraph/generator.py
@@ -6,71 +6,54 @@ from __future__ import absolute_import, 
 import logging
 import os
 import yaml
 import copy
 
 from . import filter_tasks
 from .graph import Graph
 from .taskgraph import TaskGraph
-from .task import Task
 from .optimize import optimize_task_graph
-from .morph import morph
 from .util.python_path import find_object
-from .transforms.base import TransformSequence, TransformConfig
 from .util.verify import (
     verify_docs,
     verify_task_graph_symbol,
     verify_gecko_v2_routes,
 )
 
 logger = logging.getLogger(__name__)
 
 
 class Kind(object):
 
     def __init__(self, name, path, config):
         self.name = name
         self.path = path
         self.config = config
 
-    def _get_loader(self):
+    def _get_impl_class(self):
+        # load the class defined by implementation
         try:
-            loader = self.config['loader']
+            impl = self.config['implementation']
         except KeyError:
-            raise KeyError("{!r} does not define `loader`".format(self.path))
-        return find_object(loader)
+            raise KeyError("{!r} does not define implementation".format(self.path))
+        return find_object(impl)
 
     def load_tasks(self, parameters, loaded_tasks):
-        loader = self._get_loader()
+        impl_class = self._get_impl_class()
         config = copy.deepcopy(self.config)
 
         if 'parse-commit' in self.config:
             parse_commit = find_object(config['parse-commit'])
             config['args'] = parse_commit(parameters['message'])
         else:
             config['args'] = None
 
-        inputs = loader(self.name, self.path, config, parameters, loaded_tasks)
-
-        transforms = TransformSequence()
-        for xform_path in config['transforms']:
-            transform = find_object(xform_path)
-            transforms.add(transform)
-
-        # perform the transformations on the loaded inputs
-        trans_config = TransformConfig(self.name, self.path, config, parameters)
-        tasks = [Task(self.name,
-                      label=task_dict['label'],
-                      attributes=task_dict['attributes'],
-                      task=task_dict['task'],
-                      optimizations=task_dict.get('optimizations'),
-                      dependencies=task_dict.get('dependencies'))
-                 for task_dict in transforms(trans_config, inputs)]
-        return tasks
+        return impl_class.load_tasks(self.name, self.path, config,
+                                     parameters, loaded_tasks)
 
 
 class TaskGraphGenerator(object):
     """
     The central controller for taskgraph.  This handles all phases of graph
     generation.  The task is generated from all of the kinds defined in
     subdirectories of the generator's root directory.
 
@@ -163,27 +146,16 @@ class TaskGraphGenerator(object):
         """
         A dictionary mapping task label to assigned taskId.  This property helps
         in interpreting `optimized_task_graph`.
 
         @type: dictionary
         """
         return self._run_until('label_to_taskid')
 
-    @property
-    def morphed_task_graph(self):
-        """
-        The optimized task graph, with any subsequent morphs applied. This graph
-        will have the same meaning as the optimized task graph, but be in a form
-        more palatable to TaskCluster.
-
-        @type: TaskGraph
-        """
-        return self._run_until('morphed_task_graph')
-
     def _load_kinds(self):
         for path in os.listdir(self.root_dir):
             path = os.path.join(self.root_dir, path)
             if not os.path.isdir(path):
                 continue
             kind_name = os.path.basename(path)
 
             kind_yml = os.path.join(path, 'kind.yml')
@@ -223,17 +195,17 @@ class TaskGraphGenerator(object):
         full_task_set = TaskGraph(all_tasks, Graph(set(all_tasks), set()))
         self.verify_attributes(all_tasks)
         self.verify_run_using()
         yield 'full_task_set', full_task_set
 
         logger.info("Generating full task graph")
         edges = set()
         for t in full_task_set:
-            for depname, dep in t.dependencies.iteritems():
+            for dep, depname in t.get_dependencies(full_task_set):
                 edges.add((t.label, dep, depname))
 
         full_task_graph = TaskGraph(all_tasks,
                                     Graph(full_task_set.graph.nodes, edges))
         full_task_graph.for_each_task(verify_task_graph_symbol, scratch_pad={})
         full_task_graph.for_each_task(verify_gecko_v2_routes, scratch_pad={})
         logger.info("Full task graph contains %d tasks and %d dependencies" % (
             len(full_task_set.graph.nodes), len(edges)))
@@ -251,40 +223,33 @@ class TaskGraphGenerator(object):
             logger.info('Filter %s pruned %d tasks (%d remain)' % (
                 fltr.__name__,
                 old_len - len(target_tasks),
                 len(target_tasks)))
 
         yield 'target_task_set', target_task_set
 
         logger.info("Generating target task graph")
-        # include all docker-image build tasks here, in case they are needed for a graph morph
-        docker_image_tasks = set(t.label for t in full_task_graph.tasks.itervalues()
-                                 if t.attributes['kind'] == 'docker-image')
-        target_graph = full_task_graph.graph.transitive_closure(target_tasks | docker_image_tasks)
+        target_graph = full_task_graph.graph.transitive_closure(target_tasks)
         target_task_graph = TaskGraph(
             {l: all_tasks[l] for l in target_graph.nodes},
             target_graph)
         yield 'target_task_graph', target_task_graph
 
         logger.info("Generating optimized task graph")
         do_not_optimize = set()
+
         if not self.parameters.get('optimize_target_tasks', True):
             do_not_optimize = target_task_set.graph.nodes
         optimized_task_graph, label_to_taskid = optimize_task_graph(target_task_graph,
                                                                     self.parameters,
                                                                     do_not_optimize)
-
+        yield 'label_to_taskid', label_to_taskid
         yield 'optimized_task_graph', optimized_task_graph
 
-        morphed_task_graph, label_to_taskid = morph(optimized_task_graph, label_to_taskid)
-
-        yield 'label_to_taskid', label_to_taskid
-        yield 'morphed_task_graph', morphed_task_graph
-
     def _run_until(self, name):
         while name not in self._run_results:
             try:
                 k, v = self._run.next()
             except StopIteration:
                 raise AttributeError("No such run result {}".format(name))
             self._run_results[k] = v
         return self._run_results[name]
deleted file mode 100644
deleted file mode 100644
--- a/taskcluster/taskgraph/loader/balrog.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import absolute_import, print_function, unicode_literals
-
-
-def loader(kind, path, config, params, loaded_tasks):
-    """
-    Load tasks implementing balrog submission jobs.  These depend on beetmover
-    jobs and submit the update to balrog as available after the files are moved
-    into place
-    """
-    if config.get('kind-dependencies', []) != ["beetmover", "beetmover-l10n"]:
-        raise Exception("Balrog kinds must depend on beetmover kinds")
-    for task in loaded_tasks:
-        if not task.attributes.get('nightly'):
-            continue
-        if task.kind not in config.get('kind-dependencies', []):
-            continue
-        beetmover_task = {}
-        beetmover_task['dependent-task'] = task
-
-        yield beetmover_task
deleted file mode 100644
--- a/taskcluster/taskgraph/loader/beetmover.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import absolute_import, print_function, unicode_literals
-
-
-def loader(kind, path, config, params, loaded_tasks):
-    """
-    Generate inputs implementing beetmover jobs.  These depend on nightly build
-    and signing jobs and transfer the artifacts to S3 after build and signing
-    are completed.
-    """
-    if config.get('kind-dependencies', []) != ["build-signing"] and \
-       config.get('kind-dependencies', []) != ["nightly-l10n-signing"]:
-        raise Exception("Beetmover kinds must depend on builds or signing builds")
-    for task in loaded_tasks:
-        if not task.attributes.get('nightly'):
-            continue
-        if task.kind not in config.get('kind-dependencies'):
-            continue
-        beetmover_task = {'dependent-task': task}
-
-        yield beetmover_task
deleted file mode 100644
--- a/taskcluster/taskgraph/loader/beetmover_checksums.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import absolute_import, print_function, unicode_literals
-
-
-def loader(kind, path, config, params, loaded_tasks):
-    """
-    A task implementing a beetmover job specific for checksums.These depend on
-    the checksums signing jobs and transfer the checksums files to S3 after
-    it's being generated and signed.
-    """
-    if config.get('kind-dependencies', []) != ["checksums-signing"]:
-        raise Exception("Beetmover checksums tasks depend on checksums signing tasks")
-    for task in loaded_tasks:
-        if not task.attributes.get('nightly'):
-            continue
-        if task.kind not in config.get('kind-dependencies'):
-            continue
-        beetmover_checksums_task = {'dependent-task': task}
-
-        yield beetmover_checksums_task
deleted file mode 100644
--- a/taskcluster/taskgraph/loader/checksums_signing.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import absolute_import, print_function, unicode_literals
-
-
-def loader(kind, path, config, params, loaded_tasks):
-    """
-    Generate tasks implementing checksums signing jobs.  These depend on beetmover jobs
-    and sign the checksums after its being generated by beetmover
-    """
-
-    if (config.get('kind-dependencies', []) != ["beetmover", "beetmover-l10n"]):
-        raise Exception("Checksums signing tasks must depend on beetmover tasks")
-    for task in loaded_tasks:
-        if not task.attributes.get('nightly'):
-            continue
-        if task.kind not in config.get('kind-dependencies'):
-            continue
-        checksums_signing_task = {'dependent-task': task}
-
-        yield checksums_signing_task
deleted file mode 100644
--- a/taskcluster/taskgraph/loader/post_build.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public License,
-# v. 2.0. If a copy of the MPL was not distributed with this file, You can
-# obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import absolute_import, print_function, unicode_literals
-
-import copy
-import logging
-
-from ..util.yaml import load_yaml
-
-logger = logging.getLogger(__name__)
-
-
-def loader(kind, path, config, params, loaded_tasks):
-    """
-    Generate tasks implementing post-build jobs.  These depend on builds and perform
-    various followup tasks after a that build has completed.
-
-    The `only-for-build-platforms` kind configuration, if specified, will limit
-    the build platforms for which a post-build task will be created.
-
-    The `job-template' kind configuration points to a yaml file which will
-    be used to create the input to the transforms.  It will have added to it
-    keys `build-label`, the label for the build task, and `build-platform`, its
-    platform.
-    """
-    if config.get('kind-dependencies', []) != ["build"]:
-        raise Exception("PostBuildTask kinds must depend on builds")
-
-    only_platforms = config.get('only-for-build-platforms')
-    prototype = load_yaml(path, config.get('job-template'))
-
-    for task in loaded_tasks:
-        if task.kind != 'build':
-            continue
-
-        build_platform = task.attributes.get('build_platform')
-        build_type = task.attributes.get('build_type')
-        if not build_platform or not build_type:
-            continue
-        platform = "{}/{}".format(build_platform, build_type)
-        if only_platforms and platform not in only_platforms:
-            continue
-
-        post_task = copy.deepcopy(prototype)
-        post_task['build-label'] = task.label
-        post_task['build-platform'] = platform
-        post_task['build-task'] = task
-        yield post_task
deleted file mode 100644
--- a/taskcluster/taskgraph/loader/repacks.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import absolute_import, print_function, unicode_literals
-
-
-def loader(kind, path, config, params, loaded_tasks):
-    """
-    Generate tasks implementing l10n repack jobs.  These may depend on build
-    jobs and do a repack of them
-    """
-    only_platforms = config.get('only-for-build-platforms')
-
-    for task in loaded_tasks:
-        if task.kind not in config.get('kind-dependencies'):
-            continue
-
-        build_platform = task.attributes.get('build_platform')
-        build_type = task.attributes.get('build_type')
-        if not build_platform or not build_type:
-            continue
-        platform = "{}/{}".format(build_platform, build_type)
-        if only_platforms and platform not in only_platforms:
-            continue
-
-        repack_task = {'dependent-task': task}
-
-        if config.get('job-template'):
-            repack_task.update(config.get('job-template'))
-
-        yield repack_task
deleted file mode 100644
--- a/taskcluster/taskgraph/loader/signing.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import absolute_import, print_function, unicode_literals
-
-
-def loader(kind, path, config, params, loaded_tasks):
-    """
-    Generate tasks implementing signing jobs.  These depend on nightly build
-    jobs and sign the artifacts after a build has completed.
-    """
-    if (config.get('kind-dependencies', []) != ["build"] and
-            config.get('kind-dependencies', []) != ["nightly-l10n"]):
-        raise Exception("Signing kinds must depend on builds or l10n repacks")
-    for task in loaded_tasks:
-        if task.kind not in config.get('kind-dependencies'):
-            continue
-        if not task.attributes.get('nightly'):
-            continue
-        signing_task = {'dependent-task': task}
-
-        yield signing_task
deleted file mode 100644
--- a/taskcluster/taskgraph/loader/test.py
+++ /dev/null
@@ -1,113 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import absolute_import, print_function, unicode_literals
-
-import copy
-import logging
-
-from ..util.yaml import load_yaml
-
-logger = logging.getLogger(__name__)
-
-
-def loader(kind, path, config, params, loaded_tasks):
-    """
-    Generate tasks implementing Gecko tests.
-    """
-
-    # the kind on which this one depends
-    if len(config.get('kind-dependencies', [])) != 1:
-        raise Exception(
-            "Test kinds must have exactly one item in kind-dependencies")
-    dep_kind = config['kind-dependencies'][0]
-
-    # get build tasks, keyed by build platform
-    builds_by_platform = get_builds_by_platform(dep_kind, loaded_tasks)
-
-    # get the test platforms for those build tasks
-    test_platforms_cfg = load_yaml(path, 'test-platforms.yml')
-    test_platforms = get_test_platforms(test_platforms_cfg, builds_by_platform)
-
-    # expand the test sets for each of those platforms
-    test_sets_cfg = load_yaml(path, 'test-sets.yml')
-    test_platforms = expand_tests(test_sets_cfg, test_platforms)
-
-    # load the test descriptions
-    test_descriptions = load_yaml(path, 'tests.yml')
-
-    # generate all tests for all test platforms
-    for test_platform_name, test_platform in test_platforms.iteritems():
-        for test_name in test_platform['test-names']:
-            test = copy.deepcopy(test_descriptions[test_name])
-            test['build-platform'] = test_platform['build-platform']
-            test['test-platform'] = test_platform_name
-            test['build-label'] = test_platform['build-label']
-            test['test-name'] = test_name
-            if test_platform['nightly']:
-                test.setdefault('attributes', {})['nightly'] = True
-
-            logger.debug("Generating tasks for test {} on platform {}".format(
-                test_name, test['test-platform']))
-            yield test
-
-
-def get_builds_by_platform(dep_kind, loaded_tasks):
-    """Find the build tasks on which tests will depend, keyed by
-    platform/type.  Returns a dictionary mapping build platform to task."""
-    builds_by_platform = {}
-    for task in loaded_tasks:
-        if task.kind != dep_kind:
-            continue
-
-        build_platform = task.attributes.get('build_platform')
-        build_type = task.attributes.get('build_type')
-        if not build_platform or not build_type:
-            continue
-        platform = "{}/{}".format(build_platform, build_type)
-        if platform in builds_by_platform:
-            raise Exception("multiple build jobs for " + platform)
-        builds_by_platform[platform] = task
-    return builds_by_platform
-
-
-def get_test_platforms(test_platforms_cfg, builds_by_platform):
-    """Get the test platforms for which test tasks should be generated,
-    based on the available build platforms.  Returns a dictionary mapping
-    test platform to {test-set, build-platform, build-label}."""
-    test_platforms = {}
-    for test_platform, cfg in test_platforms_cfg.iteritems():
-        build_platform = cfg['build-platform']
-        if build_platform not in builds_by_platform:
-            logger.warning(
-                "No build task with platform {}; ignoring test platform {}".format(
-                    build_platform, test_platform))
-            continue
-        test_platforms[test_platform] = {
-            'nightly': builds_by_platform[build_platform].attributes.get('nightly', False),
-            'build-platform': build_platform,
-            'build-label': builds_by_platform[build_platform].label,
-        }
-        test_platforms[test_platform].update(cfg)
-    return test_platforms
-
-
-def expand_tests(test_sets_cfg, test_platforms):
-    """Expand the test sets in `test_platforms` out to sets of test names.
-    Returns a dictionary like `get_test_platforms`, with an additional
-    `test-names` key for each test platform, containing a set of test
-    names."""
-    rv = {}
-    for test_platform, cfg in test_platforms.iteritems():
-        test_sets = cfg['test-sets']
-        if not set(test_sets) < set(test_sets_cfg):
-            raise Exception(
-                "Test sets {} for test platform {} are not defined".format(
-                    ', '.join(test_sets), test_platform))
-        test_names = set()
-        for test_set in test_sets:
-            test_names.update(test_sets_cfg[test_set])
-        rv[test_platform] = cfg.copy()
-        rv[test_platform]['test-names'] = test_names
-    return rv
deleted file mode 100644
--- a/taskcluster/taskgraph/loader/transform.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import absolute_import, print_function, unicode_literals
-
-import logging
-import itertools
-
-from ..util.templates import merge
-from ..util.yaml import load_yaml
-
-logger = logging.getLogger(__name__)
-
-
-def loader(kind, path, config, params, loaded_tasks):
-    """
-    Get the input elements that will be transformed into tasks in a generic
-    way.  The elements themselves are free-form, and become the input to the
-    first transform.
-
-    By default, this reads jobs from the `jobs` key, or from yaml files
-    named by `jobs-from`.  The entities are read from mappings, and the
-    keys to those mappings are added in the `name` key of each entity.
-
-    If there is a `job-defaults` config, then every job is merged with it.
-    This provides a simple way to set default values for all jobs of a
-    kind.  More complex defaults should be implemented with custom
-    transforms.
-
-    Other kind implementations can use a different loader function to
-    produce inputs and hand them to `transform_inputs`.
-    """
-    def jobs():
-        defaults = config.get('job-defaults')
-        jobs = config.get('jobs', {}).iteritems()
-        jobs_from = itertools.chain.from_iterable(
-            load_yaml(path, filename).iteritems()
-            for filename in config.get('jobs-from', {}))
-        for name, job in itertools.chain(jobs, jobs_from):
-            if defaults:
-                job = merge(defaults, job)
-            yield name, job
-
-    for name, job in jobs():
-        job['name'] = name
-        logger.debug("Generating tasks for {} {}".format(kind, name))
-        yield job
deleted file mode 100644
--- a/taskcluster/taskgraph/morph.py
+++ /dev/null
@@ -1,136 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-"""
-Graph morphs are modifications to task-graphs that take place *after* the
-optimization phase.
-
-These graph morphs are largely invisible to developers running `./mach`
-locally, so they should be limited to changes that do not modify the meaning of
-the graph.
-"""
-
-# Note that the translation of `{'task-reference': '..'}` is handled in the
-# optimization phase (since optimization involves dealing with taskIds
-# directly).  Similarly, `{'relative-datestamp': '..'}` is handled at the last
-# possible moment during task creation.
-
-from __future__ import absolute_import, print_function, unicode_literals
-
-import logging
-
-from slugid import nice as slugid
-from .task import Task
-from .graph import Graph
-from .taskgraph import TaskGraph
-
-logger = logging.getLogger(__name__)
-MAX_ROUTES = 10
-
-
-def amend_taskgraph(taskgraph, label_to_taskid, to_add):
-    """Add the given tasks to the taskgraph, returning a new taskgraph"""
-    new_tasks = taskgraph.tasks.copy()
-    new_edges = taskgraph.graph.edges.copy()
-    for task in to_add:
-        new_tasks[task.task_id] = task
-        assert task.label not in label_to_taskid
-        label_to_taskid[task.label] = task.task_id
-        for depname, dep in task.dependencies.iteritems():
-            new_edges.add((task.task_id, dep, depname))
-
-    taskgraph = TaskGraph(new_tasks, Graph(set(new_tasks), new_edges))
-    return taskgraph, label_to_taskid
-
-
-def derive_misc_task(task, purpose, image, label_to_taskid):
-    """Create the shell of a task that depends on `task` and on the given docker
-    image."""
-    label = '{}-{}'.format(purpose, task.label)
-
-    # this is why all docker image tasks are included in the target task graph: we
-    # need to find them in label_to_taskid, if if nothing else required them
-    image_taskid = label_to_taskid['build-docker-image-' + image]
-
-    task_def = {
-        'provisionerId': 'aws-provisioner-v1',
-        'workerType': 'gecko-misc',
-        'dependencies': [task.task_id, image_taskid],
-        'created': {'relative-timestamp': '0 seconds'},
-        'deadline': task.task['deadline'],
-        # no point existing past the parent task's deadline
-        'expires': task.task['deadline'],
-        'metadata': {
-            'name': label,
-            'description': '{} for {}'.format(purpose, task.task['metadata']['description']),
-            'owner': task.task['metadata']['owner'],
-            'source': task.task['metadata']['source'],
-        },
-        'scopes': [],
-        'payload': {
-            'image': {
-                'path': 'public/image.tar.zst',
-                'taskId': image_taskid,
-                'type': 'task-image',
-            },
-            'features': {
-                'taskclusterProxy': True,
-            },
-            'maxRunTime': 600,
-        }
-    }
-    dependencies = {
-        'parent': task.task_id,
-        'docker-image': image_taskid,
-    }
-    task = Task(kind='misc', label=label, attributes={}, task=task_def,
-                dependencies=dependencies)
-    task.task_id = slugid()
-    return task
-
-
-def make_index_task(parent_task, label_to_taskid):
-    index_paths = [r.split('.', 1)[1] for r in parent_task.task['routes']
-                   if r.startswith('index.')]
-    parent_task.task['routes'] = [r for r in parent_task.task['routes']
-                                  if not r.startswith('index.')]
-
-    task = derive_misc_task(parent_task, 'index-task',
-                            'index-task', label_to_taskid)
-    task.task['scopes'] = [
-        'index:insert-task:{}'.format(path) for path in index_paths]
-    task.task['payload']['command'] = ['insert-indexes.js'] + index_paths
-    task.task['payload']['env'] = {
-        "TARGET_TASKID": parent_task.task_id,
-    }
-    return task
-
-
-def add_index_tasks(taskgraph, label_to_taskid):
-    """
-    The TaskCluster queue only allows 10 routes on a task, but we have tasks
-    with many more routes, for purposes of indexing. This graph morph adds
-    "index tasks" that depend on such tasks and do the index insertions
-    directly, avoiding the limits on task.routes.
-    """
-    logger.debug('Morphing: adding index tasks')
-
-    added = []
-    for label, task in taskgraph.tasks.iteritems():
-        if len(task.task.get('routes', [])) <= MAX_ROUTES:
-            continue
-        added.append(make_index_task(task, label_to_taskid))
-
-    if added:
-        taskgraph, label_to_taskid = amend_taskgraph(
-            taskgraph, label_to_taskid, added)
-        logger.info('Added {} index tasks'.format(len(added)))
-
-    return taskgraph, label_to_taskid
-
-
-def morph(taskgraph, label_to_taskid):
-    """Apply all morphs"""
-    taskgraph, label_to_taskid = add_index_tasks(taskgraph, label_to_taskid)
-    return taskgraph, label_to_taskid
--- a/taskcluster/taskgraph/optimize.py
+++ b/taskcluster/taskgraph/optimize.py
@@ -1,31 +1,23 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
-
 import logging
 import re
-import os
-import requests
 
 from .graph import Graph
-from . import files_changed
 from .taskgraph import TaskGraph
-from .util.seta import is_low_value_task
-from .util.taskcluster import find_task_id
 from slugid import nice as slugid
 
 logger = logging.getLogger(__name__)
 TASK_REFERENCE_PATTERN = re.compile('<([^>]+)>')
 
-_optimizations = {}
-
 
 def optimize_task_graph(target_task_graph, params, do_not_optimize, existing_tasks=None):
     """
     Perform task optimization, without optimizing tasks named in
     do_not_optimize.
     """
     named_links_dict = target_task_graph.graph.named_links_dict()
     label_to_taskid = {}
@@ -63,31 +55,16 @@ def resolve_task_references(label, task_
                 return TASK_REFERENCE_PATTERN.sub(repl, val['task-reference'])
             else:
                 return {k: recurse(v) for k, v in val.iteritems()}
         else:
             return val
     return recurse(task_def)
 
 
-def optimize_task(task, params):
-    """
-    Optimize a single task by running its optimizations in order until one
-    succeeds.
-    """
-    for opt in task.optimizations:
-        opt_type, args = opt[0], opt[1:]
-        opt_fn = _optimizations[opt_type]
-        optimized, task_id = opt_fn(task, params, *args)
-        if optimized or task_id:
-            return optimized, task_id
-
-    return False, None
-
-
 def annotate_task_graph(target_task_graph, params, do_not_optimize,
                         named_links_dict, label_to_taskid, existing_tasks):
     """
     Annotate each task in the graph with .optimized (boolean) and .task_id
     (possibly None), following the rules for optimization and calling the task
     kinds' `optimize_task` method.
 
     As a side effect, label_to_taskid is updated with labels for all optimized
@@ -113,17 +90,17 @@ def annotate_task_graph(target_task_grap
         if label in do_not_optimize:
             optimized = False
         # Let's check whether this task has been created before
         elif existing_tasks is not None and label in existing_tasks:
             optimized = True
             replacement_task_id = existing_tasks[label]
         # otherwise, examine the task itself (which may be an expensive operation)
         else:
-            optimized, replacement_task_id = optimize_task(task, params)
+            optimized, replacement_task_id = task.optimize(params)
 
         task.optimized = optimized
         task.task_id = replacement_task_id
         if replacement_task_id:
             label_to_taskid[label] = replacement_task_id
 
         if optimized:
             if replacement_task_id:
@@ -172,65 +149,8 @@ def get_subgraph(annotated_task_graph, n
         (left, right, name)
         for (left, right, name) in edges_by_taskid
         if left in tasks_by_taskid and right in tasks_by_taskid
         )
 
     return TaskGraph(
         tasks_by_taskid,
         Graph(set(tasks_by_taskid), edges_by_taskid))
-
-
-def optimization(name):
-    def wrap(func):
-        if name in _optimizations:
-            raise Exception("multiple optimizations with name {}".format(name))
-        _optimizations[name] = func
-        return func
-    return wrap
-
-
-@optimization('index-search')
-def opt_index_search(task, params, index_path):
-    try:
-        task_id = find_task_id(
-            index_path,
-            use_proxy=bool(os.environ.get('TASK_ID')))
-
-        return True, task_id
-    except requests.exceptions.HTTPError:
-        pass
-
-    return False, None
-
-
-@optimization('seta')
-def opt_seta(task, params):
-    bbb_task = False
-
-    # for bbb tasks we need to send in the buildbot buildername
-    if task.task.get('provisionerId', '') == 'buildbot-bridge':
-        label = task.task.get('payload').get('buildername')
-        bbb_task = True
-    else:
-        label = task.label
-
-    # we would like to return 'False, None' while it's high_value_task
-    # and we wouldn't optimize it. Otherwise, it will return 'True, None'
-    if is_low_value_task(label,
-                         params.get('project'),
-                         params.get('pushlog_id'),
-                         params.get('pushdate'),
-                         bbb_task):
-        # Always optimize away low-value tasks
-        return True, None
-    else:
-        return False, None
-
-
-@optimization('files-changed')
-def opt_files_changed(task, params, file_patterns):
-    changed = files_changed.check(params, file_patterns)
-    if not changed:
-        logger.debug('no files found matching a pattern in `when.files-changed` for ' +
-                     task.label)
-        return True, None
-    return False, None
deleted file mode 100644
--- a/taskcluster/taskgraph/task.py
+++ /dev/null
@@ -1,86 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-from __future__ import absolute_import, print_function, unicode_literals
-
-
-class Task(object):
-    """
-    Representation of a task in a TaskGraph.  Each Task has, at creation:
-
-    - kind: the name of the task kind
-    - label; the label for this task
-    - attributes: a dictionary of attributes for this task (used for filtering)
-    - task: the task definition (JSON-able dictionary)
-    - optimizations: optimizations to apply to the task (see taskgraph.optimize)
-    - dependencies: tasks this one depends on, in the form {name: label}, for example
-      {'build': 'build-linux64/opt', 'docker-image': 'build-docker-image-desktop-test'}
-
-    And later, as the task-graph processing proceeds:
-
-    - task_id -- TaskCluster taskId under which this task will be created
-    - optimized -- true if this task need not be performed
-
-    This class is just a convenience wraper for the data type and managing
-    display, comparison, serialization, etc. It has no functionality of its own.
-    """
-    def __init__(self, kind, label, attributes, task,
-                 optimizations=None, dependencies=None):
-        self.kind = kind
-        self.label = label
-        self.attributes = attributes
-        self.task = task
-
-        self.task_id = None
-        self.optimized = False
-
-        self.attributes['kind'] = kind
-
-        self.optimizations = optimizations or []
-        self.dependencies = dependencies or {}
-
-    def __eq__(self, other):
-        return self.kind == other.kind and \
-            self.label == other.label and \
-            self.attributes == other.attributes and \
-            self.task == other.task and \
-            self.task_id == other.task_id and \
-            self.optimizations == other.optimizations and \
-            self.dependencies == other.dependencies
-
-    def __repr__(self):
-        return ('Task({kind!r}, {label!r}, {attributes!r}, {task!r}, '
-                'optimizations={optimizations!r}, '
-                'dependencies={dependencies!r})'.format(**self.__dict__))
-
-    def to_json(self):
-        rv = {
-            'kind': self.kind,
-            'label': self.label,
-            'attributes': self.attributes,
-            'dependencies': self.dependencies,
-            'optimizations': self.optimizations,
-            'task': self.task,
-        }
-        if self.task_id:
-            rv['task_id'] = self.task_id
-        return rv
-
-    @classmethod
-    def from_json(cls, task_dict):
-        """
-        Given a data structure as produced by taskgraph.to_json, re-construct
-        the original Task object.  This is used to "resume" the task-graph
-        generation process, for example in Action tasks.
-        """
-        rv = cls(
-            kind=task_dict['kind'],
-            label=task_dict['label'],
-            attributes=task_dict['attributes'],
-            task=task_dict['task'],
-            optimizations=task_dict['optimizations'],
-            dependencies=task_dict.get('dependencies'))
-        if 'task_id' in task_dict:
-            rv.task_id = task_dict['task_id']
-        return rv
new file mode 100644
new file mode 100644
--- /dev/null
+++ b/taskcluster/taskgraph/task/balrog.py
@@ -0,0 +1,28 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from . import transform
+
+
+class BalrogTask(transform.TransformTask):
+    """
+    A task implementing a balrog submission job.  These depend on beetmover jobs
+    and submits the update to balrog as available after the files are moved into place
+    """
+
+    @classmethod
+    def get_inputs(cls, kind, path, config, params, loaded_tasks):
+        if config.get('kind-dependencies', []) != ["beetmover", "beetmover-l10n"]:
+            raise Exception("Balrog kinds must depend on beetmover kinds")
+        for task in loaded_tasks:
+            if not task.attributes.get('nightly'):
+                continue
+            if task.kind not in config.get('kind-dependencies', []):
+                continue
+            beetmover_task = {}
+            beetmover_task['dependent-task'] = task
+
+            yield beetmover_task
new file mode 100644
--- /dev/null
+++ b/taskcluster/taskgraph/task/base.py
@@ -0,0 +1,125 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import abc
+import os
+import requests
+from taskgraph.util.taskcluster import find_task_id
+
+
+class Task(object):
+    """
+    Representation of a task in a TaskGraph.  Each Task has, at creation:
+
+    - kind: the name of the task kind
+    - label; the label for this task
+    - attributes: a dictionary of attributes for this task (used for filtering)
+    - task: the task definition (JSON-able dictionary)
+
+    And later, as the task-graph processing proceeds:
+
+    - task_id -- TaskCluster taskId under which this task will be created
+    - optimized -- true if this task need not be performed
+
+    A kind represents a collection of tasks that share common characteristics.
+    For example, all build jobs.  Each instance of a kind is intialized with a
+    path from which it draws its task configuration.  The instance is free to
+    store as much local state as it needs.
+    """
+    __metaclass__ = abc.ABCMeta
+
+    def __init__(self, kind, label, attributes, task, index_paths=None):
+        self.kind = kind
+        self.label = label
+        self.attributes = attributes
+        self.task = task
+
+        self.task_id = None
+        self.optimized = False
+
+        self.attributes['kind'] = kind
+
+        self.index_paths = index_paths or ()
+
+    def __eq__(self, other):
+        return self.kind == other.kind and \
+            self.label == other.label and \
+            self.attributes == other.attributes and \
+            self.task == other.task and \
+            self.task_id == other.task_id and \
+            self.index_paths == other.index_paths
+
+    @classmethod
+    @abc.abstractmethod
+    def load_tasks(cls, kind, path, config, parameters, loaded_tasks):
+        """
+        Load the tasks for a given kind.
+
+        The `kind` is the name of the kind; the configuration for that kind
+        named this class.
+
+        The `path` is the path to the configuration directory for the kind.  This
+        can be used to load extra data, templates, etc.
+
+        The `parameters` give details on which to base the task generation.
+        See `taskcluster/docs/parameters.rst` for details.
+
+        At the time this method is called, all kinds on which this kind depends
+        (that is, specified in the `kind-dependencies` key in `self.config`
+        have already loaded their tasks, and those tasks are available in
+        the list `loaded_tasks`.
+
+        The return value is a list of Task instances.
+        """
+
+    @abc.abstractmethod
+    def get_dependencies(self, taskgraph):
+        """
+        Get the set of task labels this task depends on, by querying the full
+        task set, given as `taskgraph`.
+
+        Returns a list of (task_label, dependency_name) pairs describing the
+        dependencies.
+        """
+
+    def optimize(self, params):
+        """
+        Determine whether this task can be optimized, and if it can, what taskId
+        it should be replaced with.
+
+        The return value is a tuple `(optimized, taskId)`.  If `optimized` is
+        true, then the task will be optimized (in other words, not included in
+        the task graph).  If the second argument is a taskid, then any
+        dependencies on this task will isntead depend on that taskId.  It is an
+        error to return no taskId for a task on which other tasks depend.
+
+        The default optimizes when a taskId can be found for one of the index
+        paths attached to the task.
+        """
+        for index_path in self.index_paths:
+            try:
+                task_id = find_task_id(
+                    index_path,
+                    use_proxy=bool(os.environ.get('TASK_ID')))
+
+                return True, task_id
+            except requests.exceptions.HTTPError:
+                pass
+
+        return False, None
+
+    @classmethod
+    def from_json(cls, task_dict):
+        """
+        Given a data structure as produced by taskgraph.to_json, re-construct
+        the original Task object.  This is used to "resume" the task-graph
+        generation process, for example in Action tasks.
+        """
+        return cls(
+            kind=task_dict['attributes']['kind'],
+            label=task_dict['label'],
+            attributes=task_dict['attributes'],
+            task=task_dict['task'])
new file mode 100644
--- /dev/null
+++ b/taskcluster/taskgraph/task/beetmover.py
@@ -0,0 +1,28 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from . import transform
+
+
+class BeetmoverTask(transform.TransformTask):
+    """
+    A task implementing a beetmover job.  These depend on nightly build and signing
+    jobs and transfer the artifacts to S3 after build and signing are completed.
+    """
+
+    @classmethod
+    def get_inputs(cls, kind, path, config, params, loaded_tasks):
+        if config.get('kind-dependencies', []) != ["build-signing"] and \
+           config.get('kind-dependencies', []) != ["nightly-l10n-signing"]:
+            raise Exception("Beetmover kinds must depend on builds or signing builds")
+        for task in loaded_tasks:
+            if not task.attributes.get('nightly'):
+                continue
+            if task.kind not in config.get('kind-dependencies'):
+                continue
+            beetmover_task = {'dependent-task': task}
+
+            yield beetmover_task
new file mode 100644
--- /dev/null
+++ b/taskcluster/taskgraph/task/beetmover_checksums.py
@@ -0,0 +1,28 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from . import transform
+
+
+class BeetmoverChecksumsTask(transform.TransformTask):
+    """
+    A task implementing a beetmover job specific for checksums.These depend on
+    the checksums signing jobs and transfer the checksums files to S3 after
+    it's being generated and signed.
+    """
+
+    @classmethod
+    def get_inputs(cls, kind, path, config, params, loaded_tasks):
+        if config.get('kind-dependencies', []) != ["checksums-signing"]:
+            raise Exception("Beetmover checksums tasks depend on checksums signing tasks")
+        for task in loaded_tasks:
+            if not task.attributes.get('nightly'):
+                continue
+            if task.kind not in config.get('kind-dependencies'):
+                continue
+            beetmover_checksums_task = {'dependent-task': task}
+
+            yield beetmover_checksums_task
new file mode 100644
--- /dev/null
+++ b/taskcluster/taskgraph/task/checksums_signing.py
@@ -0,0 +1,27 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from . import transform
+
+
+class ChecksumsSigningTask(transform.TransformTask):
+    """
+    A task implementing a checksums signing job.  These depend on beetmover jobs
+    and sign the checksums after its being generated by beetmover
+    """
+
+    @classmethod
+    def get_inputs(cls, kind, path, config, params, loaded_tasks):
+        if (config.get('kind-dependencies', []) != ["beetmover", "beetmover-l10n"]):
+            raise Exception("Checksums signing tasks must depend on beetmover tasks")
+        for task in loaded_tasks:
+            if not task.attributes.get('nightly'):
+                continue
+            if task.kind not in config.get('kind-dependencies'):
+                continue
+            checksums_signing_task = {'dependent-task': task}
+
+            yield checksums_signing_task
new file mode 100644
--- /dev/null
+++ b/taskcluster/taskgraph/task/docker_image.py
@@ -0,0 +1,117 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import logging
+import os
+import urllib2
+
+from . import base
+from .. import GECKO
+from taskgraph.util.docker import (
+    docker_image,
+    generate_context_hash,
+    INDEX_PREFIX,
+)
+from taskgraph.util.taskcluster import get_artifact_url
+from taskgraph.util.templates import Templates
+
+logger = logging.getLogger(__name__)
+
+
+class DockerImageTask(base.Task):
+
+    @classmethod
+    def load_tasks(cls, kind, path, config, params, loaded_tasks):
+        parameters = {
+            'pushlog_id': params.get('pushlog_id', 0),
+            'pushdate': params['moz_build_date'],
+            'pushtime': params['moz_build_date'][8:],
+            'year': params['moz_build_date'][0:4],
+            'month': params['moz_build_date'][4:6],
+            'day': params['moz_build_date'][6:8],
+            'project': params['project'],
+            'docker_image': docker_image,
+            'base_repository': params['base_repository'] or params['head_repository'],
+            'head_repository': params['head_repository'],
+            'head_ref': params['head_ref'] or params['head_rev'],
+            'head_rev': params['head_rev'],
+            'owner': params['owner'],
+            'level': params['level'],
+            'source': '{repo}file/{rev}/taskcluster/ci/docker-image/image.yml'
+                      .format(repo=params['head_repository'], rev=params['head_rev']),
+            'index_image_prefix': INDEX_PREFIX,
+            'artifact_path': 'public/image.tar.zst',
+        }
+
+        tasks = []
+        templates = Templates(path)
+        for image_name, image_symbol in config['images'].iteritems():
+            context_path = os.path.join('taskcluster', 'docker', image_name)
+            context_hash = generate_context_hash(GECKO, context_path, image_name)
+
+            image_parameters = dict(parameters)
+            image_parameters['image_name'] = image_name
+            image_parameters['context_hash'] = context_hash
+
+            image_task = templates.load('image.yml', image_parameters)
+            attributes = {'image_name': image_name}
+
+            # unique symbol for different docker image
+            if 'extra' in image_task['task']:
+                image_task['task']['extra']['treeherder']['symbol'] = image_symbol
+
+            # As an optimization, if the context hash exists for a high level, that image
+            # task ID will be used.  The reasoning behind this is that eventually everything ends
+            # up on level 3 at some point if most tasks use this as a common image
+            # for a given context hash, a worker within Taskcluster does not need to contain
+            # the same image per branch.
+            index_paths = ['{}.level-{}.{}.hash.{}'.format(
+                                INDEX_PREFIX, level, image_name, context_hash)
+                           for level in reversed(range(int(params['level']), 4))]
+
+            tasks.append(cls(kind, 'build-docker-image-' + image_name,
+                             task=image_task['task'], attributes=attributes,
+                             index_paths=index_paths))
+
+        return tasks
+
+    def get_dependencies(self, taskgraph):
+        return []
+
+    def optimize(self, params):
+        optimized, taskId = super(DockerImageTask, self).optimize(params)
+        if optimized and taskId:
+            try:
+                # Only return the task ID if the artifact exists for the indexed
+                # task.
+                request = urllib2.Request(get_artifact_url(
+                    taskId, 'public/image.tar.zst',
+                    use_proxy=bool(os.environ.get('TASK_ID'))))
+                request.get_method = lambda: 'HEAD'
+                urllib2.urlopen(request)
+
+                # HEAD success on the artifact is enough
+                return True, taskId
+            except urllib2.HTTPError:
+                pass
+
+        return False, None
+
+    @classmethod
+    def from_json(cls, task_dict):
+        # Generating index_paths for optimization
+        imgMeta = task_dict['task']['extra']['imageMeta']
+        image_name = imgMeta['imageName']
+        context_hash = imgMeta['contextHash']
+        index_paths = ['{}.level-{}.{}.hash.{}'.format(
+                            INDEX_PREFIX, level, image_name, context_hash)
+                       for level in reversed(range(int(imgMeta['level']), 4))]
+        docker_image_task = cls(kind='docker-image',
+                                label=task_dict['label'],
+                                attributes=task_dict['attributes'],
+                                task=task_dict['task'],
+                                index_paths=index_paths)
+        return docker_image_task
new file mode 100644
--- /dev/null
+++ b/taskcluster/taskgraph/task/post_build.py
@@ -0,0 +1,54 @@
+# This Source Code Form is subject to the terms of the Mozilla Public License,
+# v. 2.0. If a copy of the MPL was not distributed with this file, You can
+# obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import copy
+import logging
+
+from . import transform
+from ..util.yaml import load_yaml
+
+logger = logging.getLogger(__name__)
+
+
+class PostBuildTask(transform.TransformTask):
+    """
+    A task implementing a post-build job.  These depend on jobs and perform
+    various followup tasks after a build has completed.
+
+    The `only-for-build-platforms` kind configuration, if specified, will limit
+    the build platforms for which a post-build task will be created.
+
+    The `job-template` kind configuration points to a yaml file which will
+    be used to create the input to the transforms.  It will have added to it
+    keys `build-label`, the label for the build task, and `build-platform`, its
+    platform.
+    """
+
+    @classmethod
+    def get_inputs(cls, kind, path, config, params, loaded_tasks):
+        if config.get('kind-dependencies', []) != ["build"]:
+            raise Exception("PostBuildTask kinds must depend on builds")
+
+        only_platforms = config.get('only-for-build-platforms')
+        prototype = load_yaml(path, config.get('job-template'))
+
+        for task in loaded_tasks:
+            if task.kind != 'build':
+                continue
+
+            build_platform = task.attributes.get('build_platform')
+            build_type = task.attributes.get('build_type')
+            if not build_platform or not build_type:
+                continue
+            platform = "{}/{}".format(build_platform, build_type)
+            if only_platforms and platform not in only_platforms:
+                continue
+
+            post_task = copy.deepcopy(prototype)
+            post_task['build-label'] = task.label
+            post_task['build-platform'] = platform
+            post_task['build-task'] = task
+            yield post_task
new file mode 100644
--- /dev/null
+++ b/taskcluster/taskgraph/task/repacks.py
@@ -0,0 +1,37 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from . import transform
+
+
+class RepackTask(transform.TransformTask):
+    """
+    A task implementing a l10n repack job.  These may depend on build jobs and
+    do a repack of them
+    """
+
+    @classmethod
+    def get_inputs(cls, kind, path, config, params, loaded_tasks):
+        only_platforms = config.get('only-for-build-platforms')
+
+        for task in loaded_tasks:
+            if task.kind not in config.get('kind-dependencies'):
+                continue
+
+            build_platform = task.attributes.get('build_platform')
+            build_type = task.attributes.get('build_type')
+            if not build_platform or not build_type:
+                continue
+            platform = "{}/{}".format(build_platform, build_type)
+            if only_platforms and platform not in only_platforms:
+                continue
+
+            repack_task = {'dependent-task': task}
+
+            if config.get('job-template'):
+                repack_task.update(config.get('job-template'))
+
+            yield repack_task
new file mode 100644
--- /dev/null
+++ b/taskcluster/taskgraph/task/signing.py
@@ -0,0 +1,28 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from . import transform
+
+
+class SigningTask(transform.TransformTask):
+    """
+    A task implementing a signing job.  These depend on nightly build jobs and
+    sign the artifacts after a build has completed.
+    """
+
+    @classmethod
+    def get_inputs(cls, kind, path, config, params, loaded_tasks):
+        if (config.get('kind-dependencies', []) != ["build"] and
+                config.get('kind-dependencies', []) != ["nightly-l10n"]):
+            raise Exception("Signing kinds must depend on builds or l10n repacks")
+        for task in loaded_tasks:
+            if task.kind not in config.get('kind-dependencies'):
+                continue
+            if not task.attributes.get('nightly'):
+                continue
+            signing_task = {'dependent-task': task}
+
+            yield signing_task
new file mode 100644
--- /dev/null
+++ b/taskcluster/taskgraph/task/test.py
@@ -0,0 +1,114 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import copy
+import logging
+
+from . import transform
+from ..util.yaml import load_yaml
+
+logger = logging.getLogger(__name__)
+
+
+class TestTask(transform.TransformTask):
+    """
+    A task implementing a Gecko test.
+    """
+
+    @classmethod
+    def get_inputs(cls, kind, path, config, params, loaded_tasks):
+
+        # the kind on which this one depends
+        if len(config.get('kind-dependencies', [])) != 1:
+            raise Exception("TestTask kinds must have exactly one item in kind-dependencies")
+        dep_kind = config['kind-dependencies'][0]
+
+        # get build tasks, keyed by build platform
+        builds_by_platform = cls.get_builds_by_platform(dep_kind, loaded_tasks)
+
+        # get the test platforms for those build tasks
+        test_platforms_cfg = load_yaml(path, 'test-platforms.yml')
+        test_platforms = cls.get_test_platforms(test_platforms_cfg, builds_by_platform)
+
+        # expand the test sets for each of those platforms
+        test_sets_cfg = load_yaml(path, 'test-sets.yml')
+        test_platforms = cls.expand_tests(test_sets_cfg, test_platforms)
+
+        # load the test descriptions
+        test_descriptions = load_yaml(path, 'tests.yml')
+
+        # generate all tests for all test platforms
+        for test_platform_name, test_platform in test_platforms.iteritems():
+            for test_name in test_platform['test-names']:
+                test = copy.deepcopy(test_descriptions[test_name])
+                test['build-platform'] = test_platform['build-platform']
+                test['test-platform'] = test_platform_name
+                test['build-label'] = test_platform['build-label']
+                test['test-name'] = test_name
+
+                logger.debug("Generating tasks for test {} on platform {}".format(
+                    test_name, test['test-platform']))
+                yield test
+
+    @classmethod
+    def get_builds_by_platform(cls, dep_kind, loaded_tasks):
+        """Find the build tasks on which tests will depend, keyed by
+        platform/type.  Returns a dictionary mapping build platform to task
+        label."""
+        builds_by_platform = {}
+        for task in loaded_tasks:
+            if task.kind != dep_kind:
+                continue
+
+            build_platform = task.attributes.get('build_platform')
+            build_type = task.attributes.get('build_type')
+            if not build_platform or not build_type:
+                continue
+            platform = "{}/{}".format(build_platform, build_type)
+            if platform in builds_by_platform:
+                raise Exception("multiple build jobs for " + platform)
+            builds_by_platform[platform] = task.label
+        return builds_by_platform
+
+    @classmethod
+    def get_test_platforms(cls, test_platforms_cfg, builds_by_platform):
+        """Get the test platforms for which test tasks should be generated,
+        based on the available build platforms.  Returns a dictionary mapping
+        test platform to {test-set, build-platform, build-label}."""
+        test_platforms = {}
+        for test_platform, cfg in test_platforms_cfg.iteritems():
+            build_platform = cfg['build-platform']
+            if build_platform not in builds_by_platform:
+                logger.warning(
+                    "No build task with platform {}; ignoring test platform {}".format(
+                        build_platform, test_platform))
+                continue
+            test_platforms[test_platform] = {
+                'build-platform': build_platform,
+                'build-label': builds_by_platform[build_platform],
+            }
+            test_platforms[test_platform].update(cfg)
+        return test_platforms
+
+    @classmethod
+    def expand_tests(cls, test_sets_cfg, test_platforms):
+        """Expand the test sets in `test_platforms` out to sets of test names.
+        Returns a dictionary like `get_test_platforms`, with an additional
+        `test-names` key for each test platform, containing a set of test
+        names."""
+        rv = {}
+        for test_platform, cfg in test_platforms.iteritems():
+            test_sets = cfg['test-sets']
+            if not set(test_sets) < set(test_sets_cfg):
+                raise Exception(
+                    "Test sets {} for test platform {} are not defined".format(
+                        ', '.join(test_sets), test_platform))
+            test_names = set()
+            for test_set in test_sets:
+                test_names.update(test_sets_cfg[test_set])
+            rv[test_platform] = cfg.copy()
+            rv[test_platform]['test-names'] = test_names
+        return rv
new file mode 100644
--- /dev/null
+++ b/taskcluster/taskgraph/task/transform.py
@@ -0,0 +1,130 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import logging
+import itertools
+
+from . import base
+from .. import files_changed
+from ..util.python_path import find_object
+from ..util.templates import merge
+from ..util.yaml import load_yaml
+from ..util.seta import is_low_value_task
+
+from ..transforms.base import TransformSequence, TransformConfig
+
+logger = logging.getLogger(__name__)
+
+
+class TransformTask(base.Task):
+    """
+    Tasks of this class are generated by applying transformations to a sequence
+    of input entities.  By default, it gets those inputs from YAML data in the
+    kind directory, but subclasses may override `get_inputs` to produce them in
+    some other way.
+    """
+
+    @classmethod
+    def get_inputs(cls, kind, path, config, params, loaded_tasks):
+        """
+        Get the input elements that will be transformed into tasks.  The
+        elements themselves are free-form, and become the input to the first
+        transform.
+
+        By default, this reads jobs from the `jobs` key, or from yaml files
+        named by `jobs-from`.  The entities are read from mappings, and the
+        keys to those mappings are added in the `name` key of each entity.
+
+        If there is a `job-defaults` config, then every job is merged with it.
+        This provides a simple way to set default values for all jobs of a
+        kind.  More complex defaults should be implemented with custom
+        transforms.
+
+        This method can be overridden in subclasses that need to perform more
+        complex calculations to generate the list of inputs.
+        """
+        def jobs():
+            defaults = config.get('job-defaults')
+            jobs = config.get('jobs', {}).iteritems()
+            jobs_from = itertools.chain.from_iterable(
+                load_yaml(path, filename).iteritems()
+                for filename in config.get('jobs-from', {}))
+            for name, job in itertools.chain(jobs, jobs_from):
+                if defaults:
+                    job = merge(defaults, job)
+                yield name, job
+
+        for name, job in jobs():
+            job['name'] = name
+            logger.debug("Generating tasks for {} {}".format(kind, name))
+            yield job
+
+    @classmethod
+    def load_tasks(cls, kind, path, config, params, loaded_tasks):
+        inputs = cls.get_inputs(kind, path, config, params, loaded_tasks)
+
+        transforms = TransformSequence()
+        for xform_path in config['transforms']:
+            transform = find_object(xform_path)
+            transforms.add(transform)
+
+        # perform the transformations
+        trans_config = TransformConfig(kind, path, config, params)
+        tasks = [cls(kind, t) for t in transforms(trans_config, inputs)]
+        return tasks
+
+    def __init__(self, kind, task):
+        self.dependencies = task['dependencies']
+        self.when = task['when']
+        super(TransformTask, self).__init__(kind, task['label'],
+                                            task['attributes'], task['task'],
+                                            index_paths=task.get('index-paths'))
+
+    def get_dependencies(self, taskgraph):
+        return [(label, name) for name, label in self.dependencies.items()]
+
+    def optimize(self, params):
+        bbb_task = False
+
+        if self.index_paths:
+            optimized, taskId = super(TransformTask, self).optimize(params)
+            if optimized:
+                return optimized, taskId
+
+        elif 'files-changed' in self.when:
+            changed = files_changed.check(
+                params, self.when['files-changed'])
+            if not changed:
+                logger.debug('no files found matching a pattern in `when.files-changed` for ' +
+                             self.label)
+                return True, None
+
+        # no need to call SETA for build jobs
+        if self.task.get('extra', {}).get('treeherder', {}).get('jobKind', '') == 'build':
+            return False, None
+
+        # for bbb tasks we need to send in the buildbot buildername
+        if self.task.get('provisionerId', '') == 'buildbot-bridge':
+            self.label = self.task.get('payload').get('buildername')
+            bbb_task = True
+
+        # we would like to return 'False, None' while it's high_value_task
+        # and we wouldn't optimize it. Otherwise, it will return 'True, None'
+        if is_low_value_task(self.label,
+                             params.get('project'),
+                             params.get('pushlog_id'),
+                             params.get('pushdate'),
+                             bbb_task):
+            # Always optimize away low-value tasks
+            return True, None
+        else:
+            return False, None
+
+    @classmethod
+    def from_json(cls, task_dict):
+        # when reading back from JSON, we lose the "when" information
+        task_dict['when'] = {}
+        return cls(task_dict['attributes']['kind'], task_dict)
--- a/taskcluster/taskgraph/taskgraph.py
+++ b/taskcluster/taskgraph/taskgraph.py
@@ -1,31 +1,51 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 from .graph import Graph
-from .task import Task
+from .util.python_path import find_object
 
 
 class TaskGraph(object):
     """
     Representation of a task graph.
 
     A task graph is a combination of a Graph and a dictionary of tasks indexed
     by label.  TaskGraph instances should be treated as immutable.
     """
 
     def __init__(self, tasks, graph):
         assert set(tasks) == graph.nodes
         self.tasks = tasks
         self.graph = graph
 
+    def to_json(self):
+        "Return a JSON-able object representing the task graph, as documented"
+        named_links_dict = self.graph.named_links_dict()
+        # this dictionary may be keyed by label or by taskid, so let's just call it 'key'
+        tasks = {}
+        for key in self.graph.visit_postorder():
+            task = self.tasks[key]
+            implementation = task.__class__.__module__ + ":" + task.__class__.__name__
+            task_json = {
+                'label': task.label,
+                'attributes': task.attributes,
+                'dependencies': named_links_dict.get(key, {}),
+                'task': task.task,
+                'kind_implementation': implementation
+            }
+            if task.task_id:
+                task_json['task_id'] = task.task_id
+            tasks[key] = task_json
+        return tasks
+
     def for_each_task(self, f, *args, **kwargs):
         for task_label in self.graph.visit_postorder():
             task = self.tasks[task_label]
             f(task, self, *args, **kwargs)
 
     def __getitem__(self, label):
         "Get a task by label"
         return self.tasks[label]
@@ -35,35 +55,28 @@ class TaskGraph(object):
         return self.tasks.itervalues()
 
     def __repr__(self):
         return "<TaskGraph graph={!r} tasks={!r}>".format(self.graph, self.tasks)
 
     def __eq__(self, other):
         return self.tasks == other.tasks and self.graph == other.graph
 
-    def to_json(self):
-        "Return a JSON-able object representing the task graph, as documented"
-        named_links_dict = self.graph.named_links_dict()
-        # this dictionary may be keyed by label or by taskid, so let's just call it 'key'
-        tasks = {}
-        for key in self.graph.visit_postorder():
-            tasks[key] = self.tasks[key].to_json()
-            # overwrite dependencies with the information in the taskgraph's edges.
-            tasks[key]['dependencies'] = named_links_dict.get(key, {})
-        return tasks
-
     @classmethod
     def from_json(cls, tasks_dict):
         """
         This code is used to generate the a TaskGraph using a dictionary
         which is representative of the TaskGraph.
         """
         tasks = {}
         edges = set()
         for key, value in tasks_dict.iteritems():
-            tasks[key] = Task.from_json(value)
+            # We get the implementation from JSON
+            implementation = value['kind_implementation']
+            # Loading the module and creating a Task from a dictionary
+            task_kind = find_object(implementation)
+            tasks[key] = task_kind.from_json(value)
             if 'task_id' in value:
                 tasks[key].task_id = value['task_id']
             for depname, dep in value['dependencies'].iteritems():
                 edges.add((key, dep, depname))
         task_graph = cls(tasks, Graph(set(tasks), edges))
         return tasks, task_graph
--- a/taskcluster/taskgraph/test/test_create.py
+++ b/taskcluster/taskgraph/test/test_create.py
@@ -5,17 +5,17 @@
 from __future__ import absolute_import, print_function, unicode_literals
 
 import unittest
 import os
 
 from .. import create
 from ..graph import Graph
 from ..taskgraph import TaskGraph
-from ..task import Task
+from .util import TestTask
 
 from mozunit import main
 
 
 class TestCreate(unittest.TestCase):
 
     def setUp(self):
         self.old_task_id = os.environ.get('TASK_ID')
@@ -32,18 +32,18 @@ class TestCreate(unittest.TestCase):
         elif 'TASK_ID' in os.environ:
             del os.environ['TASK_ID']
 
     def fake_create_task(self, session, task_id, label, task_def):
         self.created_tasks[task_id] = task_def
 
     def test_create_tasks(self):
         tasks = {
-            'tid-a': Task(kind='test', label='a', attributes={}, task={'payload': 'hello world'}),
-            'tid-b': Task(kind='test', label='b', attributes={}, task={'payload': 'hello world'}),
+            'tid-a': TestTask(label='a', task={'payload': 'hello world'}),
+            'tid-b': TestTask(label='b', task={'payload': 'hello world'}),
         }
         label_to_taskid = {'a': 'tid-a', 'b': 'tid-b'}
         graph = Graph(nodes={'tid-a', 'tid-b'}, edges={('tid-a', 'tid-b', 'edge')})
         taskgraph = TaskGraph(tasks, graph)
 
         create.create_tasks(taskgraph, label_to_taskid, {'level': '4'})
 
         for tid, task in self.created_tasks.iteritems():
@@ -55,17 +55,17 @@ class TestCreate(unittest.TestCase):
                     # Don't look for decisiontask here
                     continue
                 self.assertIn(depid, self.created_tasks)
 
     def test_create_task_without_dependencies(self):
         "a task with no dependencies depends on the decision task"
         os.environ['TASK_ID'] = 'decisiontask'
         tasks = {
-            'tid-a': Task(kind='test', label='a', attributes={}, task={'payload': 'hello world'}),
+            'tid-a': TestTask(label='a', task={'payload': 'hello world'}),
         }
         label_to_taskid = {'a': 'tid-a'}
         graph = Graph(nodes={'tid-a'}, edges=set())
         taskgraph = TaskGraph(tasks, graph)
 
         create.create_tasks(taskgraph, label_to_taskid, {'level': '4'})
 
         for tid, task in self.created_tasks.iteritems():
--- a/taskcluster/taskgraph/test/test_decision.py
+++ b/taskcluster/taskgraph/test/test_decision.py
@@ -7,21 +7,51 @@ from __future__ import absolute_import, 
 import os
 import json
 import yaml
 import shutil
 import unittest
 import tempfile
 
 from .. import decision
+from ..graph import Graph
+from ..taskgraph import TaskGraph
+from .util import TestTask
 from mozunit import main
 
 
 class TestDecision(unittest.TestCase):
 
+    def test_taskgraph_to_json(self):
+        tasks = {
+            'a': TestTask(label='a', attributes={'attr': 'a-task'}),
+            'b': TestTask(label='b', task={'task': 'def'}),
+        }
+        graph = Graph(nodes=set('ab'), edges={('a', 'b', 'edgelabel')})
+        taskgraph = TaskGraph(tasks, graph)
+
+        res = taskgraph.to_json()
+
+        self.assertEqual(res, {
+            'a': {
+                'label': 'a',
+                'attributes': {'attr': 'a-task', 'kind': 'test'},
+                'task': {},
+                'dependencies': {'edgelabel': 'b'},
+                'kind_implementation': 'taskgraph.test.util:TestTask',
+            },
+            'b': {
+                'label': 'b',
+                'attributes': {'kind': 'test'},
+                'task': {'task': 'def'},
+                'dependencies': {},
+                'kind_implementation': 'taskgraph.test.util:TestTask',
+            }
+        })
+
     def test_write_artifact_json(self):
         data = [{'some': 'data'}]
         tmpdir = tempfile.mkdtemp()
         try:
             decision.ARTIFACTS_DIR = os.path.join(tmpdir, "artifacts")
             decision.write_artifact("artifact.json", data)
             with open(os.path.join(decision.ARTIFACTS_DIR, "artifact.json")) as f:
                 self.assertEqual(json.load(f), data)
--- a/taskcluster/taskgraph/test/test_generator.py
+++ b/taskcluster/taskgraph/test/test_generator.py
@@ -1,53 +1,65 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import unittest
 
-from taskgraph.generator import TaskGraphGenerator, Kind
-from taskgraph import graph, target_tasks as target_tasks_mod
+from ..generator import TaskGraphGenerator, Kind
+from .. import graph, target_tasks as target_tasks_mod
+from ..task import base
 from mozunit import main
 
 
-def fake_loader(kind, path, config, parameters, loaded_tasks):
-    for i in range(3):
-        dependencies = {}
-        if i >= 1:
-            dependencies['prev'] = '{}-t-{}'.format(kind, i-1)
-        yield {'kind': kind,
-               'label': '{}-t-{}'.format(kind, i),
-               'attributes': {'_tasknum': str(i)},
-               'task': {'i': i},
-               'dependencies': dependencies}
+class FakeTask(base.Task):
+
+    def __init__(self, **kwargs):
+        self.i = kwargs.pop('i')
+        super(FakeTask, self).__init__(**kwargs)
+
+    @classmethod
+    def load_tasks(cls, kind, path, config, parameters, loaded_tasks):
+        return [cls(kind=kind,
+                    label='{}-t-{}'.format(kind, i),
+                    attributes={'_tasknum': str(i)},
+                    task={},
+                    i=i)
+                for i in range(3)]
+
+    def get_dependencies(self, full_task_set):
+        i = self.i
+        if i > 0:
+            return [('{}-t-{}'.format(self.kind, i - 1), 'prev')]
+        else:
+            return []
+
+    def optimize(self, params):
+        return False, None
 
 
 class FakeKind(Kind):
 
-    def _get_loader(self):
-        return fake_loader
+    def _get_impl_class(self):
+        return FakeTask
 
     def load_tasks(self, parameters, loaded_tasks):
         FakeKind.loaded_kinds.append(self.name)
         return super(FakeKind, self).load_tasks(parameters, loaded_tasks)
 
 
 class WithFakeKind(TaskGraphGenerator):
 
     def _load_kinds(self):
         for kind_name, deps in self.parameters['_kinds']:
-            config = {
-                'transforms': [],
-            }
-            if deps:
-                config['kind-dependencies'] = deps
-            yield FakeKind(kind_name, '/fake', config)
+            yield FakeKind(
+                kind_name, '/fake',
+                {'kind-dependencies': deps} if deps else {})
 
 
 class TestGenerator(unittest.TestCase):
 
     def maketgg(self, target_tasks=None, kinds=[('_fake', [])]):
         FakeKind.loaded_kinds = []
         self.target_tasks = target_tasks or []
 
@@ -116,11 +128,10 @@ class TestGenerator(unittest.TestCase):
         tid = self.tgg.label_to_taskid
         self.assertEqual(
             self.tgg.optimized_task_graph.graph,
             graph.Graph({tid['_fake-t-0'], tid['_fake-t-1'], tid['_fake-t-2']}, {
                 (tid['_fake-t-1'], tid['_fake-t-0'], 'prev'),
                 (tid['_fake-t-2'], tid['_fake-t-1'], 'prev'),
             }))
 
-
 if __name__ == '__main__':
     main()
--- a/taskcluster/taskgraph/test/test_optimize.py
+++ b/taskcluster/taskgraph/test/test_optimize.py
@@ -1,21 +1,21 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import unittest
 
-from ..optimize import optimize_task_graph, resolve_task_references, optimization
+from ..optimize import optimize_task_graph, resolve_task_references
 from ..optimize import annotate_task_graph, get_subgraph
 from ..taskgraph import TaskGraph
 from .. import graph
-from ..task import Task
+from .util import TestTask
 
 
 class TestResolveTaskReferences(unittest.TestCase):
 
     def do(self, input, output):
         taskid_for_edge_name = {'edge%d' % n: 'tid%d' % n for n in range(1, 4)}
         self.assertEqual(resolve_task_references('subject', input, taskid_for_edge_name), output)
 
@@ -48,112 +48,113 @@ class TestResolveTaskReferences(unittest
         "resolve_task_references raises a KeyError on reference to an invalid task"
         self.assertRaisesRegexp(
             KeyError,
             "task 'subject' has no dependency named 'no-such'",
             lambda: resolve_task_references('subject', {'task-reference': '<no-such>'}, {})
         )
 
 
+class OptimizingTask(TestTask):
+    # the `optimize` method on this class is overridden direclty in the tests
+    # below.
+    pass
+
+
 class TestOptimize(unittest.TestCase):
 
     kind = None
 
-    @classmethod
-    def setUpClass(cls):
-        # set up some simple optimization functions
-        optimization('no-optimize')(lambda self, params: (False, None))
-        optimization('optimize-away')(lambda self, params: (True, None))
-        optimization('optimize-to-task')(lambda self, params, task: (True, task))
-        optimization('false-with-taskid')(lambda self, params: (False, 'some-taskid'))
-
-    def make_task(self, label, optimization=None, task_def=None, optimized=None, task_id=None):
+    def make_task(self, label, task_def=None, optimized=None, task_id=None):
         task_def = task_def or {'sample': 'task-def'}
-        task = Task(kind='test', label=label, attributes={}, task=task_def)
+        task = OptimizingTask(label=label, task=task_def)
         task.optimized = optimized
-        if optimization:
-            task.optimizations = [optimization]
-        else:
-            task.optimizations = []
         task.task_id = task_id
         return task
 
     def make_graph(self, *tasks_and_edges):
-        tasks = {t.label: t for t in tasks_and_edges if isinstance(t, Task)}
-        edges = {e for e in tasks_and_edges if not isinstance(e, Task)}
+        tasks = {t.label: t for t in tasks_and_edges if isinstance(t, OptimizingTask)}
+        edges = {e for e in tasks_and_edges if not isinstance(e, OptimizingTask)}
         return TaskGraph(tasks, graph.Graph(set(tasks), edges))
 
     def assert_annotations(self, graph, **annotations):
         def repl(task_id):
             return 'SLUGID' if task_id and len(task_id) == 22 else task_id
         got_annotations = {
             t.label: (t.optimized, repl(t.task_id)) for t in graph.tasks.itervalues()
         }
         self.assertEqual(got_annotations, annotations)
 
     def test_annotate_task_graph_no_optimize(self):
         "annotating marks everything as un-optimized if the kind returns that"
+        OptimizingTask.optimize = lambda self, params: (False, None)
         graph = self.make_graph(
-            self.make_task('task1', ['no-optimize']),
-            self.make_task('task2', ['no-optimize']),
-            self.make_task('task3', ['no-optimize']),
+            self.make_task('task1'),
+            self.make_task('task2'),
+            self.make_task('task3'),
             ('task2', 'task1', 'build'),
             ('task2', 'task3', 'image'),
         )
         annotate_task_graph(graph, {}, set(), graph.graph.named_links_dict(), {}, None)
         self.assert_annotations(
             graph,
             task1=(False, None),
             task2=(False, None),
             task3=(False, None)
         )
 
     def test_annotate_task_graph_taskid_without_optimize(self):
         "raises exception if kind returns a taskid without optimizing"
-        graph = self.make_graph(self.make_task('task1', ['false-with-taskid']))
+        OptimizingTask.optimize = lambda self, params: (False, 'some-taskid')
+        graph = self.make_graph(self.make_task('task1'))
         self.assertRaises(
             Exception,
             lambda: annotate_task_graph(graph, {}, set(), graph.graph.named_links_dict(), {}, None)
         )
 
     def test_annotate_task_graph_optimize_away_dependency(self):
         "raises exception if kind optimizes away a task on which another depends"
+        OptimizingTask.optimize = \
+            lambda self, params: (True, None) if self.label == 'task1' else (False, None)
         graph = self.make_graph(
-            self.make_task('task1', ['optimize-away']),
-            self.make_task('task2', ['no-optimize']),
+            self.make_task('task1'),
+            self.make_task('task2'),
             ('task2', 'task1', 'build'),
         )
         self.assertRaises(
             Exception,
             lambda: annotate_task_graph(graph, {}, set(), graph.graph.named_links_dict(), {}, None)
         )
 
     def test_annotate_task_graph_do_not_optimize(self):
         "annotating marks everything as un-optimized if in do_not_optimize"
+        OptimizingTask.optimize = lambda self, params: (True, 'taskid')
         graph = self.make_graph(
-            self.make_task('task1', ['optimize-away']),
-            self.make_task('task2', ['optimize-away']),
+            self.make_task('task1'),
+            self.make_task('task2'),
             ('task2', 'task1', 'build'),
         )
         label_to_taskid = {}
         annotate_task_graph(graph, {}, {'task1', 'task2'},
                             graph.graph.named_links_dict(), label_to_taskid, None)
         self.assert_annotations(
             graph,
             task1=(False, None),
             task2=(False, None)
         )
         self.assertEqual
 
     def test_annotate_task_graph_nos_do_not_propagate(self):
         "a task with a non-optimized dependency can be optimized"
+        OptimizingTask.optimize = \
+            lambda self, params: (False, None) if self.label == 'task1' else (True, 'taskid')
         graph = self.make_graph(
-            self.make_task('task1', ['no-optimize']),
-            self.make_task('task2', ['optimize-to-task', 'taskid']),
-            self.make_task('task3', ['optimize-to-task', 'taskid']),
+            self.make_task('task1'),
+            self.make_task('task2'),
+            self.make_task('task3'),
             ('task2', 'task1', 'build'),
             ('task2', 'task3', 'image'),
         )
         annotate_task_graph(graph, {}, set(),
                             graph.graph.named_links_dict(), {}, None)
         self.assert_annotations(
             graph,
             task1=(False, None),
@@ -235,19 +236,21 @@ class TestOptimize(unittest.TestCase):
         self.assertEqual(sub.graph.edges, {(task2, task3, 'test')})
         self.assertEqual(sub.tasks[task2].task_id, task2)
         self.assertEqual(sorted(sub.tasks[task2].task['dependencies']), sorted([task3, 'dep1']))
         self.assertEqual(sub.tasks[task2].task['payload'], 'http://dep1/' + task3)
         self.assertEqual(sub.tasks[task3].task_id, task3)
 
     def test_optimize(self):
         "optimize_task_graph annotates and extracts the subgraph from a simple graph"
+        OptimizingTask.optimize = \
+            lambda self, params: (True, 'dep1') if self.label == 'task1' else (False, None)
         input = self.make_graph(
-            self.make_task('task1', ['optimize-to-task', 'dep1']),
-            self.make_task('task2', ['no-optimize']),
-            self.make_task('task3', ['no-optimize']),
+            self.make_task('task1'),
+            self.make_task('task2'),
+            self.make_task('task3'),
             ('task2', 'task1', 'build'),
             ('task2', 'task3', 'image'),
         )
         opt, label_to_taskid = optimize_task_graph(input, {}, set())
         self.assertEqual(opt.graph, graph.Graph(
             {label_to_taskid['task2'], label_to_taskid['task3']},
             {(label_to_taskid['task2'], label_to_taskid['task3'], 'image')}))
--- a/taskcluster/taskgraph/test/test_target_tasks.py
+++ b/taskcluster/taskgraph/test/test_target_tasks.py
@@ -5,17 +5,17 @@
 from __future__ import absolute_import, print_function, unicode_literals
 
 import unittest
 
 from .. import target_tasks
 from .. import try_option_syntax
 from ..graph import Graph
 from ..taskgraph import TaskGraph
-from ..task import Task
+from .util import TestTask
 from mozunit import main
 
 
 class FakeTryOptionSyntax(object):
 
     def __init__(self, message, task_graph):
         self.trigger_tests = 0
         self.talos_trigger_tests = 0
@@ -29,19 +29,18 @@ class FakeTryOptionSyntax(object):
         return 'at-at' in attributes
 
 
 class TestTargetTasks(unittest.TestCase):
 
     def default_matches(self, run_on_projects, project):
         method = target_tasks.get_method('default')
         graph = TaskGraph(tasks={
-            'a': Task(kind='build', label='a',
-                      attributes={'run_on_projects': run_on_projects},
-                      task={}),
+            'a': TestTask(kind='build', label='a',
+                          attributes={'run_on_projects': run_on_projects}),
         }, graph=Graph(nodes={'a'}, edges=set()))
         parameters = {'project': project}
         return 'a' in method(graph, parameters)
 
     def test_default_all(self):
         """run_on_projects=[all] includes release, integration, and other projects"""
         self.assertTrue(self.default_matches(['all'], 'mozilla-central'))
         self.assertTrue(self.default_matches(['all'], 'mozilla-inbound'))
@@ -63,18 +62,18 @@ class TestTargetTasks(unittest.TestCase)
     def test_default_nothing(self):
         """run_on_projects=[] includes nothing"""
         self.assertFalse(self.default_matches([], 'mozilla-central'))
         self.assertFalse(self.default_matches([], 'mozilla-inbound'))
         self.assertFalse(self.default_matches([], 'baobab'))
 
     def test_try_option_syntax(self):
         tasks = {
-            'a': Task(kind=None, label='a', attributes={}, task={}),
-            'b': Task(kind=None, label='b', attributes={'at-at': 'yep'}, task={}),
+            'a': TestTask(kind=None, label='a'),
+            'b': TestTask(kind=None, label='b', attributes={'at-at': 'yep'}),
         }
         graph = Graph(nodes=set('ab'), edges=set())
         tg = TaskGraph(tasks, graph)
         params = {'message': 'try me'}
 
         orig_TryOptionSyntax = try_option_syntax.TryOptionSyntax
         try:
             try_option_syntax.TryOptionSyntax = FakeTryOptionSyntax
new file mode 100644
--- /dev/null
+++ b/taskcluster/taskgraph/test/test_task_docker_image.py
@@ -0,0 +1,35 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import unittest
+import os
+
+from ..task import docker_image
+from mozunit import main
+
+
+KIND_PATH = os.path.join(docker_image.GECKO, 'taskcluster', 'ci', 'docker-image')
+
+
+class TestDockerImageKind(unittest.TestCase):
+
+    def setUp(self):
+        self.task = docker_image.DockerImageTask(
+            'docker-image',
+            KIND_PATH,
+            {},
+            {},
+            index_paths=[])
+
+    def test_get_task_dependencies(self):
+        # this one's easy!
+        self.assertEqual(self.task.get_dependencies(None), [])
+
+    # TODO: optimize_task
+
+
+if __name__ == '__main__':
+    main()
--- a/taskcluster/taskgraph/test/test_taskgraph.py
+++ b/taskcluster/taskgraph/test/test_taskgraph.py
@@ -2,77 +2,53 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import unittest
 
 from ..graph import Graph
-from ..task import Task
+from ..task.docker_image import DockerImageTask
+from ..task.transform import TransformTask
 from ..taskgraph import TaskGraph
 from mozunit import main
+from taskgraph.util.docker import INDEX_PREFIX
 
 
-class TestTaskGraph(unittest.TestCase):
-
-    maxDiff = None
-
-    def test_taskgraph_to_json(self):
-        tasks = {
-            'a': Task(kind='test', label='a',
-                      attributes={'attr': 'a-task'},
-                      task={'taskdef': True}),
-            'b': Task(kind='test', label='b',
-                      attributes={},
-                      task={'task': 'def'},
-                      optimizations=[['seta']],
-                      # note that this dep is ignored, superseded by that
-                      # from the taskgraph's edges
-                      dependencies={'first': 'a'}),
-        }
-        graph = Graph(nodes=set('ab'), edges={('a', 'b', 'edgelabel')})
-        taskgraph = TaskGraph(tasks, graph)
-
-        res = taskgraph.to_json()
+class TestTargetTasks(unittest.TestCase):
 
-        self.assertEqual(res, {
-            'a': {
-                'kind': 'test',
-                'label': 'a',
-                'attributes': {'attr': 'a-task', 'kind': 'test'},
-                'task': {'taskdef': True},
-                'dependencies': {'edgelabel': 'b'},
-                'optimizations': [],
-            },
-            'b': {
-                'kind': 'test',
-                'label': 'b',
-                'attributes': {'kind': 'test'},
-                'task': {'task': 'def'},
-                'dependencies': {},
-                'optimizations': [['seta']],
+    def test_from_json(self):
+        task = {
+            "routes": [],
+            "extra": {
+                "imageMeta": {
+                    "contextHash": "<hash>",
+                    "imageName": "<image>",
+                    "level": "1"
+                }
             }
-        })
-
-    def test_round_trip(self):
+        }
+        index_paths = ["{}.level-{}.<image>.hash.<hash>".format(INDEX_PREFIX, level)
+                       for level in reversed(range(1, 4))]
         graph = TaskGraph(tasks={
-            'a': Task(
+            'a': TransformTask(
                 kind='fancy',
-                label='a',
-                attributes={},
-                dependencies={'prereq': 'b'},  # must match edges, below
-                optimizations=[['seta']],
-                task={'task': 'def'}),
-            'b': Task(
-                kind='pre',
-                label='b',
-                attributes={},
-                dependencies={},
-                optimizations=[['seta']],
-                task={'task': 'def2'}),
-        }, graph=Graph(nodes={'a', 'b'}, edges={('a', 'b', 'prereq')}))
+                task={
+                    'label': 'a',
+                    'attributes': {},
+                    'dependencies': {},
+                    'when': {},
+                    'task': {'task': 'def'},
+                }),
+            'b': DockerImageTask(kind='docker-image',
+                                 label='b',
+                                 attributes={},
+                                 task=task,
+                                 index_paths=index_paths),
+        }, graph=Graph(nodes={'a', 'b'}, edges=set()))
 
         tasks, new_graph = TaskGraph.from_json(graph.to_json())
+        self.assertEqual(graph.tasks['a'], new_graph.tasks['a'])
         self.assertEqual(graph, new_graph)
 
 if __name__ == '__main__':
     main()
--- a/taskcluster/taskgraph/test/test_try_option_syntax.py
+++ b/taskcluster/taskgraph/test/test_try_option_syntax.py
@@ -5,35 +5,35 @@
 from __future__ import absolute_import, print_function, unicode_literals
 
 import unittest
 
 from ..try_option_syntax import TryOptionSyntax
 from ..try_option_syntax import RIDEALONG_BUILDS
 from ..graph import Graph
 from ..taskgraph import TaskGraph
-from ..task import Task
+from .util import TestTask
 from mozunit import main
 
 # an empty graph, for things that don't look at it
 empty_graph = TaskGraph({}, Graph(set(), set()))
 
 
 def unittest_task(n, tp):
-    return (n, Task('test', n, {
+    return (n, TestTask('test', n, {
         'unittest_try_name': n,
         'test_platform': tp,
-    }, {}))
+    }))
 
 
 def talos_task(n, tp):
-    return (n, Task('test', n, {
+    return (n, TestTask('test', n, {
         'talos_try_name': n,
         'test_platform': tp,
-    }, {}))
+    }))
 
 tasks = {k: v for k, v in [
     unittest_task('mochitest-browser-chrome', 'linux'),
     unittest_task('mochitest-browser-chrome-e10s', 'linux64'),
     unittest_task('mochitest-chrome', 'linux'),
     unittest_task('mochitest-webgl', 'linux'),
     unittest_task('crashtest-e10s', 'linux'),
     unittest_task('gtest', 'linux64'),
new file mode 100644
--- /dev/null
+++ b/taskcluster/taskgraph/test/util.py
@@ -0,0 +1,24 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from ..task import base
+
+
+class TestTask(base.Task):
+
+    def __init__(self, kind=None, label=None, attributes=None, task=None):
+        super(TestTask, self).__init__(
+                kind or 'test',
+                label or 'test-label',
+                attributes or {},
+                task or {})
+
+    @classmethod
+    def load_tasks(cls, kind, path, config, parameters):
+        return []
+
+    def get_dependencies(self, taskgraph):
+        return []
deleted file mode 100644
--- a/taskcluster/taskgraph/transforms/docker_image.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-"""
-Transform the upload-symbols task description template,
-  taskcluster/ci/upload-symbols/job-template.yml
-into an actual task description.
-"""
-
-from __future__ import absolute_import, print_function, unicode_literals
-
-import os
-
-from taskgraph.transforms.base import TransformSequence
-from .. import GECKO
-from taskgraph.util.docker import (
-    docker_image,
-    generate_context_hash,
-    INDEX_PREFIX,
-)
-
-transforms = TransformSequence()
-
-ROUTE_TEMPLATES = [
-    'index.{index_prefix}.level-{level}.{image_name}.latest',
-    'index.{index_prefix}.level-{level}.{image_name}.pushdate.{year}.{month}-{day}-{pushtime}',
-    'index.{index_prefix}.level-{level}.{image_name}.hash.{context_hash}',
-]
-
-
-@transforms.add
-def fill_template(config, tasks):
-    for task in tasks:
-        image_name = task.pop('name')
-        job_symbol = task.pop('symbol')
-
-        context_path = os.path.join('taskcluster', 'docker', image_name)
-        context_hash = generate_context_hash(GECKO, context_path, image_name)
-
-        description = 'Build the docker image {} for use by dependent tasks'.format(
-            image_name)
-
-        routes = []
-        for tpl in ROUTE_TEMPLATES:
-            routes.append(tpl.format(
-                index_prefix=INDEX_PREFIX,
-                level=config.params['level'],
-                image_name=image_name,
-                project=config.params['project'],
-                head_rev=config.params['head_rev'],
-                pushlog_id=config.params.get('pushlog_id', 0),
-                pushtime=config.params['moz_build_date'][8:],
-                year=config.params['moz_build_date'][0:4],
-                month=config.params['moz_build_date'][4:6],
-                day=config.params['moz_build_date'][6:8],
-                context_hash=context_hash,
-            ))
-
-        # As an optimization, if the context hash exists for a high level, that image
-        # task ID will be used.  The reasoning behind this is that eventually everything ends
-        # up on level 3 at some point if most tasks use this as a common image
-        # for a given context hash, a worker within Taskcluster does not need to contain
-        # the same image per branch.
-        optimizations = [['index-search', '{}.level-{}.{}.hash.{}'.format(
-            INDEX_PREFIX, level, image_name, context_hash)]
-            for level in reversed(range(int(config.params['level']), 4))]
-
-        # include some information that is useful in reconstructing this task
-        # from JSON
-        taskdesc = {
-            'label': 'build-docker-image-' + image_name,
-            'description': description,
-            'attributes': {'image_name': image_name},
-            'expires-after': '1 year',
-            'routes': routes,
-            'optimizations': optimizations,
-            'scopes': ['secrets:get:project/taskcluster/gecko/hgfingerprint'],
-            'treeherder': {
-                'symbol': job_symbol,
-                'platform': 'taskcluster-images/opt',
-                'kind': 'other',
-                'tier': 1,
-            },
-            'run-on-projects': [],
-            'worker-type': 'aws-provisioner-v1/gecko-images',
-            # can't use {in-tree: ..} here, otherwise we might try to build
-            # this image..
-            'worker': {
-                'implementation': 'docker-worker',
-                'docker-image': docker_image('image_builder'),
-                'caches': [{
-                    'type': 'persistent',
-                    'name': 'level-{}-imagebuilder-v1'.format(config.params['level']),
-                    'mount-point': '/home/worker/checkouts',
-                }],
-                'artifacts': [{
-                    'type': 'file',
-                    'path': '/home/worker/workspace/artifacts/image.tar.zst',
-                    'name': 'public/image.tar.zst',
-                }],
-                'env': {
-                    'HG_STORE_PATH': '/home/worker/checkouts/hg-store',
-                    'HASH': context_hash,
-                    'PROJECT': config.params['project'],
-                    'IMAGE_NAME': image_name,
-                    'GECKO_BASE_REPOSITORY': config.params['base_repository'],
-                    'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
-                    'GECKO_HEAD_REV': config.params['head_rev'],
-                },
-                'chain-of-trust': True,
-                'docker-in-docker': True,
-                'taskcluster-proxy': True,
-                'max-run-time': 3600,
-            },
-        }
-
-        yield taskdesc
--- a/taskcluster/taskgraph/transforms/job/__init__.py
+++ b/taskcluster/taskgraph/transforms/job/__init__.py
@@ -53,28 +53,18 @@ job_description_schema = Schema({
     Optional('routes'): task_description_schema['routes'],
     Optional('scopes'): task_description_schema['scopes'],
     Optional('tags'): task_description_schema['tags'],
     Optional('extra'): task_description_schema['extra'],
     Optional('treeherder'): task_description_schema['treeherder'],
     Optional('index'): task_description_schema['index'],
     Optional('run-on-projects'): task_description_schema['run-on-projects'],
     Optional('coalesce-name'): task_description_schema['coalesce-name'],
-    Optional('optimizations'): task_description_schema['optimizations'],
     Optional('needs-sccache'): task_description_schema['needs-sccache'],
-
-    # The "when" section contains descriptions of the circumstances
-    # under which this task should be included in the task graph.  This
-    # will be converted into an element in the `optimizations` list.
-    Optional('when'): Any({
-        # This task only needs to be run if a file matching one of the given
-        # patterns has changed in the push.  The patterns use the mozpack
-        # match function (python/mozbuild/mozpack/path.py).
-        Optional('files-changed'): [basestring],
-    }),
+    Optional('when'): task_description_schema['when'],
 
     # A description of how to run this job.
     'run': {
         # The key to a job implementation in a peer module to this one
         'using': basestring,
 
         # Any remaining content is verified against that job implementation's
         # own schema.
@@ -127,40 +117,16 @@ def handle_keyed_by(config, jobs):
 
     for job in jobs:
         for field in fields:
             resolve_keyed_by(job, field, item_name=job['name'])
         yield job
 
 
 @transforms.add
-def rewrite_when_to_optimization(config, jobs):
-    for job in jobs:
-        when = job.pop('when', {})
-        files_changed = when.get('files-changed')
-        if not files_changed:
-            yield job
-            continue
-
-        # add some common files
-        files_changed.extend([
-            '{}/**'.format(config.path),
-            'taskcluster/taskgraph/**',
-        ])
-        if 'in-tree' in job['worker'].get('docker-image', {}):
-            files_changed.append('taskcluster/docker/{}/**'.format(
-                job['worker']['docker-image']['in-tree']))
-
-        job.setdefault('optimizations', []).append(['files-changed', files_changed])
-
-        assert 'when' not in job
-        yield job
-
-
-@transforms.add
 def make_task_description(config, jobs):
     """Given a build description, create a task description"""
     # import plugin modules first, before iterating over jobs
     import_all()
     for job in jobs:
         if 'label' not in job:
             if 'name' not in job:
                 raise Exception("job has neither a name nor a label")
--- a/taskcluster/taskgraph/transforms/job/toolchain.py
+++ b/taskcluster/taskgraph/transforms/job/toolchain.py
@@ -36,36 +36,36 @@ toolchain_run_schema = Schema({
     ),
 
     # Paths/patterns pointing to files that influence the outcome of a
     # toolchain build.
     Optional('resources'): [basestring],
 })
 
 
-def add_optimizations(config, run, taskdesc):
+def add_index_paths(config, run, taskdesc):
     files = list(run.get('resources', []))
     # This file
     files.append('taskcluster/taskgraph/transforms/job/toolchain.py')
     # The script
     files.append('taskcluster/scripts/misc/{}'.format(run['script']))
 
     label = taskdesc['label']
     subs = {
         'name': label.replace('toolchain-', '').split('/')[0],
         'digest': hash_paths(GECKO, files),
     }
 
-    optimizations = taskdesc.setdefault('optimizations', [])
+    index_paths = taskdesc.setdefault('index-paths', [])
 
     # We'll try to find a cached version of the toolchain at levels above
     # and including the current level, starting at the highest level.
     for level in reversed(range(int(config.params['level']), 4)):
         subs['level'] = level
-        optimizations.append(['index-search', TOOLCHAIN_INDEX.format(**subs)])
+        index_paths.append(TOOLCHAIN_INDEX.format(**subs))
 
     # ... and cache at the lowest level.
     taskdesc.setdefault('routes', []).append(
         'index.{}'.format(TOOLCHAIN_INDEX.format(**subs)))
 
 
 @run_job_using("docker-worker", "toolchain-script", schema=toolchain_run_schema)
 def docker_worker_toolchain(config, job, taskdesc):
@@ -119,17 +119,17 @@ def docker_worker_toolchain(config, job,
         '--',
         'bash',
         '-c',
         'cd /home/worker && '
         './workspace/build/src/taskcluster/scripts/misc/{}'.format(
             run['script'])
     ]
 
-    add_optimizations(config, run, taskdesc)
+    add_index_paths(config, run, taskdesc)
 
 
 @run_job_using("generic-worker", "toolchain-script", schema=toolchain_run_schema)
 def windows_toolchain(config, job, taskdesc):
     run = job['run']
 
     worker = taskdesc['worker']
 
@@ -168,9 +168,9 @@ def windows_toolchain(config, job, taskd
 
     bash = r'c:\mozilla-build\msys\bin\bash'
     worker['command'] = [
         ' '.join(hg_command),
         # do something intelligent.
         r'{} -c ./build/src/taskcluster/scripts/misc/{}'.format(bash, run['script'])
     ]
 
-    add_optimizations(config, run, taskdesc)
+    add_index_paths(config, run, taskdesc)
--- a/taskcluster/taskgraph/transforms/task.py
+++ b/taskcluster/taskgraph/transforms/task.py
@@ -47,16 +47,20 @@ task_description_schema = Schema({
     # (e.g., "14 days").  Defaults are set based on the project.
     Optional('expires-after'): basestring,
     Optional('deadline-after'): basestring,
 
     # custom routes for this task; the default treeherder routes will be added
     # automatically
     Optional('routes'): [basestring],
 
+    # The index paths where this task may be cached. Transforms are expected to
+    # fill these automatically when wanted.
+    Optional('index-paths'): [basestring],
+
     # custom scopes for this task; any scopes required for the worker will be
     # added automatically
     Optional('scopes'): [basestring],
 
     # Tags
     Optional('tags'): {basestring: object},
 
     # custom "task.extra" content
@@ -123,28 +127,16 @@ task_description_schema = Schema({
     # See the attributes documentation for details.
     Optional('run-on-projects'): [basestring],
 
     # If the task can be coalesced, this is the name used in the coalesce key
     # the project, etc. will be added automatically.  Note that try (level 1)
     # tasks are never coalesced
     Optional('coalesce-name'): basestring,
 
-    # Optimizations to perform on this task during the optimization phase,
-    # specified in order.  These optimizations are defined in
-    # taskcluster/taskgraph/optimize.py.
-    Optional('optimizations'): [Any(
-        # search the index for the given index namespace, and replace this task if found
-        ['index-search', basestring],
-        # consult SETA and skip this task if it is low-value
-        ['seta'],
-        # skip this task if none of the given file patterns match
-        ['files-changed', [basestring]],
-    )],
-
     # the provisioner-id/worker-type for the task.  The following parameters will
     # be substituted in this string:
     #  {level} -- the scm level of this push
     'worker-type': basestring,
 
     # Whether the job should use sccache compiler caching.
     Required('needs-sccache', default=False): bool,
 
@@ -165,17 +157,16 @@ task_description_schema = Schema({
 
         # worker features that should be enabled
         Required('relengapi-proxy', default=False): bool,
         Required('chain-of-trust', default=False): bool,
         Required('taskcluster-proxy', default=False): bool,
         Required('allow-ptrace', default=False): bool,
         Required('loopback-video', default=False): bool,
         Required('loopback-audio', default=False): bool,
-        Required('docker-in-docker', default=False): bool,  # (aka 'dind')
 
         # caches to set up for the task
         Optional('caches'): [{
             # only one type is supported by any of the workers right now
             'type': 'persistent',
 
             # name of the cache, allowing re-use by subsequent tasks naming the
             # same cache
@@ -340,16 +331,26 @@ task_description_schema = Schema({
 
             # type of signing task (for CoT)
             Required('taskType'): basestring,
 
             # Paths to the artifacts to sign
             Required('paths'): [basestring],
         }],
     }),
+
+    # The "when" section contains descriptions of the circumstances
+    # under which this task can be "optimized", that is, left out of the
+    # task graph because it is unnecessary.
+    Optional('when'): Any({
+        # This task only needs to be run if a file matching one of the given
+        # patterns has changed in the push.  The patterns use the mozpack
+        # match function (python/mozbuild/mozpack/path.py).
+        Optional('files-changed'): [basestring],
+    }),
 })
 
 GROUP_NAMES = {
     'py': 'Python unit tests',
     'tc': 'Executed by TaskCluster',
     'tc-e10s': 'Executed by TaskCluster with e10s',
     'tc-Fxfn-l': 'Firefox functional tests (local) executed by TaskCluster',
     'tc-Fxfn-l-e10s': 'Firefox functional tests (local) executed by TaskCluster with e10s',
@@ -369,17 +370,16 @@ GROUP_NAMES = {
     'tc-X-e10s': 'Xpcshell tests executed by TaskCluster with e10s',
     'tc-L10n': 'Localised Repacks executed by Taskcluster',
     'tc-BM-L10n': 'Beetmover for locales executed by Taskcluster',
     'tc-Up': 'Balrog submission of updates, executed by Taskcluster',
     'tc-cs': 'Checksum signing executed by Taskcluster',
     'tc-BMcs': 'Beetmover checksums, executed by Taskcluster',
     'Aries': 'Aries Device Image',
     'Nexus 5-L': 'Nexus 5-L Device Image',
-    'I': 'Docker Image Builds',
     'TL': 'Toolchain builds for Linux 64-bits',
     'TM': 'Toolchain builds for OSX',
     'TW32': 'Toolchain builds for Windows 32-bits',
     'TW64': 'Toolchain builds for Windows 64-bits',
     'SM-tc': 'Spidermonkey builds',
 }
 UNKNOWN_GROUP_NAME = "Treeherder group {} has no name; add it to " + __file__
 
@@ -455,19 +455,16 @@ def build_docker_worker_payload(config, 
 
     if worker.get('allow-ptrace'):
         features['allowPtrace'] = True
         task_def['scopes'].append('docker-worker:feature:allowPtrace')
 
     if worker.get('chain-of-trust'):
         features['chainOfTrust'] = True
 
-    if worker.get('docker-in-docker'):
-        features['dind'] = True
-
     if task.get('needs-sccache'):
         features['taskclusterProxy'] = True
         task_def['scopes'].append(
             'assume:project:taskcluster:level-{level}-sccache-buckets'.format(
                 level=config.params['level'])
         )
         worker['env']['USE_SCCACHE'] = '1'
     else:
@@ -478,21 +475,20 @@ def build_docker_worker_payload(config, 
     for lo in 'audio', 'video':
         if worker.get('loopback-' + lo):
             capitalized = 'loopback' + lo.capitalize()
             devices = capabilities.setdefault('devices', {})
             devices[capitalized] = True
             task_def['scopes'].append('docker-worker:capability:device:' + capitalized)
 
     task_def['payload'] = payload = {
+        'command': worker['command'],
         'image': image,
         'env': worker['env'],
     }
-    if 'command' in worker:
-        payload['command'] = worker['command']
 
     if 'max-run-time' in worker:
         payload['maxRunTime'] = worker['max-run-time']
 
     if 'retry-exit-status' in worker:
         payload['onExitStatus'] = {'retry': [worker['retry-exit-status']]}
 
     if 'artifacts' in worker:
@@ -758,16 +754,35 @@ def add_index_routes(config, tasks):
         else:
             extra_index['rank'] = rank
 
         del task['index']
         yield task
 
 
 @transforms.add
+def add_files_changed(config, tasks):
+    for task in tasks:
+        if 'files-changed' not in task.get('when', {}):
+            yield task
+            continue
+
+        task['when']['files-changed'].extend([
+            '{}/**'.format(config.path),
+            'taskcluster/taskgraph/**',
+        ])
+
+        if 'in-tree' in task['worker'].get('docker-image', {}):
+            task['when']['files-changed'].append('taskcluster/docker/{}/**'.format(
+                task['worker']['docker-image']['in-tree']))
+
+        yield task
+
+
+@transforms.add
 def build_task(config, tasks):
     for task in tasks:
         worker_type = task['worker-type'].format(level=str(config.params['level']))
         provisioner_id, worker_type = worker_type.split('/', 1)
 
         routes = task.get('routes', [])
         scopes = task.get('scopes', [])
 
@@ -850,17 +865,18 @@ def build_task(config, tasks):
         attributes = task.get('attributes', {})
         attributes['run_on_projects'] = task.get('run-on-projects', ['all'])
 
         yield {
             'label': task['label'],
             'task': task_def,
             'dependencies': task.get('dependencies', {}),
             'attributes': attributes,
-            'optimizations': task.get('optimizations', []),
+            'index-paths': task.get('index-paths'),
+            'when': task.get('when', {}),
         }
 
 
 # Check that the v2 route templates match those used by Mozharness.  This can
 # go away once Mozharness builds are no longer performed in Buildbot, and the
 # Mozharness code referencing routes.json is deleted.
 def check_v2_routes():
     with open("testing/mozharness/configs/routes.json", "rb") as f:
--- a/taskcluster/taskgraph/transforms/tests.py
+++ b/taskcluster/taskgraph/transforms/tests.py
@@ -641,17 +641,16 @@ def make_job_description(config, tests):
         jobdesc['when'] = test.get('when', {})
         jobdesc['attributes'] = attributes
         jobdesc['dependencies'] = {'build': build_label}
         jobdesc['expires-after'] = test['expires-after']
         jobdesc['routes'] = []
         jobdesc['run-on-projects'] = test.get('run-on-projects', ['all'])
         jobdesc['scopes'] = []
         jobdesc['tags'] = test.get('tags', {})
-        jobdesc['optimizations'] = [['seta']]  # always run SETA for tests
         jobdesc['extra'] = {
             'chunks': {
                 'current': test['this-chunk'],
                 'total': test['chunks'],
             },
             'suite': {
                 'name': suite,
                 'flavor': flavor,