Bug 1420449: Skip schema validation with --fast r=dustin,ahal
☠☠ backed out by 5950f275b192 ☠☠
author Chris AtLee <catlee@mozilla.com>
Wed, 03 Jan 2018 10:27:16 -0500
changeset 397667 7a47accb11c59245d427149ab0fe251c2977d782
parent 397666 cc9caa712156f8c278c34cf8844af0bf17cd8150
child 397668 ad03ffad73fe90c83089cfe5e14e782ebe8297da
push id 57487
push user csabou@mozilla.com
push date Wed, 03 Jan 2018 22:00:20 +0000
treeherder autoland@4f37a0996fc6
reviewers dustin, ahal
bugs 1420449
milestone 59.0a1
taskcluster/taskgraph/__init__.py
taskcluster/taskgraph/generator.py
taskcluster/taskgraph/transforms/balrog.py
taskcluster/taskgraph/transforms/beetmover.py
taskcluster/taskgraph/transforms/beetmover_cdns.py
taskcluster/taskgraph/transforms/beetmover_checksums.py
taskcluster/taskgraph/transforms/beetmover_repackage.py
taskcluster/taskgraph/transforms/checksums_signing.py
taskcluster/taskgraph/transforms/job/__init__.py
taskcluster/taskgraph/transforms/job/mach.py
taskcluster/taskgraph/transforms/job/mozharness.py
taskcluster/taskgraph/transforms/job/run_task.py
taskcluster/taskgraph/transforms/job/toolchain.py
taskcluster/taskgraph/transforms/l10n.py
taskcluster/taskgraph/transforms/repackage.py
taskcluster/taskgraph/transforms/repackage_routes.py
taskcluster/taskgraph/transforms/repackage_signing.py
taskcluster/taskgraph/transforms/signing.py
taskcluster/taskgraph/transforms/source_test.py
taskcluster/taskgraph/transforms/task.py
taskcluster/taskgraph/transforms/tests.py
taskcluster/taskgraph/util/push_apk.py
taskcluster/taskgraph/util/schema.py
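
The change that repeats across the transforms below: validate_schema() previously returned a validated copy of the object (with voluptuous defaults filled in), so transforms yielded its return value. It now raises on error and returns nothing, which lets --fast skip it entirely; transforms yield the original job unchanged, and defaults move out of the schemas into explicit set_defaults transforms or the new defaults= argument to run_job_using. A minimal sketch of the new pattern (my_schema is illustrative, not from this patch):

    # A schematic transform file using the new pattern; previously the
    # loop body was `yield validate_schema(my_schema, job, "In job:")`.
    from taskgraph.transforms.base import TransformSequence
    from taskgraph.util.schema import validate_schema, Schema
    from voluptuous import Required

    transforms = TransformSequence()
    my_schema = Schema({Required('name'): basestring})  # illustrative

    @transforms.add
    def validate(config, jobs):
        for job in jobs:
            # Raises on error, returns nothing, and is a no-op under --fast.
            validate_schema(my_schema, job, "In job:")
            yield job
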
--- a/taskcluster/taskgraph/__init__.py
+++ b/taskcluster/taskgraph/__init__.py
@@ -11,10 +11,10 @@ GECKO = os.path.realpath(os.path.join(__
 # Maximum number of dependencies a single task can have
 # https://docs.taskcluster.net/reference/platform/taskcluster-queue/references/api#createTask
 # specifies 100, but we also optionally add the decision task id as a dep in
 # taskgraph.create, so let's set this to 99.
 MAX_DEPENDENCIES = 99
 
 # Enable fast task generation for local debugging
 # This is normally switched on via the --fast/-F flag to `mach taskgraph`
-# Currently this skips toolchain task optimizations
+# Currently this skips toolchain task optimizations and schema validation
 fast = False
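
How the flag is consumed, as a minimal sketch (the mach wiring itself is outside this patch): the command-line handler sets this module-level flag before graph generation, and validate_schema() in util/schema.py (see the end of this patch) returns early when it is set.

    import taskgraph

    taskgraph.fast = True  # normally set by `mach taskgraph ... --fast`
    # From here on, every validate_schema(...) call returns immediately.
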
--- a/taskcluster/taskgraph/generator.py
+++ b/taskcluster/taskgraph/generator.py
@@ -91,17 +91,18 @@ def load_graph_config(root_dir):
     config_yml = os.path.join(root_dir, "config.yml")
     if not os.path.exists(config_yml):
         raise Exception("Couldn't find taskgraph configuration: {}".format(config_yml))
 
     logger.debug("loading config from `{}`".format(config_yml))
     with open(config_yml) as f:
         config = yaml.load(f)
 
-    return validate_graph_config(config)
+    validate_graph_config(config)
+    return config
 
 
 class TaskGraphGenerator(object):
     """
     The central controller for taskgraph.  This handles all phases of graph
     generation.  The task graph is generated from all of the kinds defined in
     subdirectories of the generator's root directory.
 
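load_graph_config() now returns the raw parsed YAML rather than the return value of validate_graph_config(), presumably because that helper is built on validate_schema(), which no longer returns the validated object. A hedged usage sketch:

    from taskgraph.generator import load_graph_config

    # Raises on an invalid config.yml (unless taskgraph.fast is set)
    # and returns the dict parsed from the YAML either way.
    graph_config = load_graph_config('taskcluster/ci')
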
--- a/taskcluster/taskgraph/transforms/balrog.py
+++ b/taskcluster/taskgraph/transforms/balrog.py
@@ -48,19 +48,20 @@ balrog_description_schema = Schema({
     Optional('notifications'): task_description_schema['notifications'],
 })
 
 
 @transforms.add
 def validate(config, jobs):
     for job in jobs:
         label = job.get('dependent-task', object).__dict__.get('label', '?no-label?')
-        yield validate_schema(
+        validate_schema(
             balrog_description_schema, job,
             "In balrog ({!r} kind) task for {!r}:".format(config.kind, label))
+        yield job
 
 
 @transforms.add
 def make_task_description(config, jobs):
     for job in jobs:
         dep_job = job['dependent-task']
 
         treeherder = job.get('treeherder', {})
--- a/taskcluster/taskgraph/transforms/beetmover.py
+++ b/taskcluster/taskgraph/transforms/beetmover.py
@@ -335,19 +335,20 @@ beetmover_description_schema = Schema({
     Optional('shipping-product'): task_description_schema['shipping-product'],
 })
 
 
 @transforms.add
 def validate(config, jobs):
     for job in jobs:
         label = job.get('dependent-task', object).__dict__.get('label', '?no-label?')
-        yield validate_schema(
+        validate_schema(
             beetmover_description_schema, job,
             "In beetmover ({!r} kind) task for {!r}:".format(config.kind, label))
+        yield job
 
 
 @transforms.add
 def make_task_description(config, jobs):
     for job in jobs:
         dep_job = job['dependent-task']
         attributes = dep_job.attributes
 
--- a/taskcluster/taskgraph/transforms/beetmover_cdns.py
+++ b/taskcluster/taskgraph/transforms/beetmover_cdns.py
@@ -46,19 +46,20 @@ beetmover_cdns_description_schema = Sche
     Optional('notifications'): task_description_schema['notifications'],
 })
 
 
 @transforms.add
 def validate(config, jobs):
     for job in jobs:
         label = job['name']
-        yield validate_schema(
+        validate_schema(
             beetmover_cdns_description_schema, job,
             "In cdns-signing ({!r} kind) task for {!r}:".format(config.kind, label))
+        yield job
 
 
 @transforms.add
 def make_beetmover_cdns_description(config, jobs):
     for job in jobs:
         treeherder = job.get('treeherder', {})
         treeherder.setdefault('symbol', 'Rel(BM-C)')
         treeherder.setdefault('tier', 1)
--- a/taskcluster/taskgraph/transforms/beetmover_checksums.py
+++ b/taskcluster/taskgraph/transforms/beetmover_checksums.py
@@ -36,19 +36,20 @@ beetmover_checksums_description_schema =
     Optional('notifications'): task_description_schema['notifications'],
 })
 
 
 @transforms.add
 def validate(config, jobs):
     for job in jobs:
         label = job.get('dependent-task', object).__dict__.get('label', '?no-label?')
-        yield validate_schema(
+        validate_schema(
             beetmover_checksums_description_schema, job,
             "In checksums-signing ({!r} kind) task for {!r}:".format(config.kind, label))
+        yield job
 
 
 @transforms.add
 def make_beetmover_checksums_description(config, jobs):
     for job in jobs:
         dep_job = job['dependent-task']
         attributes = dep_job.attributes
 
--- a/taskcluster/taskgraph/transforms/beetmover_repackage.py
+++ b/taskcluster/taskgraph/transforms/beetmover_repackage.py
@@ -157,19 +157,20 @@ beetmover_description_schema = Schema({
     Optional('shipping-product'): task_description_schema['shipping-product'],
 })
 
 
 @transforms.add
 def validate(config, jobs):
     for job in jobs:
         label = job.get('dependent-task', object).__dict__.get('label', '?no-label?')
-        yield validate_schema(
+        validate_schema(
             beetmover_description_schema, job,
             "In beetmover ({!r} kind) task for {!r}:".format(config.kind, label))
+        yield job
 
 
 @transforms.add
 def make_task_description(config, jobs):
     for job in jobs:
         dep_job = job['dependent-task']
         attributes = dep_job.attributes
 
--- a/taskcluster/taskgraph/transforms/checksums_signing.py
+++ b/taskcluster/taskgraph/transforms/checksums_signing.py
@@ -33,19 +33,20 @@ checksums_signing_description_schema = S
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
 })
 
 
 @transforms.add
 def validate(config, jobs):
     for job in jobs:
         label = job.get('dependent-task', object).__dict__.get('label', '?no-label?')
-        yield validate_schema(
+        validate_schema(
             checksums_signing_description_schema, job,
             "In checksums-signing ({!r} kind) task for {!r}:".format(config.kind, label))
+        yield job
 
 
 @transforms.add
 def make_checksums_signing_description(config, jobs):
     for job in jobs:
         dep_job = job['dependent-task']
         attributes = dep_job.attributes
 
--- a/taskcluster/taskgraph/transforms/job/__init__.py
+++ b/taskcluster/taskgraph/transforms/job/__init__.py
@@ -97,18 +97,19 @@ job_description_schema = Schema({
 })
 
 transforms = TransformSequence()
 
 
 @transforms.add
 def validate(config, jobs):
     for job in jobs:
-        yield validate_schema(job_description_schema, job,
-                              "In job {!r}:".format(job.get('name', job.get('label'))))
+        validate_schema(job_description_schema, job,
+                        "In job {!r}:".format(job.get('name', job.get('label'))))
+        yield job
 
 
 @transforms.add
 def rewrite_when_to_optimization(config, jobs):
     for job in jobs:
         when = job.pop('when', {})
         if not when:
             yield job
@@ -170,30 +171,30 @@ def make_task_description(config, jobs):
         # yield only the task description, discarding the job description
         yield taskdesc
 
 
 # A registry of all functions decorated with run_job_using
 registry = {}
 
 
-def run_job_using(worker_implementation, run_using, schema=None):
+def run_job_using(worker_implementation, run_using, schema=None, defaults={}):
     """Register the decorated function as able to set up a task description for
     jobs with the given worker implementation and `run.using` property.  If
     `schema` is given, the job's run field will be verified to match it.
 
     The decorated function should have the signature `using_foo(config, job,
 taskdesc)` and should modify the task description in-place.  The skeleton of
     the task description is already set up, but without a payload."""
     def wrap(func):
         for_run_using = registry.setdefault(run_using, {})
         if worker_implementation in for_run_using:
             raise Exception("run_job_using({!r}, {!r}) already exists: {!r}".format(
                 run_using, worker_implementation, for_run_using[run_using]))
-        for_run_using[worker_implementation] = (func, schema)
+        for_run_using[worker_implementation] = (func, schema, defaults)
         return func
     return wrap
 
 
 def configure_taskdesc_for_run(config, job, taskdesc, worker_implementation):
     """
     Run the appropriate function for this job against the given task
     description.
@@ -204,23 +205,25 @@ def configure_taskdesc_for_run(config, j
     run_using = job['run']['using']
     if run_using not in registry:
         raise Exception("no functions for run.using {!r}".format(run_using))
 
     if worker_implementation not in registry[run_using]:
         raise Exception("no functions for run.using {!r} on {!r}".format(
             run_using, worker_implementation))
 
-    func, schema = registry[run_using][worker_implementation]
+    func, schema, defaults = registry[run_using][worker_implementation]
+    for k, v in defaults.items():
+        job['run'].setdefault(k, v)
+
     if schema:
-        job['run'] = validate_schema(
+        validate_schema(
                 schema, job['run'],
-                "In job.run using {!r} for job {!r}:".format(
-                    job['run']['using'], job['label']))
-
+                "In job.run using {!r}/{!r} for job {!r}:".format(
+                    job['run']['using'], worker_implementation, job['label']))
     func(config, job, taskdesc)
 
 
 def import_all():
     """Import all modules that are siblings of this one, triggering the decorator
     above in the process."""
     for f in os.listdir(os.path.dirname(__file__)):
         if f.endswith('.py') and f not in ('commmon.py', '__init__.py'):
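
The new defaults= argument to run_job_using carries the values that the run schemas previously injected via voluptuous default=; configure_taskdesc_for_run() applies them to job['run'] with setdefault() before validating. A hypothetical registration (the runner name and field are illustrative, not from this patch):

    from taskgraph.transforms.job import run_job_using
    from taskgraph.util.schema import Schema
    from voluptuous import Required

    my_run_schema = Schema({
        Required('using'): 'my-runner',
        Required('verbose'): bool,  # no `default=` in the schema any more
    })

    @run_job_using("docker-worker", "my-runner", schema=my_run_schema,
                   defaults={'verbose': False})
    def my_runner(config, job, taskdesc):
        # 'verbose' is guaranteed present here: the defaults dict was
        # applied to job['run'] before schema validation.
        if job['run']['verbose']:
            taskdesc['worker'].setdefault('env', {})['VERBOSE'] = '1'
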
--- a/taskcluster/taskgraph/transforms/job/mach.py
+++ b/taskcluster/taskgraph/transforms/job/mach.py
@@ -14,22 +14,22 @@ from voluptuous import Required
 mach_schema = Schema({
     Required('using'): 'mach',
 
     # The mach command (omitting `./mach`) to run
     Required('mach'): basestring,
 
     # if true, perform a checkout of a comm-central based branch inside the
     # gecko checkout
-    Required('comm-checkout', default=False): bool,
+    Required('comm-checkout'): bool,
 })
 
 
-@run_job_using("docker-worker", "mach", schema=mach_schema)
-@run_job_using("native-engine", "mach", schema=mach_schema)
+@run_job_using("docker-worker", "mach", schema=mach_schema, defaults={'comm-checkout': False})
+@run_job_using("native-engine", "mach", schema=mach_schema, defaults={'comm-checkout': False})
 def docker_worker_mach(config, job, taskdesc):
     run = job['run']
 
     # defer to the run_task implementation
     run['command'] = 'cd /builds/worker/checkouts/gecko && ./mach ' + run['mach']
     run['using'] = 'run-task'
     del run['mach']
     configure_taskdesc_for_run(config, job, taskdesc, job['worker']['implementation'])
--- a/taskcluster/taskgraph/transforms/job/mozharness.py
+++ b/taskcluster/taskgraph/transforms/job/mozharness.py
@@ -57,60 +57,74 @@ mozharness_run_schema = Schema({
     Optional('extra-config'): dict,
 
     # Extra metadata to use toward the workspace caching.
     # Only supported on docker-worker
     Optional('extra-workspace-cache-key'): basestring,
 
     # If not false, tooltool downloads will be enabled via relengAPIProxy
     # for either just public files, or all files.  Not supported on Windows
-    Required('tooltool-downloads', default=False): Any(
+    Required('tooltool-downloads'): Any(
         False,
         'public',
         'internal',
     ),
 
     # The set of secret names to which the task has access; these are prefixed
     # with `project/releng/gecko/{treeherder.kind}/level-{level}/`.  Setting
     # this will enable any worker features required and set the task's scopes
     # appropriately.  `true` here means ['*'], all secrets.  Not supported on
     # Windows
-    Required('secrets', default=False): Any(bool, [basestring]),
+    Required('secrets'): Any(bool, [basestring]),
 
     # If true, taskcluster proxy will be enabled; note that it may also be enabled
     # automatically e.g., for secrets support.  Not supported on Windows.
-    Required('taskcluster-proxy', default=False): bool,
+    Required('taskcluster-proxy'): bool,
 
     # If true, the build scripts will start Xvfb.  Not supported on Windows.
-    Required('need-xvfb', default=False): bool,
+    Required('need-xvfb'): bool,
 
     # If false, indicate that builds should skip producing artifacts.  Not
     # supported on Windows.
-    Required('keep-artifacts', default=True): bool,
+    Required('keep-artifacts'): bool,
 
     # If specified, use the in-tree job script specified.
     Optional('job-script'): basestring,
 
-    Required('requires-signed-builds', default=False): bool,
+    Required('requires-signed-builds'): bool,
 
     # If false, don't set MOZ_SIMPLE_PACKAGE_NAME
     # Only disableable on windows
-    Required('use-simple-package', default=True): bool,
+    Required('use-simple-package'): bool,
 
     # If false don't pass --branch or --skip-buildbot-actions to mozharness script
     # Only disableable on windows
-    Required('use-magic-mh-args', default=True): bool,
+    Required('use-magic-mh-args'): bool,
 
     # if true, perform a checkout of a comm-central based branch inside the
     # gecko checkout
-    Required('comm-checkout', default=False): bool,
+    Required('comm-checkout'): bool,
 })
 
 
-@run_job_using("docker-worker", "mozharness", schema=mozharness_run_schema)
+mozharness_defaults = {
+    'tooltool-downloads': False,
+    'secrets': False,
+    'taskcluster-proxy': False,
+    'need-xvfb': False,
+    'keep-artifacts': True,
+    'requires-signed-builds': False,
+    'use-simple-package': True,
+    'use-magic-mh-args': True,
+    'comm-checkout': False,
+}
+
+
+@run_job_using("docker-worker", "mozharness", schema=mozharness_run_schema,
+               defaults=mozharness_defaults)
 def mozharness_on_docker_worker_setup(config, job, taskdesc):
     run = job['run']
 
     worker = taskdesc['worker']
     worker['implementation'] = job['worker']['implementation']
 
     if not run['use-simple-package']:
         raise NotImplementedError("Simple packaging cannot be disabled via"
@@ -198,17 +212,18 @@ def mozharness_on_docker_worker_setup(co
         '/builds/worker/workspace/build/src/{}'.format(
             run.get('job-script', 'taskcluster/scripts/builder/build-linux.sh')
         ),
     ]
 
     worker['command'] = command
 
 
-@run_job_using("generic-worker", "mozharness", schema=mozharness_run_schema)
+@run_job_using("generic-worker", "mozharness", schema=mozharness_run_schema,
+               defaults=mozharness_defaults)
 def mozharness_on_generic_worker(config, job, taskdesc):
     assert job['worker']['os'] == 'windows', 'only supports windows right now'
 
     run = job['run']
 
     # fail if invalid run options are included
     invalid = []
     for prop in ['tooltool-downloads',
@@ -331,17 +346,18 @@ def mozharness_on_generic_worker(config,
         ])
 
     worker['command'].extend(hg_commands)
     worker['command'].extend([
         ' '.join(mh_command)
     ])
 
 
-@run_job_using('buildbot-bridge', 'mozharness', schema=mozharness_run_schema)
+@run_job_using('buildbot-bridge', 'mozharness', schema=mozharness_run_schema,
+               defaults=mozharness_defaults)
 def mozharness_on_buildbot_bridge(config, job, taskdesc):
     run = job['run']
     worker = taskdesc['worker']
     branch = config.params['project']
     product = run.get('index', {}).get('product', 'firefox')
 
     worker.pop('env', None)
 
--- a/taskcluster/taskgraph/transforms/job/run_task.py
+++ b/taskcluster/taskgraph/transforms/job/run_task.py
@@ -12,28 +12,28 @@ from taskgraph.util.schema import Schema
 from taskgraph.transforms.job.common import support_vcs_checkout
 from voluptuous import Required, Any
 
 run_task_schema = Schema({
     Required('using'): 'run-task',
 
     # if true, add a cache at ~worker/.cache, which is where things like pip
     # tend to hide their caches.  This cache is never added for level-1 jobs.
-    Required('cache-dotcache', default=False): bool,
+    Required('cache-dotcache'): bool,
 
     # if true (the default), perform a checkout in /builds/worker/checkouts/gecko
-    Required('checkout', default=True): bool,
+    Required('checkout'): bool,
 
     # The sparse checkout profile to use. Value is the filename relative to the
     # directory where sparse profiles are defined (build/sparse-profiles/).
-    Required('sparse-profile', default=None): basestring,
+    Required('sparse-profile'): Any(basestring, None),
 
     # if true, perform a checkout of a comm-central based branch inside the
     # gecko checkout
-    Required('comm-checkout', default=False): bool,
+    Required('comm-checkout'): bool,
 
     # The command arguments to pass to the `run-task` script, after the
     # checkout arguments.  If a list, it will be passed directly; otherwise
     # it will be included in a single argument to `bash -cx`.
     Required('command'): Any([basestring], basestring),
 })
 
 
@@ -52,17 +52,25 @@ def add_checkout_to_command(run, command
 
     command.append('--vcs-checkout=/builds/worker/checkouts/gecko')
 
     if run['sparse-profile']:
         command.append('--sparse-profile=build/sparse-profiles/%s' %
                        run['sparse-profile'])
 
 
-@run_job_using("docker-worker", "run-task", schema=run_task_schema)
+docker_defaults = {
+    'cache-dotcache': False,
+    'checkout': True,
+    'comm-checkout': False,
+    'sparse-profile': None,
+}
+
+
+@run_job_using("docker-worker", "run-task", schema=run_task_schema, defaults=docker_defaults)
 def docker_worker_run_task(config, job, taskdesc):
     run = job['run']
     worker = taskdesc['worker'] = job['worker']
     common_setup(config, job, taskdesc)
 
     if run.get('cache-dotcache'):
         worker['caches'].append({
             'type': 'persistent',
--- a/taskcluster/taskgraph/transforms/job/toolchain.py
+++ b/taskcluster/taskgraph/transforms/job/toolchain.py
@@ -35,28 +35,28 @@ toolchain_run_schema = Schema({
     # are available.
     Required('script'): basestring,
 
     # Arguments to pass to the script.
     Optional('arguments'): [basestring],
 
     # If not false, tooltool downloads will be enabled via relengAPIProxy
     # for either just public files, or all files.  Not supported on Windows
-    Required('tooltool-downloads', default=False): Any(
+    Required('tooltool-downloads'): Any(
         False,
         'public',
         'internal',
     ),
 
     # Sparse profile to give to checkout using `run-task`.  If given,
     # a filename in `build/sparse-profiles`.  Defaults to
     # "toolchain-build", i.e., to
     # `build/sparse-profiles/toolchain-build`.  If `None`, instructs
     # `run-task` to not use a sparse profile at all.
-    Required('sparse-profile', default='toolchain-build'): Any(basestring, None),
+    Required('sparse-profile'): Any(basestring, None),
 
     # Paths/patterns pointing to files that influence the outcome of a
     # toolchain build.
     Optional('resources'): [basestring],
 
     # Path to the artifact produced by the toolchain job
     Required('toolchain-artifact'): basestring,
 
@@ -101,17 +101,24 @@ def get_digest_data(config, run, taskdes
 
     # Likewise script arguments should influence the index.
     args = run.get('arguments')
     if args:
         data.extend(args)
     return data
 
 
-@run_job_using("docker-worker", "toolchain-script", schema=toolchain_run_schema)
+toolchain_defaults = {
+    'tooltool-downloads': False,
+    'sparse-profile': 'toolchain-build',
+}
+
+
+@run_job_using("docker-worker", "toolchain-script",
+               schema=toolchain_run_schema, defaults=toolchain_defaults)
 def docker_worker_toolchain(config, job, taskdesc):
     run = job['run']
     taskdesc['run-on-projects'] = ['trunk', 'try']
 
     worker = taskdesc['worker']
     worker['chain-of-trust'] = True
 
     # Allow the job to specify where artifacts come from, but add
@@ -172,17 +179,18 @@ def docker_worker_toolchain(config, job,
         add_optimization(
             config, taskdesc,
             cache_type=CACHE_TYPE,
             cache_name=name,
             digest_data=get_digest_data(config, run, taskdesc),
         )
 
 
-@run_job_using("generic-worker", "toolchain-script", schema=toolchain_run_schema)
+@run_job_using("generic-worker", "toolchain-script",
+               schema=toolchain_run_schema, defaults=toolchain_defaults)
 def windows_toolchain(config, job, taskdesc):
     run = job['run']
     taskdesc['run-on-projects'] = ['trunk', 'try']
 
     worker = taskdesc['worker']
 
     worker['artifacts'] = [{
         'path': r'public\build',
--- a/taskcluster/taskgraph/transforms/l10n.py
+++ b/taskcluster/taskgraph/transforms/l10n.py
@@ -231,18 +231,19 @@ def copy_in_useful_magic(config, jobs):
 
         attributes['build_platform'] = job['build-platform']
         yield job
 
 
 @transforms.add
 def validate_early(config, jobs):
     for job in jobs:
-        yield validate_schema(l10n_description_schema, job,
-                              "In job {!r}:".format(job.get('name', 'unknown')))
+        validate_schema(l10n_description_schema, job,
+                        "In job {!r}:".format(job.get('name', 'unknown')))
+        yield job
 
 
 @transforms.add
 def setup_nightly_dependency(config, jobs):
     """ Sets up a task dependency to the signing job this relates to """
     for job in jobs:
         if not job['attributes'].get('nightly'):
             yield job
@@ -389,18 +390,19 @@ def chain_of_trust(config, jobs):
             cot = job.setdefault('extra', {}).setdefault('chainOfTrust', {})
             cot.setdefault('inputs', {})['docker-image'] = {"task-reference": "<docker-image>"}
         yield job
 
 
 @transforms.add
 def validate_again(config, jobs):
     for job in jobs:
-        yield validate_schema(l10n_description_schema, job,
-                              "In job {!r}:".format(job.get('name', 'unknown')))
+        validate_schema(l10n_description_schema, job,
+                        "In job {!r}:".format(job.get('name', 'unknown')))
+        yield job
 
 
 @transforms.add
 def make_job_description(config, jobs):
     for job in jobs:
         job['mozharness'].update({
             'using': 'mozharness',
             'job-script': 'taskcluster/scripts/builder/build-l10n.sh',
--- a/taskcluster/taskgraph/transforms/repackage.py
+++ b/taskcluster/taskgraph/transforms/repackage.py
@@ -54,19 +54,20 @@ packaging_description_schema = Schema({
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
 })
 
 
 @transforms.add
 def validate(config, jobs):
     for job in jobs:
         label = job.get('dependent-task', object).__dict__.get('label', '?no-label?')
-        yield validate_schema(
+        validate_schema(
             packaging_description_schema, job,
             "In packaging ({!r} kind) task for {!r}:".format(config.kind, label))
+        yield job
 
 
 @transforms.add
 def make_repackage_description(config, jobs):
     for job in jobs:
         dep_job = job['dependent-task']
 
         label = job.get('label',
--- a/taskcluster/taskgraph/transforms/repackage_routes.py
+++ b/taskcluster/taskgraph/transforms/repackage_routes.py
@@ -13,19 +13,20 @@ from taskgraph.transforms.job import job
 
 transforms = TransformSequence()
 
 
 @transforms.add
 def validate(config, jobs):
     for job in jobs:
         label = job['label']
-        yield validate_schema(
+        validate_schema(
             job_description_schema, job,
             "In repackage-signing ({!r} kind) task for {!r}:".format(config.kind, label))
+        yield job
 
 
 @transforms.add
 def add_indexes(config, jobs):
     for job in jobs:
         repackage_type = job['attributes'].get('repackage_type')
         if repackage_type:
             build_platform = job['attributes']['build_platform']
--- a/taskcluster/taskgraph/transforms/repackage_signing.py
+++ b/taskcluster/taskgraph/transforms/repackage_signing.py
@@ -29,19 +29,20 @@ repackage_signing_description_schema = S
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
 })
 
 
 @transforms.add
 def validate(config, jobs):
     for job in jobs:
         label = job.get('dependent-task', object).__dict__.get('label', '?no-label?')
-        yield validate_schema(
+        validate_schema(
             repackage_signing_description_schema, job,
             "In repackage-signing ({!r} kind) task for {!r}:".format(config.kind, label))
+        yield job
 
 
 @transforms.add
 def make_repackage_signing_description(config, jobs):
     for job in jobs:
         dep_job = job['dependent-task']
         attributes = dep_job.attributes
 
--- a/taskcluster/taskgraph/transforms/signing.py
+++ b/taskcluster/taskgraph/transforms/signing.py
@@ -42,17 +42,17 @@ signing_description_schema = Schema({
         # Paths to the artifacts to sign
         Required('paths'): [basestring],
 
         # Signing formats to use on each of the paths
         Required('formats'): [basestring],
     }],
 
     # depname is used in taskrefs to identify the taskID of the unsigned things
-    Required('depname', default='build'): basestring,
+    Required('depname'): basestring,
 
     # unique label to describe this signing task, defaults to {dep.label}-signing
     Optional('label'): basestring,
 
     # treeherder is allowed here to override any defaults we use for signing.  See
     # taskcluster/taskgraph/transforms/task.py for the schema details, and the
     # below transforms for defaults of various values.
     Optional('treeherder'): task_description_schema['treeherder'],
@@ -61,22 +61,30 @@ signing_description_schema = Schema({
     Optional('routes'): [basestring],
 
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
     Optional('shipping-product'): task_description_schema['shipping-product'],
 })
 
 
 @transforms.add
+def set_defaults(config, jobs):
+    for job in jobs:
+        job.setdefault('depname', 'build')
+        yield job
+
+
+@transforms.add
 def validate(config, jobs):
     for job in jobs:
         label = job.get('dependent-task', object).__dict__.get('label', '?no-label?')
-        yield validate_schema(
+        validate_schema(
             signing_description_schema, job,
             "In signing ({!r} kind) task for {!r}:".format(config.kind, label))
+        yield job
 
 
 @transforms.add
 def make_task_description(config, jobs):
     for job in jobs:
         dep_job = job['dependent-task']
         attributes = dep_job.attributes
 
--- a/taskcluster/taskgraph/transforms/source_test.py
+++ b/taskcluster/taskgraph/transforms/source_test.py
@@ -39,17 +39,17 @@ source_test_description_schema = Schema(
     # (for try selection) and treeherder metadata (for display).  If given as a list,
     # the job will be "split" into multiple tasks, one with each platform.
     Required('platform'): Any(basestring, [basestring]),
 
     # Whether the job requires a build artifact or not. If True, the task will
     # depend on a build task and the installer url will be saved to the
     # GECKO_INSTALLER_URL environment variable. Build labels are determined by the
     # `dependent-build-platforms` config in kind.yml.
-    Required('require-build', default=False): bool,
+    Required('require-build'): bool,
 
     # These fields can be keyed by "platform", and are otherwise identical to
     # job descriptions.
     Required('worker-type'): Any(
         job_description_schema['worker-type'],
         {'by-platform': {basestring: job_description_schema['worker-type']}},
     ),
     Required('worker'): Any(
@@ -57,20 +57,28 @@ source_test_description_schema = Schema(
         {'by-platform': {basestring: job_description_schema['worker']}},
     ),
 })
 
 transforms = TransformSequence()
 
 
 @transforms.add
+def set_defaults(config, jobs):
+    for job in jobs:
+        job.setdefault('require-build', False)
+        yield job
+
+
+@transforms.add
 def validate(config, jobs):
     for job in jobs:
-        yield validate_schema(source_test_description_schema, job,
-                              "In job {!r}:".format(job['name']))
+        validate_schema(source_test_description_schema, job,
+                        "In job {!r}:".format(job['name']))
+        yield job
 
 
 @transforms.add
 def set_job_name(config, jobs):
     for job in jobs:
         if 'job-from' in job and job['job-from'] != 'kind.yml':
             from_name = os.path.splitext(job['job-from'])[0]
             job['name'] = '{}-{}'.format(from_name, job['name'])
--- a/taskcluster/taskgraph/transforms/task.py
+++ b/taskcluster/taskgraph/transforms/task.py
@@ -173,27 +173,27 @@ task_description_schema = Schema({
 
     # The `run_on_projects` attribute, defaulting to "all".  This dictates the
     # projects on which this task should be included in the target task set.
     # See the attributes documentation for details.
     Optional('run-on-projects'): [basestring],
 
     # The `shipping_phase` attribute, defaulting to None. This specifies the
     # release promotion phase that this task belongs to.
-    Required('shipping-phase', default=None): Any(
+    Required('shipping-phase'): Any(
         None,
         'build',
         'promote',
         'push',
         'ship',
     ),
 
     # The `shipping_product` attribute, defaulting to None. This specifies the
     # release promotion product that this task belongs to.
-    Required('shipping-product', default=None): Any(
+    Required('shipping-product'): Any(
         None,
         'devedition',
         'fennec',
         'firefox',
     ),
 
     # Coalescing provides the facility for tasks to be superseded by the same
     # task in a subsequent commit, if the current task backlog reaches an
@@ -216,21 +216,21 @@ task_description_schema = Schema({
         'size': int,
     },
 
     # The `always-target` attribute will cause the task to be included in the
     # target_task_graph regardless of filtering. Tasks included in this manner
     # will be candidates for optimization even when `optimize_target_tasks` is
     # False, unless the task was also explicitly chosen by the target_tasks
     # method.
-    Required('always-target', default=False): bool,
+    Required('always-target'): bool,
 
     # Optimization to perform on this task during the optimization phase.
     # Optimizations are defined in taskcluster/taskgraph/optimize.py.
-    Required('optimization', default=None): Any(
+    Required('optimization'): Any(
         # always run this task (default)
         None,
         # search the index for the given index namespaces, and replace this task if found
         # the search occurs in order, with the first match winning
         {'index-search': [basestring]},
         # consult SETA and skip this task if it is low-value
         {'seta': None},
         # skip this task if none of the given file patterns match
@@ -245,17 +245,17 @@ task_description_schema = Schema({
     ),
 
     # the provisioner-id/worker-type for the task.  The following parameters will
     # be substituted in this string:
     #  {level} -- the scm level of this push
     'worker-type': basestring,
 
     # Whether the job should use sccache compiler caching.
-    Required('needs-sccache', default=False): bool,
+    Required('needs-sccache'): bool,
 
     # Send notifications using pulse-notifier[1] service:
     #
     #     https://github.com/mozilla-releng/pulse-notify
     #
     # Notifications are sent upon task completion, failure, or when an
     # exception is raised.
     Optional('notifications'): {
@@ -282,68 +282,68 @@ task_description_schema = Schema({
             basestring,
             # an in-tree generated docker image (from `taskcluster/docker/<name>`)
             {'in-tree': basestring},
             # an indexed docker image
             {'indexed': basestring},
         ),
 
         # worker features that should be enabled
-        Required('relengapi-proxy', default=False): bool,
-        Required('chain-of-trust', default=False): bool,
-        Required('taskcluster-proxy', default=False): bool,
-        Required('allow-ptrace', default=False): bool,
-        Required('loopback-video', default=False): bool,
-        Required('loopback-audio', default=False): bool,
-        Required('docker-in-docker', default=False): bool,  # (aka 'dind')
+        Required('relengapi-proxy'): bool,
+        Required('chain-of-trust'): bool,
+        Required('taskcluster-proxy'): bool,
+        Required('allow-ptrace'): bool,
+        Required('loopback-video'): bool,
+        Required('loopback-audio'): bool,
+        Required('docker-in-docker'): bool,  # (aka 'dind')
 
         # Paths to Docker volumes.
         #
         # For in-tree Docker images, volumes can be parsed from Dockerfile.
         # This only works for the Dockerfile itself: if a volume is defined in
         # a base image, it will need to be declared here. Out-of-tree Docker
         # images will also require explicit volume annotation.
         #
         # Caches are often mounted to the same path as Docker volumes. In this
         # case, they take precedence over a Docker volume. But a volume still
         # needs to be declared for the path.
-        Optional('volumes', default=[]): [basestring],
+        Optional('volumes'): [basestring],
 
         # caches to set up for the task
         Optional('caches'): [{
             # only one type is supported by any of the workers right now
             'type': 'persistent',
 
             # name of the cache, allowing re-use by subsequent tasks naming the
             # same cache
             'name': basestring,
 
             # location in the task image where the cache will be mounted
             'mount-point': basestring,
 
             # Whether the cache is not used in untrusted environments
             # (like the Try repo).
-            Optional('skip-untrusted', default=False): bool,
+            Optional('skip-untrusted'): bool,
         }],
 
         # artifacts to extract from the task image after completion
         Optional('artifacts'): [{
             # type of artifact -- simple file, or recursive directory
             'type': Any('file', 'directory'),
 
             # task image path from which to read artifact
             'path': basestring,
 
             # name of the produced artifact (root of the names for
             # type=directory)
             'name': basestring,
         }],
 
         # environment variables
-        Required('env', default={}): {basestring: taskref_or_string},
+        Required('env'): {basestring: taskref_or_string},
 
         # the command to run; if not given, docker-worker will default to the
         # command in the docker image
         Optional('command'): [taskref_or_string],
 
         # the maximum time to run, in seconds
         Required('max-run-time'): int,
 
@@ -414,26 +414,26 @@ task_description_schema = Schema({
             Optional('file'): basestring,
             # Required if and only if `content` is specified and mounting a
             # directory (not a file). This should be the archive format of the
             # content (either pre-loaded cache or read-only directory).
             Optional('format'): Any('rar', 'tar.bz2', 'tar.gz', 'zip')
         }],
 
         # environment variables
-        Required('env', default={}): {basestring: taskref_or_string},
+        Required('env'): {basestring: taskref_or_string},
 
         # the maximum time to run, in seconds
         Required('max-run-time'): int,
 
         # os user groups for test task workers
-        Optional('os-groups', default=[]): [basestring],
+        Optional('os-groups'): [basestring],
 
         # optional features
-        Required('chain-of-trust', default=False): bool,
+        Required('chain-of-trust'): bool,
     }, {
         Required('implementation'): 'buildbot-bridge',
 
         # see
         # https://github.com/mozilla/buildbot-bridge/blob/master/bbb/schemas/payload.yml
         Required('buildername'): basestring,
         Required('sourcestamp'): {
             'branch': basestring,
@@ -478,17 +478,17 @@ task_description_schema = Schema({
             # name of the produced artifact (root of the names for
             # type=directory)
             Required('name'): basestring,
         }],
     }, {
         Required('implementation'): 'scriptworker-signing',
 
         # the maximum time to run, in seconds
-        Required('max-run-time', default=600): int,
+        Required('max-run-time'): int,
 
         # list of artifact URLs for the artifacts that should be signed
         Required('upstream-artifacts'): [{
             # taskId of the task with the artifact
             Required('taskId'): taskref_or_string,
 
             # type of signing task (for CoT)
             Required('taskType'): basestring,
@@ -500,17 +500,17 @@ task_description_schema = Schema({
             Required('formats'): [basestring],
         }],
     }, {
         Required('implementation'): 'binary-transparency',
     }, {
         Required('implementation'): 'beetmover',
 
         # the maximum time to run, in seconds
-        Required('max-run-time', default=600): int,
+        Required('max-run-time'): int,
 
         # locale key, if this is a locale beetmover job
         Optional('locale'): basestring,
 
         # list of artifact URLs for the artifacts that should be beetmoved
         Required('upstream-artifacts'): [{
             # taskId of the task with the artifact
             Required('taskId'): taskref_or_string,
@@ -523,17 +523,17 @@ task_description_schema = Schema({
 
             # locale is used to map upload path and allow for duplicate simple names
             Required('locale'): basestring,
         }],
     }, {
         Required('implementation'): 'beetmover-cdns',
 
         # the maximum time to run, in seconds
-        Required('max-run-time', default=600): int,
+        Required('max-run-time'): int,
         Required('product'): basestring,
     }, {
         Required('implementation'): 'balrog',
 
         # list of artifact URLs for the artifacts that should be beetmoved
         Required('upstream-artifacts'): [{
             # taskId of the task with the artifact
             Required('taskId'): taskref_or_string,
@@ -566,17 +566,17 @@ task_description_schema = Schema({
             Required('taskType'): basestring,
 
             # Paths to the artifacts to sign
             Required('paths'): [basestring],
         }],
 
         # "Invalid" is a noop for try and other non-supported branches
         Required('google-play-track'): Any('production', 'beta', 'alpha', 'rollout', 'invalid'),
-        Required('commit', default=False): bool,
+        Required('commit'): bool,
         Optional('rollout-percentage'): int,
     }),
 })
 
 TC_TREEHERDER_SCHEMA_URL = 'https://github.com/taskcluster/taskcluster-treeherder/' \
                            'blob/master/schemas/task-treeherder-config.yml'
 
 
@@ -1111,33 +1111,73 @@ def build_buildbot_bridge_payload(config
             "project:releng:buildbot-bridge:builder-name:{}".format(worker['buildername'])
         )
 
 
 transforms = TransformSequence()
 
 
 @transforms.add
+def set_defaults(config, tasks):
+    for task in tasks:
+        task.setdefault('shipping-phase', None)
+        task.setdefault('shipping-product', None)
+        task.setdefault('always-target', False)
+        task.setdefault('optimization', None)
+        task.setdefault('needs-sccache', False)
+
+        worker = task['worker']
+        if worker['implementation'] in ('docker-worker', 'docker-engine'):
+            worker.setdefault('relengapi-proxy', False)
+            worker.setdefault('chain-of-trust', False)
+            worker.setdefault('taskcluster-proxy', False)
+            worker.setdefault('allow-ptrace', False)
+            worker.setdefault('loopback-video', False)
+            worker.setdefault('loopback-audio', False)
+            worker.setdefault('docker-in-docker', False)
+            worker.setdefault('volumes', [])
+            worker.setdefault('env', {})
+            if 'caches' in worker:
+                for c in worker['caches']:
+                    c.setdefault('skip-untrusted', False)
+        elif worker['implementation'] == 'generic-worker':
+            worker.setdefault('env', {})
+            worker.setdefault('os-groups', [])
+            worker.setdefault('chain-of-trust', False)
+        elif worker['implementation'] == 'scriptworker-signing':
+            worker.setdefault('max-run-time', 600)
+        elif worker['implementation'] == 'beetmover':
+            worker.setdefault('max-run-time', 600)
+        elif worker['implementation'] == 'beetmover-cdns':
+            worker.setdefault('max-run-time', 600)
+        elif worker['implementation'] == 'push-apk':
+            worker.setdefault('commit', False)
+
+        yield task
+
+
+@transforms.add
 def task_name_from_label(config, tasks):
     for task in tasks:
         if 'label' not in task:
             if 'name' not in task:
                 raise Exception("task has neither a name nor a label")
             task['label'] = '{}-{}'.format(config.kind, task['name'])
         if task.get('name'):
             del task['name']
         yield task
 
 
 @transforms.add
 def validate(config, tasks):
     for task in tasks:
-        yield validate_schema(
+        validate_schema(
             task_description_schema, task,
             "In task {!r}:".format(task.get('label', '?no-label?')))
+        yield task
 
 
 @index_builder('generic')
 def add_generic_index_routes(config, task):
     index = task.get('index')
     routes = task.setdefault('routes', [])
 
     verify_index(config, index)
--- a/taskcluster/taskgraph/transforms/tests.py
+++ b/taskcluster/taskgraph/transforms/tests.py
@@ -184,98 +184,98 @@ test_description_schema = Schema({
 
     # The `run_on_projects` attribute, defaulting to "all".  This dictates the
     # projects on which this task should be included in the target task set.
     # See the attributes documentation for details.
     #
     # Note that the special case 'built-projects', the default, uses the parent
     # build task's run-on-projects, meaning that tests run only on platforms
     # that are built.
-    Optional('run-on-projects', default='built-projects'): optionally_keyed_by(
+    Optional('run-on-projects'): optionally_keyed_by(
         'test-platform',
         Any([basestring], 'built-projects')),
 
     # the sheriffing tier for this task (default: set based on test platform)
     Optional('tier'): optionally_keyed_by(
         'test-platform',
         Any(int, 'default')),
 
     # number of chunks to create for this task.  This can be keyed by test
     # platform by passing a dictionary in the `by-test-platform` key.  If the
     # test platform is not found, the key 'default' will be tried.
-    Required('chunks', default=1): optionally_keyed_by(
+    Required('chunks'): optionally_keyed_by(
         'test-platform',
         int),
 
     # the time (with unit) after which this task is deleted; default depends on
     # the branch (see below)
     Optional('expires-after'): basestring,
 
     # Whether to run this task with e10s (desktop-test only).  If false, run
     # without e10s; if true, run with e10s; if 'both', run one task with and
     # one task without e10s.  E10s tasks have "-e10s" appended to the test name
     # and treeherder group.
-    Required('e10s', default='true'): optionally_keyed_by(
+    Required('e10s'): optionally_keyed_by(
         'test-platform', 'project',
         Any(bool, 'both')),
 
     # Whether the task should run with WebRender enabled or not.
-    Optional('webrender', default=False): bool,
+    Optional('webrender'): bool,
 
     # The EC2 instance size to run these tests on.
-    Required('instance-size', default='default'): optionally_keyed_by(
+    Required('instance-size'): optionally_keyed_by(
         'test-platform',
         Any('default', 'large', 'xlarge')),
 
     # type of virtualization or hardware required by test.
-    Required('virtualization', default='virtual'): optionally_keyed_by(
+    Required('virtualization'): optionally_keyed_by(
         'test-platform',
         Any('virtual', 'virtual-with-gpu', 'hardware')),
 
     # Whether the task requires loopback audio or video (whatever that may mean
     # on the platform)
-    Required('loopback-audio', default=False): bool,
-    Required('loopback-video', default=False): bool,
+    Required('loopback-audio'): bool,
+    Required('loopback-video'): bool,
 
     # Whether the test can run using a software GL implementation on Linux
     # using the GL compositor. May not be used with "legacy" sized instances
     # due to poor LLVMPipe performance (bug 1296086).  Defaults to true for
     # unit tests on linux platforms and false otherwise
     Optional('allow-software-gl-layers'): bool,
 
     # For tasks that will run in docker-worker or docker-engine, this is the
     # name of the docker image or in-tree docker image to run the task in.  If
     # in-tree, then a dependency will be created automatically.  This is
     # generally `desktop-test`, or an image that acts an awful lot like it.
-    Required('docker-image', default={'in-tree': 'desktop1604-test'}): optionally_keyed_by(
+    Required('docker-image'): optionally_keyed_by(
         'test-platform',
         Any(
             # a raw Docker image path (repo/image:tag)
             basestring,
             # an in-tree generated docker image (from `taskcluster/docker/<name>`)
             {'in-tree': basestring},
             # an indexed docker image
             {'indexed': basestring},
         )
     ),
 
     # seconds of runtime after which the task will be killed.  Like 'chunks',
     # this can be keyed by test platform.
-    Required('max-run-time', default=3600): optionally_keyed_by(
+    Required('max-run-time'): optionally_keyed_by(
         'test-platform',
         int),
 
     # the exit status code that indicates the task should be retried
     Optional('retry-exit-status'): int,
 
     # Whether to perform a gecko checkout.
-    Required('checkout', default=False): bool,
+    Required('checkout'): bool,
 
     # Whether to perform a machine reboot after the test is done
-    Optional('reboot', default=False):
+    Optional('reboot'):
         Any(False, 'always', 'on-exception', 'on-failure'),
 
     # What to run
     Required('mozharness'): {
         # the mozharness script used to run this task
         Required('script'): optionally_keyed_by(
             'test-platform',
             basestring),
@@ -288,74 +288,74 @@ test_description_schema = Schema({
         # mochitest flavor for mochitest runs
         Optional('mochitest-flavor'): basestring,
 
         # any additional actions to pass to the mozharness command
         Optional('actions'): [basestring],
 
         # additional command-line options for mozharness, beyond those
         # automatically added
-        Required('extra-options', default=[]): optionally_keyed_by(
+        Required('extra-options'): optionally_keyed_by(
             'test-platform',
             [basestring]),
 
         # the artifact name (including path) to test on the build task; this is
         # generally set in a per-kind transformation
         Optional('build-artifact-name'): basestring,
 
         # If true, tooltool downloads will be enabled via relengAPIProxy.
-        Required('tooltool-downloads', default=False): bool,
+        Required('tooltool-downloads'): bool,
 
         # This mozharness script also runs in Buildbot and tries to read a
         # buildbot config file, so tell it not to do so in TaskCluster
-        Required('no-read-buildbot-config', default=False): bool,
+        Required('no-read-buildbot-config'): bool,
 
         # Add --blob-upload-branch=<project> mozharness parameter
         Optional('include-blob-upload-branch'): bool,
 
         # The setting for --download-symbols (if omitted, the option will not
         # be passed to mozharness)
         Optional('download-symbols'): Any(True, 'ondemand'),
 
         # If set, then MOZ_NODE_PATH=/usr/local/bin/node is included in the
         # environment.  This is more than just a helpful path setting -- it
         # causes xpcshell tests to start additional servers, and runs
         # additional tests.
-        Required('set-moz-node-path', default=False): bool,
+        Required('set-moz-node-path'): bool,
 
         # If true, include chunking information in the command even if the number
         # of chunks is 1
-        Required('chunked', default=False): optionally_keyed_by(
+        Required('chunked'): optionally_keyed_by(
             'test-platform',
             bool),
 
         # The chunking argument format to use
-        Required('chunking-args', default='this-chunk'): Any(
+        Required('chunking-args'): Any(
             # Use the usual --this-chunk/--total-chunk arguments
             'this-chunk',
             # Use --test-suite=<suite>-<chunk-suffix>; see chunk-suffix, below
             'test-suite-suffix',
         ),
 
         # the string to append to the `--test-suite` argument when
         # chunking-args = test-suite-suffix; "<CHUNK>" in this string will
         # be replaced with the chunk number.
         Optional('chunk-suffix'): basestring,
 
-        Required('requires-signed-builds', default=False): optionally_keyed_by(
+        Required('requires-signed-builds'): optionally_keyed_by(
             'test-platform',
             bool),
     },
 
     # The current chunk; this is filled in by `all_kinds.py`
     Optional('this-chunk'): int,
 
     # os user groups for test task workers; required scopes, will be
     # added automatically
-    Optional('os-groups', default=[]): optionally_keyed_by(
+    Optional('os-groups'): optionally_keyed_by(
         'test-platform',
         [basestring]),
 
     # -- values supplied by the task-generation infrastructure
 
     # the platform of the build this task is testing
     'build-platform': basestring,
 
@@ -387,23 +387,16 @@ test_description_schema = Schema({
         'test-platform',
         Any(basestring, None),
     ),
 
 }, required=True)
 
 
 @transforms.add
-def validate(config, tests):
-    for test in tests:
-        yield validate_schema(test_description_schema, test,
-                              "In test {!r}:".format(test['test-name']))
-
-
-@transforms.add
 def handle_keyed_by_mozharness(config, tests):
     """Resolve a mozharness field if it is keyed by something"""
     for test in tests:
         resolve_keyed_by(test, 'mozharness', item_name=test['test-name'])
         yield test
 
 
 @transforms.add
@@ -417,17 +410,17 @@ def set_defaults(config, tests):
             # Android doesn't do e10s
             test['e10s'] = False
             # loopback-video is always true for Android, but false for other
             # platform phyla
             test['loopback-video'] = True
         else:
             # all non-android tests want to run the bits that require node
             test['mozharness']['set-moz-node-path'] = True
-            test.setdefault('e10s', 'true')
+            test.setdefault('e10s', True)
 
         # software-gl-layers is only meaningful on linux unittests, where it defaults to True
         if test['test-platform'].startswith('linux') and test['suite'] != 'talos':
             test.setdefault('allow-software-gl-layers', True)
         else:
             test['allow-software-gl-layers'] = False
 
         # Enable WebRender by default on the QuantumRender test platforms, since
@@ -441,18 +434,42 @@ def set_defaults(config, tests):
 
         test.setdefault('try-name', test['test-name'])
 
         test.setdefault('os-groups', [])
         test.setdefault('chunks', 1)
         test.setdefault('run-on-projects', 'built-projects')
         test.setdefault('instance-size', 'default')
         test.setdefault('max-run-time', 3600)
-        test.setdefault('reboot', True)
+        test.setdefault('reboot', False)
+        test.setdefault('virtualization', 'virtual')
+        test.setdefault('run-on-projects', 'built-projects')
+        test.setdefault('chunks', 1)
+        test.setdefault('instance-size', 'default')
+        test.setdefault('loopback-audio', False)
+        test.setdefault('loopback-video', False)
+        test.setdefault('docker-image', {'in-tree': 'desktop1604-test'})
+        test.setdefault('max-run-time', 3600)
+        test.setdefault('checkout', False)
+
         test['mozharness'].setdefault('extra-options', [])
+        test['mozharness'].setdefault('requires-signed-builds', False)
+        test['mozharness'].setdefault('tooltool-downloads', False)
+        test['mozharness'].setdefault('no-read-buildbot-config', False)
+        test['mozharness'].setdefault('set-moz-node-path', False)
+        test['mozharness'].setdefault('chunked', False)
+        test['mozharness'].setdefault('chunking-args', 'this-chunk')
+        yield test
+
+
+@transforms.add
+def validate(config, tests):
+    for test in tests:
+        validate_schema(test_description_schema, test,
+                        "In test {!r}:".format(test['test-name']))
         yield test
 
 
 @transforms.add
 def setup_talos(config, tests):
     """Add options that are specific to talos jobs (identified by suite=talos)"""
     for test in tests:
         if test['suite'] != 'talos':
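
Note the ordering constraint this patch introduces in tests.py (and likewise in signing.py and source_test.py): because validation no longer fills in defaults, the set_defaults transform must now run before validate, or any Required field left implicit in the kind definition would fail validation. Sketched (condensing the hunks above; decorator registration order is execution order):

    @transforms.add
    def set_defaults(config, tests):
        for test in tests:
            test.setdefault('chunks', 1)  # was `default=1` in the schema
            yield test

    @transforms.add  # must be registered after set_defaults now
    def validate(config, tests):
        for test in tests:
            validate_schema(test_description_schema, test,
                            "In test {!r}:".format(test['test-name']))
            yield test
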
--- a/taskcluster/taskgraph/util/push_apk.py
+++ b/taskcluster/taskgraph/util/push_apk.py
@@ -22,20 +22,21 @@ def fill_labels_tranform(_, jobs):
         job['label'] = job['name']
 
         yield job
 
 
 def validate_jobs_schema_transform_partial(description_schema, transform_type, config, jobs):
     for job in jobs:
         label = job.get('label', '?no-label?')
-        yield validate_schema(
+        validate_schema(
             description_schema, job,
             "In {} ({!r} kind) task for {!r}:".format(transform_type, config.kind, label)
         )
+        yield job
 
 
 def validate_dependent_tasks_transform(_, jobs):
     for job in jobs:
         check_every_architecture_is_present_in_dependent_tasks(job['dependent-tasks'])
         yield job
 
 
--- a/taskcluster/taskgraph/util/schema.py
+++ b/taskcluster/taskgraph/util/schema.py
@@ -1,31 +1,33 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import re
-import copy
 import pprint
 import collections
 import voluptuous
 
+import taskgraph
+
 from .attributes import keymatch
 
 
 def validate_schema(schema, obj, msg_prefix):
     """
     Validate that object satisfies schema.  If not, generate a useful exception
     beginning with msg_prefix.
     """
+    if taskgraph.fast:
+        return
     try:
-        # deep copy the result since it may include mutable defaults
-        return copy.deepcopy(schema(obj))
+        schema(obj)
     except voluptuous.MultipleInvalid as exc:
         msg = [msg_prefix]
         for error in exc.errors:
             msg.append(str(error))
         raise Exception('\n'.join(msg) + '\n' + pprint.pformat(obj))
 
 
 def optionally_keyed_by(*arguments):
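
A minimal interactive sketch of the new fast path (the schema and objects are illustrative; Python 2, matching the tree at the time):

    import taskgraph
    from taskgraph.util.schema import validate_schema, Schema
    from voluptuous import Required

    example_schema = Schema({Required('name'): basestring})

    taskgraph.fast = True
    # Returns immediately without consulting the schema, so even an
    # invalid object passes silently under --fast:
    validate_schema(example_schema, {}, "In example:")

    taskgraph.fast = False
    # The same call now raises, with "In example:" prefixed to the
    # voluptuous error for the missing 'name' key.
    validate_schema(example_schema, {}, "In example:")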