Backed out changeset a57aed49dc58 (bug 1524639) for beetmover bustages. a=backout
author: Razvan Maries <rmaries@mozilla.com>
date: Thu, 16 Jan 2020 14:04:31 +0200
changeset: 510446 7e0886a94d70b8696d6fc0481d9f9ae12b85c41a
parent: 510445 7541d616ff870d570476299c5985ca2c49b758b6
child: 510502 3f72a81bd12cb6048f03a96e5b403621f7fac052
push id: 37022
push user: rmaries@mozilla.com
push date: Thu, 16 Jan 2020 12:05:08 +0000
treeherder: mozilla-central@7e0886a94d70
reviewers: backout
bugs: 1524639
milestone: 74.0a1
backs out: a57aed49dc586c20cd03160920137adaa0f45b37
first release with: nightly 7e0886a94d70 / 74.0a1 / 20200116120508 (linux32, linux64, mac, win32, win64)
files:
taskcluster/mach_commands.py
taskcluster/taskgraph/actions/backfill.py
taskcluster/taskgraph/actions/isolate_test.py
taskcluster/taskgraph/config.py
taskcluster/taskgraph/cron/schema.py
taskcluster/taskgraph/decision.py
taskcluster/taskgraph/generator.py
taskcluster/taskgraph/loader/multi_dep.py
taskcluster/taskgraph/parameters.py
taskcluster/taskgraph/test/test_util_schema.py
taskcluster/taskgraph/transforms/balrog_submit.py
taskcluster/taskgraph/transforms/base.py
taskcluster/taskgraph/transforms/beetmover.py
taskcluster/taskgraph/transforms/beetmover_checksums.py
taskcluster/taskgraph/transforms/beetmover_emefree_checksums.py
taskcluster/taskgraph/transforms/beetmover_geckoview.py
taskcluster/taskgraph/transforms/beetmover_langpack_checksums.py
taskcluster/taskgraph/transforms/beetmover_push_to_release.py
taskcluster/taskgraph/transforms/beetmover_repackage.py
taskcluster/taskgraph/transforms/beetmover_repackage_partner.py
taskcluster/taskgraph/transforms/beetmover_source_checksums.py
taskcluster/taskgraph/transforms/bouncer_check.py
taskcluster/taskgraph/transforms/diffoscope.py
taskcluster/taskgraph/transforms/docker_image.py
taskcluster/taskgraph/transforms/fetch.py
taskcluster/taskgraph/transforms/geckodriver_signing.py
taskcluster/taskgraph/transforms/google_play_strings.py
taskcluster/taskgraph/transforms/job/__init__.py
taskcluster/taskgraph/transforms/job/debian_package.py
taskcluster/taskgraph/transforms/job/hazard.py
taskcluster/taskgraph/transforms/job/mach.py
taskcluster/taskgraph/transforms/job/mozharness.py
taskcluster/taskgraph/transforms/job/mozharness_test.py
taskcluster/taskgraph/transforms/job/python_test.py
taskcluster/taskgraph/transforms/job/run_task.py
taskcluster/taskgraph/transforms/job/spidermonkey.py
taskcluster/taskgraph/transforms/job/toolchain.py
taskcluster/taskgraph/transforms/l10n.py
taskcluster/taskgraph/transforms/openh264.py
taskcluster/taskgraph/transforms/openh264_signing.py
taskcluster/taskgraph/transforms/push_apk.py
taskcluster/taskgraph/transforms/push_apk_checks.py
taskcluster/taskgraph/transforms/raptor.py
taskcluster/taskgraph/transforms/release_beetmover_signed_addons.py
taskcluster/taskgraph/transforms/release_generate_checksums_beetmover.py
taskcluster/taskgraph/transforms/release_generate_checksums_signing.py
taskcluster/taskgraph/transforms/release_sign_and_push_langpacks.py
taskcluster/taskgraph/transforms/release_snap_push.py
taskcluster/taskgraph/transforms/repackage.py
taskcluster/taskgraph/transforms/repackage_partner.py
taskcluster/taskgraph/transforms/repackage_signing.py
taskcluster/taskgraph/transforms/repackage_signing_partner.py
taskcluster/taskgraph/transforms/signing.py
taskcluster/taskgraph/transforms/source_checksums_signing.py
taskcluster/taskgraph/transforms/source_test.py
taskcluster/taskgraph/transforms/task.py
taskcluster/taskgraph/transforms/tests.py
taskcluster/taskgraph/transforms/update_verify.py
taskcluster/taskgraph/util/docker.py
taskcluster/taskgraph/util/hg.py
taskcluster/taskgraph/util/schema.py
taskcluster/taskgraph/util/scriptworker.py
taskcluster/taskgraph/util/taskcluster.py
tools/tryselect/selectors/coverage.py
tools/tryselect/task_config.py
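
The reverted changeset (bug 1524639) was a Python 3 porting step: it replaced the Python 2-only `basestring` with `six.text_type` in schemas and wrapped byte-producing calls in `six.ensure_text`. A minimal sketch of the two idioms this backout swaps, assuming Python 2 with `six` installed; the payload below is illustrative, not from the patch:

```python
# Sketch of the idiom being backed out, assuming Python 2 with six.
import json
from six import ensure_text, text_type

# Ported style (reverted here): json.dumps() returns bytes on Python 2,
# and six.ensure_text() decodes it so both Pythons see one text type.
payload = ensure_text(json.dumps({"flavor": ["dom/indexedDB"]}))
assert isinstance(payload, text_type)

# Restored Python 2 style: native str, type-checked against basestring
# (a name that no longer exists on Python 3).
payload = json.dumps({"flavor": ["dom/indexedDB"]})
assert isinstance(payload, basestring)
```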
--- a/taskcluster/mach_commands.py
+++ b/taskcluster/mach_commands.py
@@ -6,17 +6,16 @@
 
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import argparse
 import json
 import logging
 import os
-from six import text_type
 import sys
 import traceback
 import re
 from distutils.util import strtobool
 
 from mach.decorators import (
     CommandArgument,
     CommandProvider,
@@ -122,60 +121,73 @@ class MachCommands(MachCommandBase):
     @CommandArgument('--parameters', '-p', default="project=mozilla-central",
                      help="parameters file (.yml or .json; see "
                           "`taskcluster/docs/parameters.rst`)`")
     def taskgraph_actions(self, **options):
         return self.show_actions(options)
 
     @SubCommand('taskgraph', 'decision',
                 description="Run the decision task")
-    @CommandArgument('--root', '-r', type=text_type,
+    @CommandArgument('--root', '-r',
                      help="root of the taskgraph definition relative to topsrcdir")
-    @CommandArgument('--base-repository', type=text_type, required=True,
+    @CommandArgument('--base-repository',
+                     required=True,
                      help='URL for "base" repository to clone')
-    @CommandArgument('--head-repository', type=text_type, required=True,
+    @CommandArgument('--head-repository',
+                     required=True,
                      help='URL for "head" repository to fetch revision from')
-    @CommandArgument('--head-ref', type=text_type, required=True,
+    @CommandArgument('--head-ref',
+                     required=True,
                      help='Reference (this is same as rev usually for hg)')
-    @CommandArgument('--head-rev', type=text_type, required=True,
+    @CommandArgument('--head-rev',
+                     required=True,
                      help='Commit revision to use from head repository')
-    @CommandArgument('--comm-base-repository', type=text_type, required=False,
+    @CommandArgument('--comm-base-repository',
+                     required=False,
                      help='URL for "base" comm-* repository to clone')
-    @CommandArgument('--comm-head-repository', type=text_type, required=False,
+    @CommandArgument('--comm-head-repository',
+                     required=False,
                      help='URL for "head" comm-* repository to fetch revision from')
-    @CommandArgument('--comm-head-ref', type=text_type, required=False,
+    @CommandArgument('--comm-head-ref',
+                     required=False,
                      help='comm-* Reference (this is same as rev usually for hg)')
-    @CommandArgument('--comm-head-rev', type=text_type, required=False,
+    @CommandArgument('--comm-head-rev',
+                     required=False,
                      help='Commit revision to use from head comm-* repository')
-    @CommandArgument(
-        '--project', type=text_type, required=True,
-        help='Project to use for creating task graph. Example: --project=try')
-    @CommandArgument('--pushlog-id', type=text_type, dest='pushlog_id',
-                     required=True, default='0')
+    @CommandArgument('--project',
+                     required=True,
+                     help='Project to use for creating task graph. Example: --project=try')
+    @CommandArgument('--pushlog-id',
+                     dest='pushlog_id',
+                     required=True,
+                     default=0)
     @CommandArgument('--pushdate',
                      dest='pushdate',
                      required=True,
                      type=int,
                      default=0)
-    @CommandArgument('--owner', type=text_type, required=True,
+    @CommandArgument('--owner',
+                     required=True,
                      help='email address of who owns this graph')
-    @CommandArgument('--level', type=text_type, required=True,
+    @CommandArgument('--level',
+                     required=True,
                      help='SCM level of this repository')
-    @CommandArgument('--target-tasks-method', type=text_type,
+    @CommandArgument('--target-tasks-method',
                      help='method for selecting the target tasks to generate')
     @CommandArgument('--optimize-target-tasks',
                      type=lambda flag: bool(strtobool(flag)),
                      nargs='?', const='true',
                      help='If specified, this indicates whether the target '
                           'tasks are eligible for optimization. Otherwise, '
                           'the default for the project is used.')
-    @CommandArgument('--try-task-config-file', type=text_type,
+    @CommandArgument('--try-task-config-file',
                      help='path to try task configuration file')
-    @CommandArgument('--tasks-for', type=text_type, required=True,
-                     help='the tasks_for value used to generate this task')
+    @CommandArgument('--tasks-for',
+                     help='the tasks_for value used to generate this task',
+                     required=True)
     @CommandArgument('--include-push-tasks',
                      action='store_true',
                      help='Whether tasks from the on-push graph should be re-used '
                           'in this graph. This allows cron graphs to avoid rebuilding '
                           'jobs that were built on-push.')
     @CommandArgument('--rebuild-kind',
                      dest='rebuild_kinds',
                      action='append',
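
In mach_commands.py the backout drops the `type=text_type` coercion from the `@CommandArgument` decorators (and reverts the `--pushlog-id` default from the string `'0'` to the integer `0`). A plain-argparse sketch of what that coercion did; the decorator forwarding these keywords to `argparse` is an assumption of the example:

```python
# Plain-argparse sketch; @CommandArgument forwarding its keyword
# arguments to argparse.add_argument is assumed for illustration.
import argparse
from six import text_type

parser = argparse.ArgumentParser()
parser.add_argument('--project', type=text_type)  # coerced to unicode
parser.add_argument('--owner')                    # left as the native str

args = parser.parse_args(['--project', 'try', '--owner', 'nobody@mozilla.com'])
# Python 2: args.project is unicode, args.owner is bytes (str).
# Python 3: both are str, which is why dropping type= is harmless there.
```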
--- a/taskcluster/taskgraph/actions/backfill.py
+++ b/taskcluster/taskgraph/actions/backfill.py
@@ -3,17 +3,16 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import json
 import logging
-import six
 
 import requests
 from requests.exceptions import HTTPError
 
 from .registry import register_callback_action
 from .util import create_tasks, combine_task_graph_files, add_args_to_command
 from taskgraph.util.taskcluster import get_artifact_from_index
 from taskgraph.util.taskgraph import find_decision_task
@@ -140,20 +139,19 @@ def backfill_action(parameters, graph_co
                     if is_android:
                         # no --e10s; todo, what about future geckoView?
                         verify_args.remove('--e10s')
 
                     if gpu_required:
                         verify_args.append('--gpu-required')
 
                     if 'testPath' in input:
-                        task.task['payload']['env']['MOZHARNESS_TEST_PATHS'] = six.ensure_text(
-                            json.dumps({
-                                task.task['extra']['suite']['flavor']: [input['testPath']]
-                            }))
+                        task.task['payload']['env']['MOZHARNESS_TEST_PATHS'] = json.dumps({
+                            task.task['extra']['suite']['flavor']: [input['testPath']]
+                        })
 
                     cmd_parts = task.task['payload']['command']
                     keep_args = ['--installer-url', '--download-symbols', '--test-packages-url']
                     cmd_parts = remove_args_from_command(cmd_parts, preamble_length, keep_args)
                     cmd_parts = add_args_to_command(cmd_parts, verify_args)
                     task.task['payload']['command'] = cmd_parts
 
                     # morph the task label to a test-verify job
--- a/taskcluster/taskgraph/actions/isolate_test.py
+++ b/taskcluster/taskgraph/actions/isolate_test.py
@@ -177,18 +177,18 @@ def create_isolate_failure_tasks(task_de
             if is_windows and not is_wpt:
                 failure_path = '\\'.join(failure_path.split('/'))
             if is_wpt:
                 include_args = ['--include={}'.format(failure_path)]
                 task_definition['payload']['command'] = add_args_to_command(
                     saved_command,
                     extra_args=include_args)
             else:
-                task_definition['payload']['env']['MOZHARNESS_TEST_PATHS'] = six.ensure_text(
-                    json.dumps({suite: [failure_path]}))
+                task_definition['payload']['env']['MOZHARNESS_TEST_PATHS'] = json.dumps(
+                    {suite: [failure_path]})
 
             logger.info("Creating task for path {} with command {}".format(
                 failure_path,
                 task_definition['payload']['command']))
             for i in range(times):
                 create_task_from_def(slugid(), task_definition, level)
 
 
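Both hunks above serialize test paths into the `MOZHARNESS_TEST_PATHS` environment variable as JSON. A round-trip sketch; the suite name and path are made up, and the reading side is an assumption about the consumer:

```python
# Round-trip sketch for MOZHARNESS_TEST_PATHS; values are illustrative.
import json
import os

os.environ['MOZHARNESS_TEST_PATHS'] = json.dumps(
    {'browser-chrome': ['dom/indexedDB/test']})

# A consumer (e.g. a mozharness script) would decode it like this:
test_paths = json.loads(os.environ['MOZHARNESS_TEST_PATHS'])
assert test_paths == {'browser-chrome': ['dom/indexedDB/test']}
```
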
--- a/taskcluster/taskgraph/config.py
+++ b/taskcluster/taskgraph/config.py
@@ -14,72 +14,72 @@ from .util.schema import validate_schema
 from voluptuous import Required, Optional, Any
 from .util.yaml import load_yaml
 
 logger = logging.getLogger(__name__)
 
 graph_config_schema = Schema({
     # The trust-domain for this graph.
     # (See https://firefox-source-docs.mozilla.org/taskcluster/taskcluster/taskgraph.html#taskgraph-trust-domain)  # noqa
-    Required('trust-domain'): text_type,
+    Required('trust-domain'): basestring,
     # This specifies the prefix for repo parameters that refer to the project being built.
     # This selects between `head_rev` and `comm_head_rev` and related parameters.
     # (See http://firefox-source-docs.mozilla.org/taskcluster/taskcluster/parameters.html#push-information  # noqa
     # and http://firefox-source-docs.mozilla.org/taskcluster/taskcluster/parameters.html#comm-push-information)  # noqa
-    Required('project-repo-param-prefix'): text_type,
+    Required('project-repo-param-prefix'): basestring,
     # This specifies the top level directory of the application being built.
     # ie. "browser/" for Firefox, "comm/mail/" for Thunderbird.
-    Required('product-dir'): text_type,
+    Required('product-dir'): basestring,
     Required('treeherder'): {
         # Mapping of treeherder group symbols to descriptive names
-        Required('group-names'): {text_type: text_type}
+        Required('group-names'): {basestring: basestring}
     },
     Required('index'): {
-        Required('products'): [text_type]
+        Required('products'): [basestring]
     },
     Required('try'): {
         # We have a few platforms for which we want to do some "extra" builds, or at
         # least build-ish things.  Sort of.  Anyway, these other things are implemented
         # as different "platforms".  These do *not* automatically ride along with "-p
         # all"
-        Required('ridealong-builds'): {text_type: [text_type]},
+        Required('ridealong-builds'): {basestring: [basestring]},
     },
     Required('release-promotion'): {
-        Required('products'): [text_type],
-        Required('flavors'): {text_type: {
-            Required('product'): text_type,
-            Required('target-tasks-method'): text_type,
+        Required('products'): [basestring],
+        Required('flavors'): {basestring: {
+            Required('product'): basestring,
+            Required('target-tasks-method'): basestring,
             Optional('is-rc'): bool,
-            Optional('rebuild-kinds'): [text_type],
+            Optional('rebuild-kinds'): [basestring],
             Optional('version-bump'): bool,
             Optional('partial-updates'): bool,
         }},
     },
     Required('scriptworker'): {
         # Prefix to add to scopes controlling scriptworkers
-        Required('scope-prefix'): text_type,
+        Required('scope-prefix'): basestring,
         # Mapping of scriptworker types to scopes they accept
-        Required('worker-types'): {text_type: [text_type]}
+        Required('worker-types'): {basestring: [basestring]}
     },
     Required('task-priority'): optionally_keyed_by('project', Any(
         'highest',
         'very-high',
         'high',
         'medium',
         'low',
         'very-low',
         'lowest',
     )),
     Required('partner-urls'): {
         Required('release-partner-repack'):
             optionally_keyed_by('release-product', 'release-level', 'release-type',
-                                Any(text_type, None)),
+                                Any(basestring, None)),
         Required('release-eme-free-repack'):
             optionally_keyed_by('release-product', 'release-level', 'release-type',
-                                Any(text_type, None)),
+                                Any(basestring, None)),
     },
     Required('workers'): {
         Required('aliases'): {
             text_type: {
                 Required('provisioner'): optionally_keyed_by('level', text_type),
                 Required('implementation'): text_type,
                 Required('os'): text_type,
                 Required('worker-type'): optionally_keyed_by('level', 'release-level', text_type),
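
config.py's hunk reverts the voluptuous schema types to `basestring` (note the trailing context above still uses `text_type` in the `workers` block, which this hunk does not touch). A standalone sketch of how such a schema validates, assuming Python 2; the sample config values are invented:

```python
# Minimal voluptuous sketch of the reverted schema style, assuming
# Python 2 (basestring). The config values are invented for the example.
from voluptuous import Required, Schema

graph_config_schema = Schema({
    Required('trust-domain'): basestring,
    Required('treeherder'): {
        Required('group-names'): {basestring: basestring},
    },
})

# Calling the schema validates and returns the data, raising
# voluptuous.MultipleInvalid on a type mismatch.
graph_config_schema({
    'trust-domain': 'gecko',
    'treeherder': {'group-names': {'BM': 'beetmover tasks'}},
})
```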
--- a/taskcluster/taskgraph/cron/schema.py
+++ b/taskcluster/taskgraph/cron/schema.py
@@ -2,72 +2,70 @@
 
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
-
 from voluptuous import Any, Required, All, Optional
 from taskgraph.util.schema import (
     optionally_keyed_by,
     validate_schema,
     Schema,
 )
 
 
 def even_15_minutes(minutes):
     if minutes % 15 != 0:
         raise ValueError("minutes must be evenly divisible by 15")
 
 
 cron_yml_schema = Schema({
     'jobs': [{
         # Name of the crontask (must be unique)
-        Required('name'): text_type,
+        Required('name'): basestring,
 
         # what to run
 
         # Description of the job to run, keyed by 'type'
         Required('job'): {
             Required('type'): 'decision-task',
 
             # Treeherder symbol for the cron task
-            Required('treeherder-symbol'): text_type,
+            Required('treeherder-symbol'): basestring,
 
             # --target-tasks-method './mach taskgraph decision' argument
-            Required('target-tasks-method'): text_type,
+            Required('target-tasks-method'): basestring,
 
             Optional(
                 'optimize-target-tasks',
                 description='If specified, this indicates whether the target '
                             'tasks are eligible for optimization. Otherwise, '
                             'the default for the project is used.',
             ): bool,
             Optional(
                 'include-push-tasks',
                 description='Whether tasks from the on-push graph should be re-used '
                             'in the cron graph.',
             ): bool,
             Optional(
                 'rebuild-kinds',
                 description='Kinds that should not be re-used from the on-push graph.',
-            ): [text_type],
+            ): [basestring],
         },
 
         # when to run it
 
         # Optional set of projects on which this job should run; if omitted, this will
         # run on all projects for which cron tasks are set up.  This works just like the
         # `run_on_projects` attribute, where strings like "release" and "integration" are
         # expanded to cover multiple repositories.  (taskcluster/docs/attributes.rst)
-        'run-on-projects': [text_type],
+        'run-on-projects': [basestring],
 
         # Array of times at which this task should run.  These *must* be a
         # multiple of 15 minutes, the minimum scheduling interval.  This field
         # can be keyed by project so that each project has a different schedule
         # for the same job.
         'when': optionally_keyed_by(
             'project',
             [
--- a/taskcluster/taskgraph/decision.py
+++ b/taskcluster/taskgraph/decision.py
@@ -7,17 +7,16 @@ from __future__ import absolute_import, 
 
 import os
 import json
 import logging
 import time
 import sys
 from collections import defaultdict
 
-import six
 from six import text_type
 from redo import retry
 import yaml
 
 from . import GECKO
 from .actions import render_actions_json
 from .create import create_tasks
 from .generator import TaskGraphGenerator
@@ -116,21 +115,21 @@ visual_metrics_jobs_schema = Schema({
                 Required('test_name'): str,
                 Required('json_location'): str,
                 Required('video_location'): str,
             }
         ]
 })
 
 try_task_config_schema = Schema({
-    Required('tasks'): [text_type],
+    Required('tasks'): [basestring],
     Optional('browsertime'): bool,
     Optional('chemspill-prio'): bool,
     Optional('disable-pgo'): bool,
-    Optional('env'): {text_type: text_type},
+    Optional('env'): {basestring: basestring},
     Optional('gecko-profile'): bool,
     Optional('rebuild'): int,
     Optional('use-artifact-builds'): bool,
     # Keep in sync with JOB_SCHEMA in taskcluster/docker/visual-metrics/run-visual-metrics.py.
     Optional('visual-metrics-jobs'): visual_metrics_jobs_schema,
     Optional(
         "ubuntu-bionic",
         description="Run linux desktop tests on Ubuntu 18.04 (bionic)."
@@ -140,17 +139,17 @@ try_task_config_schema = Schema({
         description="Mapping of worker alias to worker pools to use for those aliases."
     ): {text_type: text_type}
 })
 """
 Schema for try_task_config.json files.
 """
 
 try_task_config_schema_v2 = Schema({
-    Optional('parameters'): {text_type: object},
+    Optional('parameters'): {basestring: object},
 })
 
 
 def full_task_graph_to_runnable_jobs(full_task_json):
     runnable_jobs = {}
     for label, node in full_task_json.iteritems():
         if not ('extra' in node['task'] and 'treeherder' in node['task']['extra']):
             continue
@@ -307,18 +306,18 @@ def get_decision_parameters(graph_config
     # owner must be an email, but sometimes (e.g., for ffxbld) it is not, in which
     # case, fake it
     if '@' not in parameters['owner']:
         parameters['owner'] += '@noreply.mozilla.org'
 
     # use the pushdate as build_date if given, else use current time
     parameters['build_date'] = parameters['pushdate'] or int(time.time())
     # moz_build_date is the build identifier based on build_date
-    parameters['moz_build_date'] = six.ensure_text(
-        time.strftime("%Y%m%d%H%M%S", time.gmtime(parameters['build_date'])))
+    parameters['moz_build_date'] = time.strftime("%Y%m%d%H%M%S",
+                                                 time.gmtime(parameters['build_date']))
 
     project = parameters['project']
     try:
         parameters.update(PER_PROJECT_PARAMETERS[project])
     except KeyError:
         logger.warning("using default project parameters; add {} to "
                        "PER_PROJECT_PARAMETERS in {} to customize behavior "
                        "for this project".format(project, __file__))
--- a/taskcluster/taskgraph/generator.py
+++ b/taskcluster/taskgraph/generator.py
@@ -2,17 +2,16 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 import logging
 import os
 import copy
 import attr
-from six import text_type
 
 from . import filter_tasks
 from .graph import Graph
 from .taskgraph import TaskGraph
 from .task import Task
 from .optimize import optimize_task_graph
 from .morph import morph
 from .util.python_path import find_object
@@ -31,18 +30,18 @@ class KindNotFound(Exception):
     """
     Raised when trying to load kind from a directory without a kind.yml.
     """
 
 
 @attr.s(frozen=True)
 class Kind(object):
 
-    name = attr.ib(type=text_type)
-    path = attr.ib(type=text_type)
+    name = attr.ib(type=basestring)
+    path = attr.ib(type=basestring)
     config = attr.ib(type=dict)
     graph_config = attr.ib(type=GraphConfig)
 
     def _get_loader(self):
         try:
             loader = self.config['loader']
         except KeyError:
             raise KeyError("{!r} does not define `loader`".format(self.path))
--- a/taskcluster/taskgraph/loader/multi_dep.py
+++ b/taskcluster/taskgraph/loader/multi_dep.py
@@ -1,29 +1,28 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import copy
-from six import text_type
 
 from voluptuous import Required
 
 from ..task import Task
 from ..util.attributes import sorted_unique_list
 from ..util.schema import Schema
 
 schema = Schema({
     Required('primary-dependency', 'primary dependency task'): Task,
     Required(
         'dependent-tasks',
         'dictionary of dependent tasks, keyed by kind',
-    ): {text_type: Task},
+    ): {basestring: Task},
 })
 
 
 # Define a collection of group_by functions
 GROUP_BY_MAP = {}
 
 
 def group_by(name):
@@ -174,17 +173,17 @@ def get_primary_dep(config, dep_tasks):
 
     If ``primary-dependency`` is defined in ``kind.yml`` and is a string,
     then find the first dep with that task kind and return it. If it is
     defined and is a list, the first kind in that list with a matching dep
     is the primary dependency. If it's undefined, return the first dep.
 
     """
     primary_dependencies = config.get('primary-dependency')
-    if isinstance(primary_dependencies, text_type):
+    if isinstance(primary_dependencies, basestring):
         primary_dependencies = [primary_dependencies]
     if not primary_dependencies:
         assert len(dep_tasks) == 1, "Must define a primary-dependency!"
         return dep_tasks.values()[0]
     primary_dep = None
     for primary_kind in primary_dependencies:
         for dep_kind in dep_tasks:
             if dep_kind == primary_kind:
--- a/taskcluster/taskgraph/parameters.py
+++ b/taskcluster/taskgraph/parameters.py
@@ -1,51 +1,47 @@
 # -*- coding: utf-8 -*-
 
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-import io
 import os.path
 import json
 from datetime import datetime
 
 from mozbuild.util import ReadOnlyDict, memoize
 from mozversioncontrol import get_repository_object
 from taskgraph.util.schema import validate_schema
 from voluptuous import (
     ALLOW_EXTRA,
     Any,
     Inclusive,
     PREVENT_EXTRA,
     Required,
     Schema,
 )
 
-import six
-from six import text_type
-
 from . import GECKO
 from .util.attributes import release_level
 
 
 class ParameterMismatch(Exception):
     """Raised when a parameters.yml has extra or missing parameters."""
 
 
 @memoize
 def get_head_ref():
-    return six.ensure_text(get_repository_object(GECKO).head_ref)
+    return get_repository_object(GECKO).head_ref
 
 
 def get_contents(path):
-    with io.open(path, "r") as fh:
+    with open(path, "r") as fh:
         contents = fh.readline().rstrip()
     return contents
 
 
 def get_version(product_dir='browser'):
     version_path = os.path.join(GECKO, product_dir, 'config',
                                 'version_display.txt')
     return get_contents(version_path)
@@ -53,58 +49,58 @@ def get_version(product_dir='browser'):
 
 def get_app_version(product_dir='browser'):
     app_version_path = os.path.join(GECKO, product_dir, 'config',
                                     'version.txt')
     return get_contents(app_version_path)
 
 
 base_schema = {
-    Required('app_version'): text_type,
-    Required('base_repository'): text_type,
+    Required('app_version'): basestring,
+    Required('base_repository'): basestring,
     Required('build_date'): int,
     Required('build_number'): int,
-    Inclusive('comm_base_repository', 'comm'): text_type,
-    Inclusive('comm_head_ref', 'comm'): text_type,
-    Inclusive('comm_head_repository', 'comm'): text_type,
-    Inclusive('comm_head_rev', 'comm'): text_type,
-    Required('do_not_optimize'): [text_type],
-    Required('existing_tasks'): {text_type: text_type},
-    Required('filters'): [text_type],
-    Required('head_ref'): text_type,
-    Required('head_repository'): text_type,
-    Required('head_rev'): text_type,
-    Required('hg_branch'): text_type,
-    Required('level'): text_type,
-    Required('message'): text_type,
-    Required('moz_build_date'): text_type,
-    Required('next_version'): Any(None, text_type),
+    Inclusive('comm_base_repository', 'comm'): basestring,
+    Inclusive('comm_head_ref', 'comm'): basestring,
+    Inclusive('comm_head_repository', 'comm'): basestring,
+    Inclusive('comm_head_rev', 'comm'): basestring,
+    Required('do_not_optimize'): [basestring],
+    Required('existing_tasks'): {basestring: basestring},
+    Required('filters'): [basestring],
+    Required('head_ref'): basestring,
+    Required('head_repository'): basestring,
+    Required('head_rev'): basestring,
+    Required('hg_branch'): basestring,
+    Required('level'): basestring,
+    Required('message'): basestring,
+    Required('moz_build_date'): basestring,
+    Required('next_version'): Any(None, basestring),
     Required('optimize_target_tasks'): bool,
-    Required('owner'): text_type,
-    Required('phabricator_diff'): Any(None, text_type),
-    Required('project'): text_type,
+    Required('owner'): basestring,
+    Required('phabricator_diff'): Any(None, basestring),
+    Required('project'): basestring,
     Required('pushdate'): int,
-    Required('pushlog_id'): text_type,
+    Required('pushlog_id'): basestring,
     Required('release_enable_emefree'): bool,
     Required('release_enable_partners'): bool,
-    Required('release_eta'): Any(None, text_type),
-    Required('release_history'): {text_type: dict},
-    Required('release_partners'): Any(None, [text_type]),
+    Required('release_eta'): Any(None, basestring),
+    Required('release_history'): {basestring: dict},
+    Required('release_partners'): Any(None, [basestring]),
     Required('release_partner_config'): Any(None, dict),
     Required('release_partner_build_number'): int,
-    Required('release_type'): text_type,
-    Required('release_product'): Any(None, text_type),
-    Required('required_signoffs'): [text_type],
+    Required('release_type'): basestring,
+    Required('release_product'): Any(None, basestring),
+    Required('required_signoffs'): [basestring],
     Required('signoff_urls'): dict,
-    Required('target_tasks_method'): text_type,
-    Required('tasks_for'): text_type,
-    Required('try_mode'): Any(None, text_type),
+    Required('target_tasks_method'): basestring,
+    Required('tasks_for'): basestring,
+    Required('try_mode'): Any(None, basestring),
     Required('try_options'): Any(None, dict),
     Required('try_task_config'): dict,
-    Required('version'): text_type,
+    Required('version'): basestring,
 }
 
 
 COMM_PARAMETERS = [
     'comm_base_repository',
     'comm_head_ref',
     'comm_head_repository',
     'comm_head_rev',
@@ -138,17 +134,17 @@ class Parameters(ReadOnlyDict):
             'existing_tasks': {},
             'filters': ['target_tasks_method'],
             'head_ref': get_head_ref(),
             'head_repository': 'https://hg.mozilla.org/mozilla-central',
             'head_rev': get_head_ref(),
             'hg_branch': 'default',
             'level': '3',
             'message': '',
-            'moz_build_date': six.ensure_text(now.strftime("%Y%m%d%H%M%S")),
+            'moz_build_date': now.strftime("%Y%m%d%H%M%S"),
             'next_version': None,
             'optimize_target_tasks': True,
             'owner': 'nobody@mozilla.com',
             'phabricator_diff': None,
             'project': 'mozilla-central',
             'pushdate': seconds_from_epoch,
             'pushlog_id': '0',
             'release_enable_emefree': False,
@@ -199,20 +195,20 @@ class Parameters(ReadOnlyDict):
         """
         return 'try' in self['project'] or self['try_mode'] == 'try_select'
 
     def file_url(self, path, pretty=False):
         """
         Determine the VCS URL for viewing a file in the tree, suitable for
         viewing by a human.
 
-        :param text_type path: The path, relative to the root of the repository.
+        :param basestring path: The path, relative to the root of the repository.
         :param bool pretty: Whether to return a link to a formatted version of the
             file, or the raw file version.
-        :return text_type: The URL displaying the given path.
+        :return basestring: The URL displaying the given path.
         """
         if path.startswith('comm/'):
             path = path[len('comm/'):]
             repo = self['comm_head_repository']
             rev = self['comm_head_rev']
         else:
             repo = self['head_repository']
             rev = self['head_rev']
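
parameters.py reverts `get_contents` from `io.open` back to the builtin `open`. The difference is what `io.open` guaranteed on Python 2: text mode with decoding, matching Python 3's `open`. A sketch of the removed idiom; the path is illustrative:

```python
# Sketch of the io.open idiom removed by this backout; io.open decodes
# to unicode on Python 2 as well, while plain open returns bytes there.
import io

def get_contents(path):
    with io.open(path, "r") as fh:  # text on both Python 2 and 3
        return fh.readline().rstrip()

# e.g. version = get_contents('browser/config/version_display.txt')
```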
--- a/taskcluster/taskgraph/test/test_util_schema.py
+++ b/taskcluster/taskgraph/test/test_util_schema.py
@@ -1,26 +1,25 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 import unittest
 from mozunit import main
 from taskgraph.util.schema import (
     validate_schema,
     resolve_keyed_by,
     Schema,
 )
 
 schema = Schema({
     'x': int,
-    'y': text_type,
+    'y': basestring,
 })
 
 
 class TestValidateSchema(unittest.TestCase):
 
     def test_valid(self):
         validate_schema(schema, {'x': 10, 'y': 'foo'}, "pfx")
 
--- a/taskcluster/taskgraph/transforms/balrog_submit.py
+++ b/taskcluster/taskgraph/transforms/balrog_submit.py
@@ -2,34 +2,33 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform the per-locale balrog task into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.schema import (
     optionally_keyed_by, resolve_keyed_by,
 )
 from taskgraph.util.scriptworker import (
     get_balrog_server_scope, get_worker_type_for_scope
 )
 from taskgraph.util.treeherder import replace_group
 from taskgraph.transforms.task import task_description_schema
 from voluptuous import Optional
 
 
 balrog_description_schema = schema.extend({
     # unique label to describe this balrog task, defaults to balrog-{dep.label}
-    Optional('label'): text_type,
+    Optional('label'): basestring,
 
 
     Optional(
         'update-no-wnp',
         description="Whether the parallel `-No-WNP` blob should be updated as well.",
     ): optionally_keyed_by('release-type', bool),
 
     # treeherder is allowed here to override any defaults we use for beetmover.  See
--- a/taskcluster/taskgraph/transforms/base.py
+++ b/taskcluster/taskgraph/transforms/base.py
@@ -1,16 +1,15 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import attr
-from six import text_type
 
 from ..config import GraphConfig
 from ..parameters import Parameters
 from ..util.schema import Schema, validate_schema
 
 
 @attr.s(frozen=True)
 class TransformConfig(object):
@@ -18,17 +17,17 @@ class TransformConfig(object):
     A container for configuration affecting transforms.  The `config` argument
     to transforms is an instance of this class.
     """
 
     # the name of the current kind
     kind = attr.ib()
 
     # the path to the kind configuration directory
-    path = attr.ib(type=text_type)
+    path = attr.ib(type=basestring)
 
     # the parsed contents of kind.yml
     config = attr.ib(type=dict)
 
     # the parameters for this task-graph generation run
     params = attr.ib(type=Parameters)
 
     # a list of all the tasks associated with the kind dependencies of the
--- a/taskcluster/taskgraph/transforms/beetmover.py
+++ b/taskcluster/taskgraph/transforms/beetmover.py
@@ -4,45 +4,44 @@
 """
 Transform the beetmover task into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 from voluptuous import Optional, Required
 
-from six import text_type
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.transforms.task import task_description_schema
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.scriptworker import (generate_beetmover_artifact_map,
                                          generate_beetmover_upstream_artifacts,
                                          get_beetmover_bucket_scope,
                                          get_beetmover_action_scope,
                                          get_worker_type_for_scope)
 from taskgraph.util.treeherder import replace_group
 
 
 transforms = TransformSequence()
 
 beetmover_description_schema = schema.extend({
     # depname is used in taskrefs to identify the taskID of the unsigned things
-    Required('depname', default='build'): text_type,
+    Required('depname', default='build'): basestring,
 
     # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
-    Optional('label'): text_type,
+    Optional('label'): basestring,
 
     # treeherder is allowed here to override any defaults we use for beetmover.  See
     # taskcluster/taskgraph/transforms/task.py for the schema details, and the
     # below transforms for defaults of various values.
     Optional('treeherder'): task_description_schema['treeherder'],
 
     # locale is passed only for l10n beetmoving
-    Optional('locale'): text_type,
+    Optional('locale'): basestring,
 
     Required('shipping-phase'): task_description_schema['shipping-phase'],
     Optional('shipping-product'): task_description_schema['shipping-product'],
     Optional('attributes'): task_description_schema['attributes'],
 })
 
 
 transforms.add_validate(beetmover_description_schema)
@@ -126,19 +125,19 @@ def craft_release_properties(config, job
     elif config.graph_config['trust-domain'] == 'comm':
         app_name = 'Thunderbird'
     else:
         # XXX Even DevEdition is called Firefox
         app_name = 'Firefox'
 
     return {
         'app-name': app_name,
-        'app-version': params['app_version'],
+        'app-version': str(params['app_version']),
         'branch': params['project'],
-        'build-id': params['moz_build_date'],
+        'build-id': str(params['moz_build_date']),
         'hash-type': 'sha512',
         'platform': build_platform,
     }
 
 
 @transforms.add
 def make_task_worker(config, jobs):
     for job in jobs:
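
Note the compensation in `craft_release_properties` above: with the `text_type` annotations gone, the backout coerces `app_version` and `moz_build_date` through `str()` so beetmover's release properties stay native strings. A sketch with invented parameter values:

```python
# Sketch of the str() coercion added by the backout; the parameter
# values are invented, and ASCII-only so str() is safe on Python 2.
params = {'app_version': u'74.0a1', 'moz_build_date': u'20200116120508'}

release_properties = {
    'app-version': str(params['app_version']),
    'build-id': str(params['moz_build_date']),
}
assert all(isinstance(v, str) for v in release_properties.values())
```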
--- a/taskcluster/taskgraph/transforms/beetmover_checksums.py
+++ b/taskcluster/taskgraph/transforms/beetmover_checksums.py
@@ -3,36 +3,35 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform the checksums signing task into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 
-from six import text_type
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.transforms.beetmover import craft_release_properties
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.scriptworker import (generate_beetmover_artifact_map,
                                          generate_beetmover_upstream_artifacts,
                                          get_beetmover_action_scope,
                                          get_beetmover_bucket_scope,
                                          get_worker_type_for_scope)
 from voluptuous import Optional, Required
 from taskgraph.util.treeherder import replace_group
 from taskgraph.transforms.task import task_description_schema
 
 beetmover_checksums_description_schema = schema.extend({
-    Required('depname', default='build'): text_type,
-    Required('attributes'): {text_type: object},
-    Optional('label'): text_type,
+    Required('depname', default='build'): basestring,
+    Required('attributes'): {basestring: object},
+    Optional('label'): basestring,
     Optional('treeherder'): task_description_schema['treeherder'],
-    Optional('locale'): text_type,
+    Optional('locale'): basestring,
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
     Optional('shipping-product'): task_description_schema['shipping-product'],
 })
 
 transforms = TransformSequence()
 transforms.add_validate(beetmover_checksums_description_schema)
 
 
--- a/taskcluster/taskgraph/transforms/beetmover_emefree_checksums.py
+++ b/taskcluster/taskgraph/transforms/beetmover_emefree_checksums.py
@@ -2,27 +2,26 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform release-beetmover-source-checksums into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.transforms.beetmover import craft_release_properties
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.transforms.task import task_description_schema
 from voluptuous import Required, Optional
 
 beetmover_checksums_description_schema = schema.extend({
-    Required('depname', default='build'): text_type,
-    Optional('label'): text_type,
+    Required('depname', default='build'): basestring,
+    Optional('label'): basestring,
     Optional('extra'): object,
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
     Optional('shipping-product'): task_description_schema['shipping-product'],
 })
 
 
 transforms = TransformSequence()
 transforms.add_validate(beetmover_checksums_description_schema)
--- a/taskcluster/taskgraph/transforms/beetmover_geckoview.py
+++ b/taskcluster/taskgraph/transforms/beetmover_geckoview.py
@@ -4,17 +4,16 @@
 """
 Transform the beetmover task into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 from copy import deepcopy
 
-from six import text_type
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.transforms.beetmover import \
     craft_release_properties as beetmover_craft_release_properties
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.declarative_artifacts import (
     get_geckoview_template_vars,
     get_geckoview_upstream_artifacts,
@@ -23,24 +22,24 @@ from taskgraph.util.declarative_artifact
 from taskgraph.util.schema import resolve_keyed_by, optionally_keyed_by
 from taskgraph.util.scriptworker import (generate_beetmover_artifact_map,
                                          get_worker_type_for_scope)
 from taskgraph.transforms.task import task_description_schema
 from voluptuous import Required, Optional
 
 
 beetmover_description_schema = schema.extend({
-    Required('depname', default='build'): text_type,
-    Optional('label'): text_type,
+    Required('depname', default='build'): basestring,
+    Optional('label'): basestring,
     Optional('treeherder'): task_description_schema['treeherder'],
 
     Required('run-on-projects'): task_description_schema['run-on-projects'],
     Required('run-on-hg-branches'): task_description_schema['run-on-hg-branches'],
 
-    Optional('bucket-scope'): optionally_keyed_by('release-level', text_type),
+    Optional('bucket-scope'): optionally_keyed_by('release-level', basestring),
     Optional('shipping-phase'): optionally_keyed_by(
         'project', task_description_schema['shipping-phase']
     ),
     Optional('shipping-product'): task_description_schema['shipping-product'],
     Optional('attributes'): task_description_schema['attributes'],
 })
 
 transforms = TransformSequence()
--- a/taskcluster/taskgraph/transforms/beetmover_langpack_checksums.py
+++ b/taskcluster/taskgraph/transforms/beetmover_langpack_checksums.py
@@ -2,36 +2,35 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform release-beetmover-langpack-checksums into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.transforms.beetmover import craft_release_properties
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.scriptworker import (generate_beetmover_artifact_map,
                                          generate_beetmover_upstream_artifacts,
                                          get_beetmover_action_scope,
                                          get_beetmover_bucket_scope,
                                          get_worker_type_for_scope)
 from taskgraph.util.treeherder import inherit_treeherder_from_dep
 from taskgraph.transforms.task import task_description_schema
 from voluptuous import Required, Optional
 
 beetmover_checksums_description_schema = schema.extend({
-    Required('depname', default='build'): text_type,
-    Required('attributes'): {text_type: object},
-    Optional('label'): text_type,
+    Required('depname', default='build'): basestring,
+    Required('attributes'): {basestring: object},
+    Optional('label'): basestring,
     Optional('treeherder'): task_description_schema['treeherder'],
-    Optional('locale'): text_type,
+    Optional('locale'): basestring,
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
     Optional('shipping-product'): task_description_schema['shipping-product'],
 })
 
 transforms = TransformSequence()
 transforms.add_validate(beetmover_checksums_description_schema)
 
 
--- a/taskcluster/taskgraph/transforms/beetmover_push_to_release.py
+++ b/taskcluster/taskgraph/transforms/beetmover_push_to_release.py
@@ -2,41 +2,40 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform the beetmover-push-to-release task into a task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.schema import (
     Schema,
     taskref_or_string,
 )
 from taskgraph.util.scriptworker import (
     get_beetmover_bucket_scope, add_scope_prefix,
     get_worker_type_for_scope,
 )
 from taskgraph.transforms.task import task_description_schema
 from voluptuous import Required, Optional
 
 
 beetmover_push_to_release_description_schema = Schema({
-    Required('name'): text_type,
-    Required('product'): text_type,
-    Required('treeherder-platform'): text_type,
-    Optional('attributes'): {text_type: object},
+    Required('name'): basestring,
+    Required('product'): basestring,
+    Required('treeherder-platform'): basestring,
+    Optional('attributes'): {basestring: object},
     Optional('job-from'): task_description_schema['job-from'],
-    Optional('run'): {text_type: object},
+    Optional('run'): {basestring: object},
     Optional('run-on-projects'): task_description_schema['run-on-projects'],
-    Optional('dependencies'): {text_type: taskref_or_string},
-    Optional('index'): {text_type: text_type},
-    Optional('routes'): [text_type],
+    Optional('dependencies'): {basestring: taskref_or_string},
+    Optional('index'): {basestring: basestring},
+    Optional('routes'): [basestring],
     Required('shipping-phase'): task_description_schema['shipping-phase'],
     Required('shipping-product'): task_description_schema['shipping-product'],
     Optional('extra'): task_description_schema['extra'],
 })
 
 
 transforms = TransformSequence()
 transforms.add_validate(beetmover_push_to_release_description_schema)
--- a/taskcluster/taskgraph/transforms/beetmover_repackage.py
+++ b/taskcluster/taskgraph/transforms/beetmover_repackage.py
@@ -2,17 +2,16 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform the beetmover task into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.loader.multi_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.transforms.beetmover import craft_release_properties
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.partials import (get_balrog_platform_name,
                                      get_partials_artifacts_from_params,
                                      get_partials_info_from_params)
 from taskgraph.util.scriptworker import (generate_beetmover_artifact_map,
@@ -28,30 +27,30 @@ from voluptuous import Required, Optiona
 
 import logging
 
 logger = logging.getLogger(__name__)
 
 
 beetmover_description_schema = schema.extend({
     # depname is used in taskrefs to identify the taskID of the unsigned things
-    Required('depname', default='build'): text_type,
+    Required('depname', default='build'): basestring,
 
     # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
-    Required('label'): text_type,
+    Required('label'): basestring,
 
     # treeherder is allowed here to override any defaults we use for beetmover.  See
     # taskcluster/taskgraph/transforms/task.py for the schema details, and the
     # below transforms for defaults of various values.
     Optional('treeherder'): task_description_schema['treeherder'],
 
     Optional('attributes'): task_description_schema['attributes'],
 
     # locale is passed only for l10n beetmoving
-    Optional('locale'): text_type,
+    Optional('locale'): basestring,
     Required('shipping-phase'): task_description_schema['shipping-phase'],
     # Optional until we fix asan (run_on_projects?)
     Optional('shipping-product'): task_description_schema['shipping-product'],
 })
 
 transforms = TransformSequence()
 transforms.add_validate(beetmover_description_schema)
 
--- a/taskcluster/taskgraph/transforms/beetmover_repackage_partner.py
+++ b/taskcluster/taskgraph/transforms/beetmover_repackage_partner.py
@@ -2,17 +2,16 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform the beetmover task into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.transforms.beetmover import craft_release_properties
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.partners import (
     check_if_partners_enabled,
     get_ftp_platform,
     get_partner_config_by_kind,
@@ -33,24 +32,24 @@ from voluptuous import Any, Required, Op
 from copy import deepcopy
 import logging
 
 logger = logging.getLogger(__name__)
 
 
 beetmover_description_schema = schema.extend({
     # depname is used in taskrefs to identify the taskID of the unsigned things
-    Required('depname', default='build'): text_type,
+    Required('depname', default='build'): basestring,
 
     # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
-    Optional('label'): text_type,
+    Optional('label'): basestring,
 
-    Required('partner-bucket-scope'): optionally_keyed_by('release-level', text_type),
-    Required('partner-public-path'): Any(None, text_type),
-    Required('partner-private-path'): Any(None, text_type),
+    Required('partner-bucket-scope'): optionally_keyed_by('release-level', basestring),
+    Required('partner-public-path'): Any(None, basestring),
+    Required('partner-private-path'): Any(None, basestring),
 
     Optional('extra'): object,
     Required('shipping-phase'): task_description_schema['shipping-phase'],
     Optional('shipping-product'): task_description_schema['shipping-product'],
     Optional('priority'): task_description_schema['priority'],
 })
 
 transforms = TransformSequence()
--- a/taskcluster/taskgraph/transforms/beetmover_source_checksums.py
+++ b/taskcluster/taskgraph/transforms/beetmover_source_checksums.py
@@ -2,34 +2,33 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform release-beetmover-source-checksums into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.transforms.beetmover import craft_release_properties
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.scriptworker import (generate_beetmover_artifact_map,
                                          generate_beetmover_upstream_artifacts,
                                          get_beetmover_bucket_scope,
                                          get_beetmover_action_scope,
                                          get_worker_type_for_scope)
 from taskgraph.transforms.task import task_description_schema
 from voluptuous import Required, Optional
 
 beetmover_checksums_description_schema = schema.extend({
-    Required('depname', default='build'): text_type,
-    Optional('label'): text_type,
+    Required('depname', default='build'): basestring,
+    Optional('label'): basestring,
     Optional('treeherder'): task_description_schema['treeherder'],
-    Optional('locale'): text_type,
+    Optional('locale'): basestring,
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
     Optional('shipping-product'): task_description_schema['shipping-product'],
     Optional('attributes'): task_description_schema['attributes'],
 })
 
 transforms = TransformSequence()
 transforms.add_validate(beetmover_checksums_description_schema)
 
--- a/taskcluster/taskgraph/transforms/bouncer_check.py
+++ b/taskcluster/taskgraph/transforms/bouncer_check.py
@@ -1,17 +1,16 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 import json
 from pipes import quote as shell_quote
 
-import six
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.scriptworker import get_release_config
 from taskgraph.util.schema import (
     resolve_keyed_by,
 )
 
 import logging
 logger = logging.getLogger(__name__)
@@ -83,18 +82,17 @@ def handle_keyed_by(config, jobs):
             del job["run"]["products-url"]
         elif config.kind == "release-bouncer-check":
             job["run"]["mach"].append("--version={}".format(version))
 
         del job["run"]["config"]
 
         if 'extra-config' in job['run']:
             env = job['worker'].setdefault('env', {})
-            env['EXTRA_MOZHARNESS_CONFIG'] = six.ensure_text(
-                json.dumps(job['run']['extra-config']))
+            env['EXTRA_MOZHARNESS_CONFIG'] = json.dumps(job['run']['extra-config'])
             del job["run"]["extra-config"]
 
         yield job
 
 
 @transforms.add
 def command_to_string(config, jobs):
     """Convert command to string to make it work properly with run-task"""
--- a/taskcluster/taskgraph/transforms/diffoscope.py
+++ b/taskcluster/taskgraph/transforms/diffoscope.py
@@ -3,65 +3,64 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 This transform constructs tasks to perform diffs between builds, as
 defined in kind.yml
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.transforms.task import task_description_schema
 from taskgraph.util.schema import (
     Schema,
 )
 from taskgraph.util.taskcluster import get_artifact_path
 from voluptuous import (
     Any,
     Optional,
     Required,
 )
 
 index_or_string = Any(
-    text_type,
-    {Required('index-search'): text_type},
+    basestring,
+    {Required('index-search'): basestring},
 )
 
 diff_description_schema = Schema({
     # Name of the diff task.
-    Required('name'): text_type,
+    Required('name'): basestring,
 
     # Treeherder symbol.
-    Required('symbol'): text_type,
+    Required('symbol'): basestring,
 
     # relative path (from config.path) to the file the task was defined in.
-    Optional('job-from'): text_type,
+    Optional('job-from'): basestring,
 
     # Original and new builds to compare.
     Required('original'): index_or_string,
     Required('new'): index_or_string,
 
     # Arguments to pass to diffoscope, used for job-defaults in
     # taskcluster/ci/diffoscope/kind.yml
-    Optional('args'): text_type,
+    Optional('args'): basestring,
 
     # Extra arguments to pass to diffoscope, that can be set per job.
-    Optional('extra-args'): text_type,
+    Optional('extra-args'): basestring,
 
     # Fail the task when differences are detected.
     Optional('fail-on-diff'): bool,
 
     # Whether to unpack first. Diffoscope can normally work without unpacking,
     # but when one needs to --exclude some contents, that doesn't work out well
     # if said content is packed (e.g. in omni.ja).
     Optional('unpack'): bool,
 
     # Commands to run before performing the diff.
-    Optional('pre-diff-commands'): [text_type],
+    Optional('pre-diff-commands'): [basestring],
 
     # Only run the task on a set of projects/branches.
     Optional('run-on-projects'): task_description_schema['run-on-projects'],
 })
 
 transforms = TransformSequence()
 transforms.add_validate(diff_description_schema)
 
@@ -73,17 +72,17 @@ def fill_template(config, tasks):
     for task in tasks:
         name = task['name']
 
         deps = {}
         urls = {}
         previous_artifact = None
         for k in ('original', 'new'):
             value = task[k]
-            if isinstance(value, text_type):
+            if isinstance(value, basestring):
                 deps[k] = value
                 dep_name = k
                 os_hint = value
             else:
                 index = value['index-search']
                 if index not in dummy_tasks:
                     dummy_tasks[index] = {
                         'label': 'index-search-' + index,
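
Note: the `index_or_string` alternation is why `fill_template` branches on
`isinstance(value, basestring)`: `original`/`new` may be a bare task label or an
`index-search` dict. A minimal sketch of the same alternation (Python 3 `str`
shown; the index route is illustrative):

    from voluptuous import Any, Required, Schema

    index_or_string = Any(
        str,
        {Required('index-search'): str},
    )
    schema = Schema({Required('original'): index_or_string})

    # both shapes validate
    schema({'original': 'build-linux64/opt'})
    schema({'original': {
        'index-search': 'gecko.v2.mozilla-central.latest.firefox.linux64-opt'}})
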
--- a/taskcluster/taskgraph/transforms/docker_image.py
+++ b/taskcluster/taskgraph/transforms/docker_image.py
@@ -3,17 +3,16 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import os
 import re
 
 from collections import deque
-from six import text_type
 import taskgraph
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.transforms.task import _run_task_suffix
 from .. import GECKO
 from taskgraph.util.docker import (
     generate_context_hash,
 )
 from taskgraph.util.taskcluster import get_root_url
@@ -27,37 +26,37 @@ from voluptuous import (
 from .task import task_description_schema
 
 DIGEST_RE = re.compile('^[0-9a-f]{64}$')
 
 transforms = TransformSequence()
 
 docker_image_schema = Schema({
     # Name of the docker image.
-    Required('name'): text_type,
+    Required('name'): basestring,
 
     # Name of the parent docker image.
-    Optional('parent'): text_type,
+    Optional('parent'): basestring,
 
     # Treeherder symbol.
-    Required('symbol'): text_type,
+    Required('symbol'): basestring,
 
     # relative path (from config.path) to the file the docker image was defined
     # in.
-    Optional('job-from'): text_type,
+    Optional('job-from'): basestring,
 
     # Arguments to use for the Dockerfile.
-    Optional('args'): {text_type: text_type},
+    Optional('args'): {basestring: basestring},
 
     # Name of the docker image definition under taskcluster/docker, when
     # different from the docker image name.
-    Optional('definition'): text_type,
+    Optional('definition'): basestring,
 
     # List of package tasks this docker image depends on.
-    Optional('packages'): [text_type],
+    Optional('packages'): [basestring],
 
     Optional(
         "index",
         description="information for indexing this build so its artifacts can be discovered",
     ): task_description_schema['index'],
 
     Optional(
         "cache",
--- a/taskcluster/taskgraph/transforms/fetch.py
+++ b/taskcluster/taskgraph/transforms/fetch.py
@@ -4,20 +4,18 @@
 
 # Support for running tasks that download remote content and re-export
 # it as task artifacts.
 
 from __future__ import absolute_import, unicode_literals
 
 from mozbuild.shellutil import quote as shell_quote
 
-import io
 import os
 import re
-from six import text_type
 
 from voluptuous import (
     Any,
     Optional,
     Required,
 )
 
 import taskgraph
@@ -35,87 +33,87 @@ from ..util.treeherder import (
     join_symbol,
 )
 
 
 CACHE_TYPE = 'content.v1'
 
 FETCH_SCHEMA = Schema({
     # Name of the task.
-    Required('name'): text_type,
+    Required('name'): basestring,
 
     # Relative path (from config.path) to the file the task was defined
     # in.
-    Optional('job-from'): text_type,
+    Optional('job-from'): basestring,
 
     # Description of the task.
-    Required('description'): text_type,
+    Required('description'): basestring,
 
     Required('fetch'): Any(
         {
             'type': 'static-url',
 
             # The URL to download.
-            Required('url'): text_type,
+            Required('url'): basestring,
 
             # The SHA-256 of the downloaded content.
-            Required('sha256'): text_type,
+            Required('sha256'): basestring,
 
             # Size of the downloaded entity, in bytes.
             Required('size'): int,
 
             # GPG signature verification.
             Optional('gpg-signature'): {
                 # URL where GPG signature document can be obtained. Can contain the
                 # value ``{url}``, which will be substituted with the value from
                 # ``url``.
-                Required('sig-url'): text_type,
+                Required('sig-url'): basestring,
                 # Path to file containing GPG public key(s) used to validate
                 # download.
-                Required('key-path'): text_type,
+                Required('key-path'): basestring,
             },
 
             # The name to give to the generated artifact. Defaults to the file
             # portion of the URL. Using a different extension converts the
             # archive to the given type. Only conversion to .tar.zst is
             # supported.
-            Optional('artifact-name'): text_type,
+            Optional('artifact-name'): basestring,
 
             # Strip the given number of path components at the beginning of
             # each file entry in the archive.
             # Requires an artifact-name ending with .tar.zst.
             Optional('strip-components'): int,
 
             # Add the given prefix to each file entry in the archive.
             # Requires an artifact-name ending with .tar.zst.
-            Optional('add-prefix'): text_type,
+            Optional('add-prefix'): basestring,
 
             # IMPORTANT: when adding anything that changes the behavior of the task,
             # update the digest data used to compute cache hits accordingly.
         },
         {
             'type': 'chromium-fetch',
 
-            Required('script'): text_type,
+            Required('script'): basestring,
 
             # Platform type for chromium build
-            Required('platform'): text_type,
+            Required('platform'): basestring,
 
             # Chromium revision to obtain
-            Optional('revision'): text_type,
+            Optional('revision'): basestring,
 
             # The name to give to the generated artifact.
-            Required('artifact-name'): text_type
+            Required('artifact-name'): basestring
         },
         {
             'type': 'git',
-            Required('repo'): text_type,
-            Required('revision'): text_type,
-            Optional('artifact-name'): text_type,
-            Optional('path-prefix'): text_type,
+            Required('repo'): basestring,
+            Required('revision'): basestring,
+            Optional('artifact-name'): basestring,
+            Optional('path-prefix'): basestring,
         }
     ),
 })
 
 transforms = TransformSequence()
 transforms.add_validate(FETCH_SCHEMA)
 
 
@@ -202,17 +200,17 @@ def create_fetch_url_task(config, job):
 
     env = {}
 
     if 'gpg-signature' in fetch:
         sig_url = fetch['gpg-signature']['sig-url'].format(url=fetch['url'])
         key_path = os.path.join(taskgraph.GECKO, fetch['gpg-signature'][
             'key-path'])
 
-        with io.open(key_path, 'r') as fh:
+        with open(key_path, 'rb') as fh:
             gpg_key = fh.read()
 
         env['FETCH_GPG_KEY'] = gpg_key
         command.extend([
             '--gpg-sig-url', sig_url,
             '--gpg-key-env', 'FETCH_GPG_KEY',
         ])
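
Note: reverting `io.open(key_path, 'r')` to `open(key_path, 'rb')` changes the
type of `gpg_key`: binary mode yields bytes, while `io.open` in text mode decodes
to unicode on Python 2 as well. A small sketch of the distinction (file name
hypothetical):

    import io

    with open('key.asc', 'w') as fh:  # hypothetical key file
        fh.write('-----BEGIN PGP PUBLIC KEY BLOCK-----\n')

    with open('key.asc', 'rb') as fh:
        raw = fh.read()       # bytes on both Python 2 and 3

    with io.open('key.asc', 'r') as fh:
        decoded = fh.read()   # text (unicode) on both Python 2 and 3

    assert isinstance(raw, bytes)
    assert isinstance(decoded, type(u''))
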
 
--- a/taskcluster/taskgraph/transforms/geckodriver_signing.py
+++ b/taskcluster/taskgraph/transforms/geckodriver_signing.py
@@ -2,29 +2,28 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform the geckodriver signing task into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.scriptworker import (
     get_signing_cert_scope_per_platform,
 )
 from taskgraph.transforms.task import task_description_schema
 from voluptuous import Required, Optional
 
 repackage_signing_description_schema = schema.extend({
-    Required('depname', default='geckodriver-repackage'): text_type,
-    Optional('label'): text_type,
+    Required('depname', default='geckodriver-repackage'): basestring,
+    Optional('label'): basestring,
     Optional('treeherder'): task_description_schema['treeherder'],
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
 })
 
 transforms = TransformSequence()
 transforms.add_validate(repackage_signing_description_schema)
 
 
--- a/taskcluster/taskgraph/transforms/google_play_strings.py
+++ b/taskcluster/taskgraph/transforms/google_play_strings.py
@@ -2,25 +2,24 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform the google-play-strings kind into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.transforms.task import task_description_schema
 from taskgraph.util.schema import resolve_keyed_by, Schema
 
 from voluptuous import Required
 
 google_play_description_schema = Schema({
-    Required('name'): text_type,
+    Required('name'): basestring,
     Required('description'): task_description_schema['description'],
     Required('job-from'): task_description_schema['job-from'],
     Required('attributes'): task_description_schema['attributes'],
     Required('treeherder'): task_description_schema['treeherder'],
     Required('run-on-projects'): task_description_schema['run-on-projects'],
     Required('shipping-phase'): task_description_schema['shipping-phase'],
     Required('shipping-product'): task_description_schema['shipping-product'],
     Required('worker-type'): task_description_schema['worker-type'],
--- a/taskcluster/taskgraph/transforms/job/__init__.py
+++ b/taskcluster/taskgraph/transforms/job/__init__.py
@@ -9,18 +9,16 @@ the job at a higher level, using a "run"
 run-using handlers in `taskcluster/taskgraph/transforms/job`.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import copy
 import logging
 import json
-import six
-from six import text_type
 
 import mozpack.path as mozpath
 
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.transforms.cached_tasks import order_tasks
 from taskgraph.util.schema import (
     validate_schema,
     Schema,
@@ -38,18 +36,18 @@ from voluptuous import (
 
 logger = logging.getLogger(__name__)
 
 # Schema for a build description
 job_description_schema = Schema({
     # The name of the job and the job's label.  At least one must be specified,
     # and the label will be generated from the name if necessary, by prepending
     # the kind.
-    Optional('name'): text_type,
-    Optional('label'): text_type,
+    Optional('name'): basestring,
+    Optional('label'): basestring,
 
     # the following fields are passed directly through to the task description,
     # possibly modified by the run implementation.  See
     # taskcluster/taskgraph/transforms/task.py for the schema details.
     Required('description'): task_description_schema['description'],
     Optional('attributes'): task_description_schema['attributes'],
     Optional('job-from'): task_description_schema['job-from'],
     Optional('dependencies'): task_description_schema['dependencies'],
@@ -75,35 +73,35 @@ job_description_schema = Schema({
     # The "when" section contains descriptions of the circumstances under which
     # this task should be included in the task graph.  This will be converted
     # into an optimization, so it cannot be specified in a job description that
     # also gives 'optimization'.
     Exclusive('when', 'optimization'): {
         # This task only needs to be run if a file matching one of the given
         # patterns has changed in the push.  The patterns use the mozpack
         # match function (python/mozbuild/mozpack/path.py).
-        Optional('files-changed'): [text_type],
+        Optional('files-changed'): [basestring],
     },
 
     # A list of artifacts to install from 'fetch' tasks.
     Optional('fetches'): {
-        text_type: [text_type, {
-            Required('artifact'): text_type,
-            Optional('dest'): text_type,
+        basestring: [basestring, {
+            Required('artifact'): basestring,
+            Optional('dest'): basestring,
             Optional('extract'): bool,
         }],
     },
 
     # A description of how to run this job.
     'run': {
         # The key to a job implementation in a peer module to this one
-        'using': text_type,
+        'using': basestring,
 
         # Base work directory used to set up the task.
-        Optional('workdir'): text_type,
+        Optional('workdir'): basestring,
 
         # Any remaining content is verified against that job implementation's
         # own schema.
         Extra: object,
     },
 
     Required('worker-type'): task_description_schema['worker-type'],
 
@@ -256,17 +254,17 @@ def use_fetches(config, jobs):
                                 if len(dep_tasks) == 0
                                 else "multiple tasks",
                             )
                         )
 
                     prefix = get_artifact_prefix(dep_tasks[0])
 
                 for artifact in artifacts:
-                    if isinstance(artifact, text_type):
+                    if isinstance(artifact, basestring):
                         path = artifact
                         dest = None
                         extract = True
                     else:
                         path = artifact['artifact']
                         dest = artifact.get('dest')
                         extract = artifact.get('extract', True)
 
@@ -290,20 +288,17 @@ def use_fetches(config, jobs):
             # 'scopes: [queue:get-artifact:path/to/*]' for 'path/to/artifact.tar.xz'.
             worker["taskcluster-proxy"] = True
             for prefix in sorted(job_artifact_prefixes):
                 scope = "queue:get-artifact:{}/*".format(prefix)
                 if scope not in job.setdefault("scopes", []):
                     job["scopes"].append(scope)
 
         env = worker.setdefault('env', {})
-        env['MOZ_FETCHES'] = {
-            'task-reference': six.ensure_text(json.dumps(job_fetches,
-                                                         sort_keys=True))
-        }
+        env['MOZ_FETCHES'] = {'task-reference': json.dumps(job_fetches, sort_keys=True)}
         # The path is normalized to an absolute path in run-task
         env.setdefault('MOZ_FETCHES_DIR', 'fetches')
 
         yield job
 
 
 @transforms.add
 def make_task_description(config, jobs):
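
Note: `MOZ_FETCHES` is handed to the worker as JSON inside a `task-reference`
wrapper so that `<dep>` tokens are substituted with real task ids when the graph
is materialized; `sort_keys=True` keeps the string deterministic for anything
that hashes the task definition. A sketch of the resulting env value (the fetch
entry is illustrative):

    import json

    job_fetches = [
        {'artifact': 'target.tar.bz2', 'extract': True, 'task': '<build>'},
    ]
    env_value = {'task-reference': json.dumps(job_fetches, sort_keys=True)}
    # '<build>' inside the JSON string is replaced with the dependency's
    # task id at task-creation time
    print(env_value)
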
--- a/taskcluster/taskgraph/transforms/job/debian_package.py
+++ b/taskcluster/taskgraph/transforms/job/debian_package.py
@@ -4,76 +4,75 @@
 """
 Support for building Debian packages via dedicated scripts
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import os
 import re
-from six import text_type
 
 from taskgraph.util.schema import Schema
 from voluptuous import Any, Optional, Required
 
 from taskgraph.transforms.job import run_job_using
 from taskgraph.transforms.job.common import add_artifacts
 
 from taskgraph.util.hash import hash_path
 from taskgraph.util.taskcluster import get_root_url
 from taskgraph import GECKO
 import taskgraph
 
 DSC_PACKAGE_RE = re.compile('.*(?=_)')
 SOURCE_PACKAGE_RE = re.compile('.*(?=[-_]\d)')
 
 source_definition = {
-    Required('url'): text_type,
-    Required('sha256'): text_type,
+    Required('url'): basestring,
+    Required('sha256'): basestring,
 }
 
 run_schema = Schema({
     Required('using'): 'debian-package',
     # Debian distribution
-    Required('dist'): text_type,
+    Required('dist'): basestring,
 
     # Date of the snapshot (from snapshot.debian.org) to use, in the format
     # YYYYMMDDTHHMMSSZ. The same date is used for the base docker-image name
     # (only the YYYYMMDD part).
-    Required('snapshot'): text_type,
+    Required('snapshot'): basestring,
 
     # URL/SHA256 of a source file to build, which can either be a source
     # control (.dsc), or a tarball.
     Required(Any('dsc', 'tarball')): source_definition,
 
     # Package name. Normally derived from the source control or tarball file
     # name. Use in case the name doesn't match DSC_PACKAGE_RE or
     # SOURCE_PACKAGE_RE.
-    Optional('name'): text_type,
+    Optional('name'): basestring,
 
     # Patch to apply to the extracted source.
-    Optional('patch'): text_type,
+    Optional('patch'): basestring,
 
     # Command to run before dpkg-buildpackage.
-    Optional('pre-build-command'): text_type,
+    Optional('pre-build-command'): basestring,
 
     # Architecture to build the package for.
-    Optional('arch'): text_type,
+    Optional('arch'): basestring,
 
     # List of package tasks to get build dependencies from.
-    Optional('packages'): [text_type],
+    Optional('packages'): [basestring],
 
     # What resolver to use to install build dependencies. The default
     # (apt-get) is good in most cases, but in subtle cases involving
     # a *-backports archive, its solver might not be able to find a
     # solution that satisfies the build dependencies.
     Optional('resolver'): Any('apt-get', 'aptitude'),
 
     # Base work directory used to set up the task.
-    Required('workdir'): text_type,
+    Required('workdir'): basestring,
 })
 
 
 @run_job_using("docker-worker", "debian-package", schema=run_schema)
 def docker_worker_debian_package(config, job, taskdesc):
     run = job['run']
 
     name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)
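
Note: the two regexes above derive the package name by taking the longest prefix
before the version separator of a `.dsc` or tarball file name. A quick check of
what they extract (file names illustrative):

    import re

    DSC_PACKAGE_RE = re.compile('.*(?=_)')
    SOURCE_PACKAGE_RE = re.compile(r'.*(?=[-_]\d)')

    # name portion of a Debian source control (.dsc) file
    assert DSC_PACKAGE_RE.match('gdb_8.2-1.dsc').group(0) == 'gdb'
    # name portion of an upstream tarball
    assert SOURCE_PACKAGE_RE.match('valgrind-3.14.0.tar.bz2').group(0) == 'valgrind'
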
--- a/taskcluster/taskgraph/transforms/job/hazard.py
+++ b/taskcluster/taskgraph/transforms/job/hazard.py
@@ -2,17 +2,16 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Support for running hazard jobs via dedicated scripts
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.util.schema import Schema
 from voluptuous import Required, Optional, Any
 
 from taskgraph.transforms.job import (
     run_job_using,
     configure_taskdesc_for_run,
 )
 from taskgraph.transforms.job.common import (
@@ -21,30 +20,30 @@ from taskgraph.transforms.job.common imp
     docker_worker_add_artifacts,
     add_tooltool,
 )
 
 haz_run_schema = Schema({
     Required('using'): 'hazard',
 
     # The command to run within the task image (passed through to the worker)
-    Required('command'): text_type,
+    Required('command'): basestring,
 
     # The mozconfig to use; default in the script is used if omitted
-    Optional('mozconfig'): text_type,
+    Optional('mozconfig'): basestring,
 
     # The set of secret names to which the task has access; these are prefixed
     # with `project/releng/gecko/{treeherder.kind}/level-{level}/`.   Setting
     # this will enable any worker features required and set the task's scopes
     # appropriately.  `true` here means ['*'], all secrets.  Not supported on
     # Windows
-    Required('secrets', default=False): Any(bool, [text_type]),
+    Required('secrets', default=False): Any(bool, [basestring]),
 
     # Base work directory used to set up the task.
-    Required('workdir'): text_type,
+    Required('workdir'): basestring,
 })
 
 
 @run_job_using("docker-worker", "hazard", schema=haz_run_schema)
 def docker_worker_hazard(config, job, taskdesc):
     run = job['run']
 
     worker = taskdesc['worker'] = job['worker']
--- a/taskcluster/taskgraph/transforms/job/mach.py
+++ b/taskcluster/taskgraph/transforms/job/mach.py
@@ -2,40 +2,39 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Support for running mach tasks (via run-task)
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.transforms.job import run_job_using, configure_taskdesc_for_run
 from taskgraph.util.schema import (
     Schema,
     taskref_or_string,
 )
 from voluptuous import Required, Optional, Any
 
 mach_schema = Schema({
     Required('using'): 'mach',
 
     # The mach command (omitting `./mach`) to run
     Required('mach'): taskref_or_string,
 
     # The sparse checkout profile to use. Value is the filename relative to the
     # directory where sparse profiles are defined (build/sparse-profiles/).
-    Optional('sparse-profile'): Any(text_type, None),
+    Optional('sparse-profile'): Any(basestring, None),
 
     # if true, perform a checkout of a comm-central based branch inside the
     # gecko checkout
     Required('comm-checkout'): bool,
 
     # Base work directory used to set up the task.
-    Required('workdir'): text_type,
+    Required('workdir'): basestring,
 })
 
 
 defaults = {
     'comm-checkout': False,
 }
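
Note: `taskref_or_string` (imported from `taskgraph.util.schema`) lets `mach` be
either a literal command string or a `task-reference` wrapper whose `<dep>`
tokens are substituted later. Its exact definition lives in util/schema.py; it
behaves roughly like this sketch (commands illustrative):

    from voluptuous import Any, Required, Schema

    # approximate stand-in for taskgraph.util.schema.taskref_or_string
    taskref_or_string = Any(str, {Required('task-reference'): str})

    schema = Schema({Required('mach'): taskref_or_string})
    schema({'mach': 'python-test --subsuite mozbuild'})
    schema({'mach': {'task-reference': 'artifact toolchain --from-task <fetch>'}})
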
 
 
--- a/taskcluster/taskgraph/transforms/job/mozharness.py
+++ b/taskcluster/taskgraph/transforms/job/mozharness.py
@@ -6,18 +6,16 @@
 Support for running jobs via mozharness.  Ideally, most stuff gets run this
 way, and certainly anything using mozharness should use this approach.
 
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 import json
 
-import six
-from six import text_type
 from textwrap import dedent
 
 from taskgraph.util.schema import Schema
 from voluptuous import Required, Optional, Any
 from voluptuous.validators import Match
 
 from taskgraph.transforms.job import (
     configure_taskdesc_for_run,
@@ -34,77 +32,77 @@ from taskgraph.transforms.task import (
     get_branch_rev,
 )
 
 mozharness_run_schema = Schema({
     Required('using'): 'mozharness',
 
     # the mozharness script used to run this task, relative to the testing/
     # directory and using forward slashes even on Windows
-    Required('script'): text_type,
+    Required('script'): basestring,
 
     # Additional paths to look for mozharness configs in. These should be
     # relative to the base of the source checkout
-    Optional('config-paths'): [text_type],
+    Optional('config-paths'): [basestring],
 
     # the config files required for the task, relative to
     # testing/mozharness/configs or one of the paths specified in
     # `config-paths` and using forward slashes even on Windows
-    Required('config'): [text_type],
+    Required('config'): [basestring],
 
     # any additional actions to pass to the mozharness command
     Optional('actions'): [Match(
         '^[a-z0-9-]+$',
         "actions must be `-` seperated alphanumeric strings"
     )],
 
     # any additional options (without leading --) to be passed to mozharness
     Optional('options'): [Match(
         '^[a-z0-9-]+(=[^ ]+)?$',
         "options must be `-` seperated alphanumeric strings (with optional argument)"
     )],
 
     # --custom-build-variant-cfg value
-    Optional('custom-build-variant-cfg'): text_type,
+    Optional('custom-build-variant-cfg'): basestring,
 
     # Extra configuration options to pass to mozharness.
     Optional('extra-config'): dict,
 
     # Extra metadata to use toward the workspace caching.
     # Only supported on docker-worker
-    Optional('extra-workspace-cache-key'): text_type,
+    Optional('extra-workspace-cache-key'): basestring,
 
     # If not false, tooltool downloads will be enabled via relengAPIProxy
     # for either just public files, or all files.  Not supported on Windows
     Required('tooltool-downloads'): Any(
         False,
         'public',
         'internal',
     ),
 
     # The set of secret names to which the task has access; these are prefixed
     # with `project/releng/gecko/{treeherder.kind}/level-{level}/`.  Setting
     # this will enable any worker features required and set the task's scopes
     # appropriately.  `true` here means ['*'], all secrets.  Not supported on
     # Windows
-    Required('secrets'): Any(bool, [text_type]),
+    Required('secrets'): Any(bool, [basestring]),
 
     # If true, taskcluster proxy will be enabled; note that it may also be enabled
     # automatically e.g., for secrets support.  Not supported on Windows.
     Required('taskcluster-proxy'): bool,
 
     # If true, the build scripts will start Xvfb.  Not supported on Windows.
     Required('need-xvfb'): bool,
 
     # If false, indicate that builds should skip producing artifacts.  Not
     # supported on Windows.
     Required('keep-artifacts'): bool,
 
     # If specified, use the in-tree job script specified.
-    Optional('job-script'): text_type,
+    Optional('job-script'): basestring,
 
     Required('requires-signed-builds'): bool,
 
     # Whether or not to use caches.
     Optional('use-caches'): bool,
 
     # If false, don't set MOZ_SIMPLE_PACKAGE_NAME
     # Only disableable on windows
@@ -114,17 +112,17 @@ mozharness_run_schema = Schema({
     # Only disableable on windows
     Required('use-magic-mh-args'): bool,
 
     # if true, perform a checkout of a comm-central based branch inside the
     # gecko checkout
     Required('comm-checkout'): bool,
 
     # Base work directory used to set up the task.
-    Required('workdir'): text_type,
+    Required('workdir'): basestring,
 })
 
 
 mozharness_defaults = {
     'tooltool-downloads': False,
     'secrets': False,
     'taskcluster-proxy': False,
     'need-xvfb': False,
@@ -187,18 +185,17 @@ def mozharness_on_docker_worker_setup(co
 
     if 'config-paths' in run:
         env['MOZHARNESS_CONFIG_PATHS'] = ' '.join(run.pop('config-paths'))
 
     if 'custom-build-variant-cfg' in run:
         env['MH_CUSTOM_BUILD_VARIANT_CFG'] = run.pop('custom-build-variant-cfg')
 
     if 'extra-config' in run:
-        env['EXTRA_MOZHARNESS_CONFIG'] = six.ensure_text(
-            json.dumps(run.pop('extra-config')))
+        env['EXTRA_MOZHARNESS_CONFIG'] = json.dumps(run.pop('extra-config'))
 
     if 'job-script' in run:
         env['JOB_SCRIPT'] = run['job-script']
 
     if config.params.is_try():
         env['TRY_COMMIT_MSG'] = config.params['message']
 
     # if we're not keeping artifacts, set some env variables to empty values
@@ -274,18 +271,17 @@ def mozharness_on_generic_worker(config,
         'MH_BRANCH': config.params['project'],
         'MOZ_SOURCE_CHANGESET': get_branch_rev(config),
         'MOZ_SOURCE_REPO': get_branch_repo(config),
     })
     if run.pop('use-simple-package'):
         env.update({'MOZ_SIMPLE_PACKAGE_NAME': 'target'})
 
     if 'extra-config' in run:
-        env['EXTRA_MOZHARNESS_CONFIG'] = six.ensure_text(
-            json.dumps(run.pop('extra-config')))
+        env['EXTRA_MOZHARNESS_CONFIG'] = json.dumps(run.pop('extra-config'))
 
     # The windows generic worker uses batch files to pass environment variables
     # to commands.  Setting a variable to empty in a batch file unsets, so if
     # there is no `TRY_COMMIT_MESSAGE`, pass a space instead, so that
     # mozharness doesn't try to find the commit message on its own.
     if config.params.is_try():
         env['TRY_COMMIT_MSG'] = config.params['message'] or 'no commit message'
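
Note: the `Match` validators in the schema above apply to each list element, so a
stray space or uppercase letter in `actions`/`options` fails validation early
rather than producing a broken mozharness command line. A minimal sketch:

    from voluptuous import Invalid, Optional, Schema
    from voluptuous.validators import Match

    schema = Schema({
        Optional('actions'): [Match(
            '^[a-z0-9-]+$',
            "actions must be `-` separated alphanumeric strings"
        )],
    })

    schema({'actions': ['clobber', 'build']})   # passes
    try:
        schema({'actions': ['build all']})      # space is rejected
    except Invalid as e:
        print(e)
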
 
--- a/taskcluster/taskgraph/transforms/job/mozharness_test.py
+++ b/taskcluster/taskgraph/transforms/job/mozharness_test.py
@@ -2,18 +2,16 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import json
 import os
 
-import six
-from six import text_type
 from voluptuous import Required
 
 from taskgraph.util.taskcluster import get_artifact_url
 from taskgraph.transforms.job import (
     configure_taskdesc_for_run,
     run_job_using,
 )
 from taskgraph.util.schema import Schema
@@ -45,17 +43,17 @@ def get_variant(test_platform):
             return v
     return ''
 
 
 mozharness_test_run_schema = Schema({
     Required('using'): 'mozharness-test',
     Required('test'): test_description_schema,
     # Base work directory used to set up the task.
-    Required('workdir'): text_type,
+    Required('workdir'): basestring,
 })
 
 
 def test_packages_url(taskdesc):
     """Account for different platforms that name their test packages differently"""
     artifact_url = get_artifact_url('<build>', get_artifact_path(taskdesc,
                                     'target.test_packages.json'))
     # for android nightly we need to add 'en-US' to the artifact url
@@ -108,17 +106,17 @@ def mozharness_test_on_docker(config, jo
     env = worker.setdefault('env', {})
     env.update({
         'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
         'MOZHARNESS_SCRIPT': mozharness['script'],
         'MOZILLA_BUILD_URL': {'task-reference': installer_url},
         'NEED_PULSEAUDIO': 'true',
         'NEED_WINDOW_MANAGER': 'true',
         'NEED_COMPIZ': 'true',
-        'ENABLE_E10S': text_type(bool(test.get('e10s'))).lower(),
+        'ENABLE_E10S': str(bool(test.get('e10s'))).lower(),
         'WORKING_DIR': '/builds/worker',
     })
 
     # reminder to remove this conditional and remove NEED_COMPIZ from tree
     # once tests are migrated over to Ubuntu 18.04/ubuntu1804.
     if test['docker-image'] == 'ubuntu1804-test':
         env.update({
             'NEED_COMPIZ': 'false'
@@ -173,28 +171,25 @@ def mozharness_test_on_docker(config, jo
         env['MOZHARNESS_PATH'] = '{workdir}/checkouts/gecko/testing/mozharness'.format(**run)
     else:
         env['MOZHARNESS_URL'] = {'task-reference': mozharness_url}
 
     extra_config = {
         'installer_url': installer_url,
         'test_packages_url': test_packages_url(taskdesc),
     }
-    env['EXTRA_MOZHARNESS_CONFIG'] = {
-        'task-reference': six.ensure_text(json.dumps(extra_config))
-    }
+    env['EXTRA_MOZHARNESS_CONFIG'] = {'task-reference': json.dumps(extra_config)}
 
     command = [
         '{workdir}/bin/test-linux.sh'.format(**run),
     ]
     command.extend(mozharness.get('extra-options', []))
 
     if test.get('test-manifests'):
-        env['MOZHARNESS_TEST_PATHS'] = six.ensure_text(
-            json.dumps({test['suite']: test['test-manifests']}))
+        env['MOZHARNESS_TEST_PATHS'] = json.dumps({test['suite']: test['test-manifests']})
 
     # TODO: remove the need for run['chunked']
     elif mozharness.get('chunked') or test['chunks'] > 1:
         command.append('--total-chunk={}'.format(test['chunks']))
         command.append('--this-chunk={}'.format(test['this-chunk']))
 
     if 'download-symbols' in mozharness:
         download_symbols = mozharness['download-symbols']
@@ -322,19 +317,17 @@ def mozharness_test_on_generic_worker(co
             "MOZ_NODE_PATH": "/usr/local/bin/node",
             'TASKCLUSTER_WORKER_TYPE': job['worker-type'],
         })
 
     extra_config = {
         'installer_url': installer_url,
         'test_packages_url': test_packages_url(taskdesc),
     }
-    env['EXTRA_MOZHARNESS_CONFIG'] = {
-        'task-reference': six.ensure_text(json.dumps(extra_config))
-    }
+    env['EXTRA_MOZHARNESS_CONFIG'] = {'task-reference': json.dumps(extra_config)}
 
     if is_windows:
         mh_command = [
             'c:\\mozilla-build\\python\\python.exe',
             '-u',
             'mozharness\\scripts\\' + normpath(mozharness['script'])
         ]
     elif is_bitbar:
@@ -361,26 +354,25 @@ def mozharness_test_on_generic_worker(co
 
     for mh_config in mozharness['config']:
         cfg_path = 'mozharness/configs/' + mh_config
         if is_windows:
             cfg_path = normpath(cfg_path)
         mh_command.extend(['--cfg', cfg_path])
     mh_command.extend(mozharness.get('extra-options', []))
     if mozharness.get('download-symbols'):
-        if isinstance(mozharness['download-symbols'], text_type):
+        if isinstance(mozharness['download-symbols'], basestring):
             mh_command.extend(['--download-symbols', mozharness['download-symbols']])
         else:
             mh_command.extend(['--download-symbols', 'true'])
     if mozharness.get('include-blob-upload-branch'):
         mh_command.append('--blob-upload-branch=' + config.params['project'])
 
     if test.get('test-manifests'):
-        env['MOZHARNESS_TEST_PATHS'] = six.ensure_text(
-            json.dumps({test['suite']: test['test-manifests']}))
+        env['MOZHARNESS_TEST_PATHS'] = json.dumps({test['suite']: test['test-manifests']})
 
     # TODO: remove the need for run['chunked']
     elif mozharness.get('chunked') or test['chunks'] > 1:
         mh_command.append('--total-chunk={}'.format(test['chunks']))
         mh_command.append('--this-chunk={}'.format(test['this-chunk']))
 
     if config.params.is_try():
         env['TRY_COMMIT_MSG'] = config.params['message']
@@ -478,33 +470,30 @@ def mozharness_test_on_script_engine_aut
     # talos tests don't need Xvfb
     if is_talos:
         env['NEED_XVFB'] = 'false'
 
     extra_config = {
         'installer_url': installer_url,
         'test_packages_url': test_packages_url(taskdesc),
     }
-    env['EXTRA_MOZHARNESS_CONFIG'] = {
-        'task-reference': six.ensure_text(json.dumps(extra_config))
-    }
+    env['EXTRA_MOZHARNESS_CONFIG'] = {'task-reference': json.dumps(extra_config)}
 
     script = 'test-linux.sh'
     worker['context'] = config.params.file_url(
         'taskcluster/scripts/tester/{}'.format(script),
     )
 
     command = worker['command'] = ["./{}".format(script)]
     if mozharness.get('include-blob-upload-branch'):
         command.append('--blob-upload-branch=' + config.params['project'])
     command.extend(mozharness.get('extra-options', []))
 
     if test.get('test-manifests'):
-        env['MOZHARNESS_TEST_PATHS'] = six.ensure_text(
-            json.dumps({test['suite']: test['test-manifests']}))
+        env['MOZHARNESS_TEST_PATHS'] = json.dumps({test['suite']: test['test-manifests']})
 
     # TODO: remove the need for run['chunked']
     elif mozharness.get('chunked') or test['chunks'] > 1:
         command.append('--total-chunk={}'.format(test['chunks']))
         command.append('--this-chunk={}'.format(test['this-chunk']))
 
     if 'download-symbols' in mozharness:
         download_symbols = mozharness['download-symbols']
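
Note: two env values built in these hunks are easy to get wrong: `ENABLE_E10S`
must be the lowercase strings 'true'/'false' (hence `str(bool(...)).lower()`),
and `MOZHARNESS_TEST_PATHS` is a JSON mapping of suite name to manifest list. A
sketch (suite and manifest names illustrative):

    import json

    test = {'e10s': True, 'suite': 'mochitest-plain',
            'test-manifests': ['dom/base/test/mochitest.ini']}

    env = {}
    env['ENABLE_E10S'] = str(bool(test.get('e10s'))).lower()
    env['MOZHARNESS_TEST_PATHS'] = json.dumps(
        {test['suite']: test['test-manifests']})

    assert env['ENABLE_E10S'] == 'true'
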
--- a/taskcluster/taskgraph/transforms/job/python_test.py
+++ b/taskcluster/taskgraph/transforms/job/python_test.py
@@ -2,32 +2,31 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Support for running mach python-test tasks (via run-task)
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.transforms.job import run_job_using, configure_taskdesc_for_run
 from taskgraph.util.schema import Schema
 from voluptuous import Required
 
 python_test_schema = Schema({
     Required('using'): 'python-test',
 
     # Python version to use
     Required('python-version'): int,
 
     # The subsuite to run
-    Required('subsuite'): text_type,
+    Required('subsuite'): basestring,
 
     # Base work directory used to set up the task.
-    Required('workdir'): text_type,
+    Required('workdir'): basestring,
 })
 
 
 defaults = {
     'python-version': 2,
     'subsuite': 'default',
 }
 
--- a/taskcluster/taskgraph/transforms/job/run_task.py
+++ b/taskcluster/taskgraph/transforms/job/run_task.py
@@ -37,29 +37,29 @@ run_task_schema = Schema({
     Optional(
         "cwd",
         description="Path to run command in. If a checkout is present, the path "
         "to the checkout will be interpolated with the key `checkout`",
     ): text_type,
 
     # The sparse checkout profile to use. Value is the filename relative to the
     # directory where sparse profiles are defined (build/sparse-profiles/).
-    Required('sparse-profile'): Any(text_type, None),
+    Required('sparse-profile'): Any(basestring, None),
 
     # if true, perform a checkout of a comm-central based branch inside the
     # gecko checkout
     Required('comm-checkout'): bool,
 
     # The command arguments to pass to the `run-task` script, after the
     # checkout arguments.  If a list, it will be passed directly; otherwise
     # it will be included in a single argument to `bash -cx`.
     Required('command'): Any([taskref_or_string], taskref_or_string),
 
     # Base work directory used to set up the task.
-    Required('workdir'): text_type,
+    Required('workdir'): basestring,
 
     # If not false, tooltool downloads will be enabled via relengAPIProxy
     # for either just public files, or all files. Only supported on
     # docker-worker.
     Required('tooltool-downloads'): Any(
         False,
         'public',
         'internal',
@@ -126,18 +126,18 @@ def docker_worker_run_task(config, job, 
     elif run_cwd and "{checkout}" in run_cwd:
         raise Exception(
             "Found `{{checkout}}` interpolation in `cwd` for task {name} "
             "but the task doesn't have a checkout: {cwd}".format(
                 cwd=run_cwd, name=job.get("name", job.get("label"))
             )
         )
 
-    # dict is for the case of `{'task-reference': text_type}`.
-    if isinstance(run_command, (text_type, dict)):
+    # dict is for the case of `{'task-reference': basestring}`.
+    if isinstance(run_command, (basestring, dict)):
         run_command = ['bash', '-cx', run_command]
     if run['comm-checkout']:
         command.append('--comm-checkout={}/comm'.format(
             taskdesc['worker']['env']['GECKO_PATH']))
     command.append('--fetch-hgfingerprint')
     if run['run-as-root']:
         command.extend(('--user', 'root', '--group', 'root'))
     if run_cwd:
@@ -197,17 +197,17 @@ def generic_worker_run_task(config, job,
     elif run_cwd and "{checkout}" in run_cwd:
         raise Exception(
             "Found `{{checkout}}` interpolation in `cwd` for task {name} "
             "but the task doesn't have a checkout: {cwd}".format(
                 cwd=run_cwd, name=job.get("name", job.get("label"))
             )
         )
 
-    if isinstance(run_command, text_type):
+    if isinstance(run_command, basestring):
         if is_win:
             run_command = '"{}"'.format(run_command)
         run_command = ['bash', '-cx', run_command]
 
     if run['comm-checkout']:
         command.append('--comm-checkout={}/comm'.format(
             taskdesc['worker']['env']['GECKO_PATH']))
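
Note: the `isinstance(run_command, (basestring, dict))` branch is what lets
`run.command` be either a single shell string (wrapped in `bash -cx`) or a
pre-split argument list (passed through as-is). A hand-rolled sketch of the
normalization (Python 3 `str` shown):

    def normalize_command(run_command):
        # a plain string -- or a {'task-reference': ...} dict, which is
        # serialized to a string later -- becomes one `bash -cx` invocation
        if isinstance(run_command, (str, dict)):
            return ['bash', '-cx', run_command]
        # a list is assumed to be pre-split argv and is passed through
        return run_command

    assert normalize_command('make check')[:2] == ['bash', '-cx']
    assert normalize_command(['./mach', 'build']) == ['./mach', 'build']
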
 
--- a/taskcluster/taskgraph/transforms/job/spidermonkey.py
+++ b/taskcluster/taskgraph/transforms/job/spidermonkey.py
@@ -2,17 +2,16 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Support for running spidermonkey jobs via dedicated scripts
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.util.schema import Schema
 from voluptuous import Required, Any, Optional
 
 from taskgraph.transforms.job import (
     run_job_using,
     configure_taskdesc_for_run,
 )
 from taskgraph.transforms.job.common import (
@@ -20,21 +19,21 @@ from taskgraph.transforms.job.common imp
     generic_worker_add_artifacts,
 )
 
 sm_run_schema = Schema({
     Required('using'): Any('spidermonkey', 'spidermonkey-package', 'spidermonkey-mozjs-crate',
                            'spidermonkey-rust-bindings'),
 
     # SPIDERMONKEY_VARIANT and SPIDERMONKEY_PLATFORM
-    Required('spidermonkey-variant'): text_type,
-    Optional('spidermonkey-platform'): text_type,
+    Required('spidermonkey-variant'): basestring,
+    Optional('spidermonkey-platform'): basestring,
 
     # Base work directory used to set up the task.
-    Required('workdir'): text_type,
+    Required('workdir'): basestring,
 
     Required('tooltool-downloads'): Any(
         False,
         'public',
         'internal',
     ),
 })
 
--- a/taskcluster/taskgraph/transforms/job/toolchain.py
+++ b/taskcluster/taskgraph/transforms/job/toolchain.py
@@ -5,17 +5,16 @@
 Support for running toolchain-building jobs via dedicated scripts
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 from mozbuild.shellutil import quote as shell_quote
 from mozpack import path
 
-from six import text_type
 from taskgraph.util.schema import Schema
 from voluptuous import Optional, Required, Any
 
 from taskgraph.transforms.job import (
     configure_taskdesc_for_run,
     run_job_using,
 )
 from taskgraph.transforms.job.common import (
@@ -29,49 +28,49 @@ import taskgraph
 CACHE_TYPE = 'toolchains.v3'
 
 toolchain_run_schema = Schema({
     Required('using'): 'toolchain-script',
 
     # The script (in taskcluster/scripts/misc) to run.
     # Python scripts are invoked with `mach python` so vendored libraries
     # are available.
-    Required('script'): text_type,
+    Required('script'): basestring,
 
     # Arguments to pass to the script.
-    Optional('arguments'): [text_type],
+    Optional('arguments'): [basestring],
 
     # If not false, tooltool downloads will be enabled via relengAPIProxy
     # for either just public files, or all files.  Not supported on Windows
     Required('tooltool-downloads'): Any(
         False,
         'public',
         'internal',
     ),
 
     # Sparse profile to give to checkout using `run-task`.  If given,
     # a filename in `build/sparse-profiles`.  Defaults to
     # "toolchain-build", i.e., to
     # `build/sparse-profiles/toolchain-build`.  If `None`, instructs
     # `run-task` to not use a sparse profile at all.
-    Required('sparse-profile'): Any(text_type, None),
+    Required('sparse-profile'): Any(basestring, None),
 
     # Paths/patterns pointing to files that influence the outcome of a
     # toolchain build.
-    Optional('resources'): [text_type],
+    Optional('resources'): [basestring],
 
     # Path to the artifact produced by the toolchain job
-    Required('toolchain-artifact'): text_type,
+    Required('toolchain-artifact'): basestring,
 
     # An alias that can be used instead of the real toolchain job name in
     # the toolchains list for build jobs.
-    Optional('toolchain-alias'): text_type,
+    Optional('toolchain-alias'): basestring,
 
     # Base work directory used to set up the task.
-    Required('workdir'): text_type,
+    Required('workdir'): basestring,
 })
 
 
 def get_digest_data(config, run, taskdesc):
     files = list(run.pop('resources', []))
     # This file
     files.append('taskcluster/taskgraph/transforms/job/toolchain.py')
     # The script
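
Note: `get_digest_data` feeds the `resources` list (plus the build script and
this transform file itself) into the toolchain cache digest, so editing any of
them forces a rebuild instead of a stale cache hit. A hand-rolled sketch of that
idea (the in-tree helpers live in `taskgraph.util.hash`):

    import hashlib
    import tempfile

    def digest_of(paths):
        """Combine the content hashes of several files into one digest."""
        h = hashlib.sha256()
        for path in sorted(paths):
            with open(path, 'rb') as fh:
                h.update(hashlib.sha256(fh.read()).hexdigest().encode())
        return h.hexdigest()

    with tempfile.NamedTemporaryFile(suffix='.sh', delete=False) as script:
        script.write(b'#!/bin/sh\nexec ./build.sh\n')

    before = digest_of([script.name])
    with open(script.name, 'ab') as fh:
        fh.write(b'# tweak\n')
    assert digest_of([script.name]) != before  # edits invalidate the cache
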
--- a/taskcluster/taskgraph/transforms/l10n.py
+++ b/taskcluster/taskgraph/transforms/l10n.py
@@ -3,21 +3,19 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Do transforms specific to l10n kind
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import copy
-import io
 import json
 
 from mozbuild.chunkify import chunkify
-from six import text_type
 from taskgraph.loader.multi_dep import schema
 from taskgraph.transforms.base import (
     TransformSequence,
 )
 from taskgraph.util.schema import (
     optionally_keyed_by,
     resolve_keyed_by,
     taskref_or_string,
@@ -35,118 +33,118 @@ from voluptuous import (
 
 
 def _by_platform(arg):
     return optionally_keyed_by('build-platform', arg)
 
 
 l10n_description_schema = schema.extend({
     # Name for this job, inferred from the dependent job before validation
-    Required('name'): text_type,
+    Required('name'): basestring,
 
     # build-platform, inferred from dependent job before validation
-    Required('build-platform'): text_type,
+    Required('build-platform'): basestring,
 
     # max run time of the task
     Required('run-time'): _by_platform(int),
 
     # Locales not to repack for
-    Required('ignore-locales'): _by_platform([text_type]),
+    Required('ignore-locales'): _by_platform([basestring]),
 
     # All l10n jobs use mozharness
     Required('mozharness'): {
         # Script to invoke for mozharness
-        Required('script'): _by_platform(text_type),
+        Required('script'): _by_platform(basestring),
 
         # Config files passed to the mozharness script
-        Required('config'): _by_platform([text_type]),
+        Required('config'): _by_platform([basestring]),
 
         # Additional paths to look for mozharness configs in. These should be
         # relative to the base of the source checkout
-        Optional('config-paths'): [text_type],
+        Optional('config-paths'): [basestring],
 
         # Options to pass to the mozharness script
-        Optional('options'): _by_platform([text_type]),
+        Optional('options'): _by_platform([basestring]),
 
         # Action commands to provide to mozharness script
-        Required('actions'): _by_platform([text_type]),
+        Required('actions'): _by_platform([basestring]),
 
         # if true, perform a checkout of a comm-central based branch inside the
         # gecko checkout
         Required('comm-checkout', default=False): bool,
     },
     # Items for the taskcluster index
     Optional('index'): {
         # Product to identify as in the taskcluster index
-        Required('product'): _by_platform(text_type),
+        Required('product'): _by_platform(basestring),
 
         # Job name to identify as in the taskcluster index
-        Required('job-name'): _by_platform(text_type),
+        Required('job-name'): _by_platform(basestring),
 
         # Type of index
-        Optional('type'): _by_platform(text_type),
+        Optional('type'): _by_platform(basestring),
     },
     # Description of the localized task
-    Required('description'): _by_platform(text_type),
+    Required('description'): _by_platform(basestring),
 
     Optional('run-on-projects'): job_description_schema['run-on-projects'],
 
     # worker-type to utilize
-    Required('worker-type'): _by_platform(text_type),
+    Required('worker-type'): _by_platform(basestring),
 
     # File which contains the used locales
-    Required('locales-file'): _by_platform(text_type),
+    Required('locales-file'): _by_platform(basestring),
 
     # Tooltool visibility required for task.
     Required('tooltool'): _by_platform(Any('internal', 'public')),
 
     # Docker image required for task.  We accept only in-tree images
     # -- generally desktop-build or android-build -- for now.
     Required('docker-image', default=None): _by_platform(Any(
         # an in-tree generated docker image (from `taskcluster/docker/<name>`)
-        {'in-tree': text_type},
+        {'in-tree': basestring},
         None,
     )),
 
     Optional('fetches'): {
-        text_type: _by_platform([text_type]),
+        basestring: _by_platform([basestring]),
     },
 
     # The set of secret names to which the task has access; these are prefixed
     # with `project/releng/gecko/{treeherder.kind}/level-{level}/`.  Setting
     # this will enable any worker features required and set the task's scopes
     # appropriately.  `true` here means ['*'], all secrets.  Not supported on
     # Windows
-    Required('secrets', default=False): _by_platform(Any(bool, [text_type])),
+    Required('secrets', default=False): _by_platform(Any(bool, [basestring])),
 
     # Information for treeherder
     Required('treeherder'): {
         # Platform to display the task on in treeherder
-        Required('platform'): _by_platform(text_type),
+        Required('platform'): _by_platform(basestring),
 
         # Symbol to use
-        Required('symbol'): text_type,
+        Required('symbol'): basestring,
 
         # Tier this task is
         Required('tier'): _by_platform(int),
     },
 
     # Extra environment values to pass to the worker
-    Optional('env'): _by_platform({text_type: taskref_or_string}),
+    Optional('env'): _by_platform({basestring: taskref_or_string}),
 
     # Max number locales per chunk
     Optional('locales-per-chunk'): _by_platform(int),
 
     # Task deps to chain this task with, added in transforms from primary-dependency
     # if this is a nightly
-    Optional('dependencies'): {text_type: text_type},
+    Optional('dependencies'): {basestring: basestring},
 
     # Run the task when the listed files change (if present).
     Optional('when'): {
-        'files-changed': [text_type]
+        'files-changed': [basestring]
     },
 
     # passed through directly to the job description
     Optional('attributes'): job_description_schema['attributes'],
     Optional('extra'): job_description_schema['extra'],
 
     # Shipping product and phase
     Optional('shipping-product'): task_description_schema['shipping-product'],
@@ -156,22 +154,23 @@ l10n_description_schema = schema.extend(
 transforms = TransformSequence()
 
 
 def parse_locales_file(locales_file, platform=None):
     """ Parse the passed locales file for a list of locales.
     """
     locales = []
 
-    with io.open(locales_file, mode='r') as f:
+    with open(locales_file, mode='r') as f:
         if locales_file.endswith('json'):
             all_locales = json.load(f)
             # XXX Only single locales are fetched
             locales = {
-                locale: data['revision'] for locale, data in all_locales.items()
+                locale: data['revision']
+                for locale, data in all_locales.items()
                 if platform is None or platform in data['platforms']
             }
         else:
             all_locales = f.read().split()
             # 'default' is the hg revision at the tip of the hg repo, in this context
             locales = {locale: 'default' for locale in all_locales}
     return locales
 
@@ -260,17 +259,17 @@ def handle_keyed_by(config, jobs):
 
 
 @transforms.add
 def handle_artifact_prefix(config, jobs):
     """Resolve ``artifact_prefix`` in env vars"""
     for job in jobs:
         artifact_prefix = get_artifact_prefix(job)
         for k1, v1 in job.get('env', {}).iteritems():
-            if isinstance(v1, text_type):
+            if isinstance(v1, basestring):
                 job['env'][k1] = v1.format(
                     artifact_prefix=artifact_prefix
                 )
             elif isinstance(v1, dict):
                 for k2, v2 in v1.iteritems():
                     job['env'][k1][k2] = v2.format(
                         artifact_prefix=artifact_prefix
                     )
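
Note: `parse_locales_file` above accepts either a JSON locales file (locale ->
metadata, optionally filtered by platform) or a plain whitespace-separated list.
A sketch of the JSON branch with the platform filter applied (locale data
illustrative):

    # shape of an all-locales JSON file, as consumed by parse_locales_file
    all_locales = {
        'de': {'revision': 'abcdef123456', 'platforms': ['linux64', 'win64']},
        'ja-JP-mac': {'revision': '123456abcdef', 'platforms': ['macosx64']},
    }

    # the platform filter keeps only locales shipped on that platform
    platform = 'linux64'
    locales = {
        locale: data['revision']
        for locale, data in all_locales.items()
        if platform is None or platform in data['platforms']
    }
    assert locales == {'de': 'abcdef123456'}
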
--- a/taskcluster/taskgraph/transforms/openh264.py
+++ b/taskcluster/taskgraph/transforms/openh264.py
@@ -1,16 +1,16 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 This transform is used to help populate mozharness options for openh264 jobs
 """
 
-from __future__ import absolute_import, print_function, unicode_literals
+from __future__ import absolute_import
 
 from taskgraph.transforms.base import TransformSequence
 
 transforms = TransformSequence()
 
 
 @transforms.add
 def set_mh_options(config, jobs):
--- a/taskcluster/taskgraph/transforms/openh264_signing.py
+++ b/taskcluster/taskgraph/transforms/openh264_signing.py
@@ -2,32 +2,31 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform the openh264 signing task into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.scriptworker import (
     get_signing_cert_scope_per_platform,
 )
 from taskgraph.util.treeherder import inherit_treeherder_from_dep
 from taskgraph.transforms.task import task_description_schema
 from voluptuous import Required, Optional
 
 transforms = TransformSequence()
 
 signing_description_schema = schema.extend({
-    Required('depname', default='repackage'): text_type,
-    Optional('label'): text_type,
+    Required('depname', default='repackage'): basestring,
+    Optional('label'): basestring,
     Optional('extra'): object,
     Optional('shipping-product'): task_description_schema['shipping-product'],
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
 })
 
 transforms.add_validate(signing_description_schema)
 
 
--- a/taskcluster/taskgraph/transforms/push_apk.py
+++ b/taskcluster/taskgraph/transforms/push_apk.py
@@ -4,36 +4,35 @@
 """
 Transform the push-apk kind into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import re
 
-from six import text_type
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.transforms.task import task_description_schema
 from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by, Schema
 from taskgraph.util.scriptworker import get_push_apk_scope
 from taskgraph.util.taskcluster import get_artifact_prefix
 
 from voluptuous import Optional, Required
 
 
 push_apk_description_schema = Schema({
     Required('dependent-tasks'): object,
-    Required('name'): text_type,
+    Required('name'): basestring,
     Required('label'): task_description_schema['label'],
     Required('description'): task_description_schema['description'],
     Required('job-from'): task_description_schema['job-from'],
     Required('attributes'): task_description_schema['attributes'],
     Required('treeherder'): task_description_schema['treeherder'],
     Required('run-on-projects'): task_description_schema['run-on-projects'],
-    Required('worker-type'): optionally_keyed_by('release-level', text_type),
+    Required('worker-type'): optionally_keyed_by('release-level', basestring),
     Required('worker'): object,
     Required('scopes'): None,
     Required('shipping-phase'): task_description_schema['shipping-phase'],
     Required('shipping-product'): task_description_schema['shipping-product'],
     Optional('extra'): task_description_schema['extra'],
 })
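
optionally_keyed_by('release-level', ...) permits either a plain value or a by-release-level mapping; a later transform collapses it with resolve_keyed_by. A sketch with invented worker-type names:

    from taskgraph.util.schema import resolve_keyed_by

    job = {
        'name': 'push-apk',
        'worker-type': {'by-release-level': {
            'production': 'scriptworker-prov-v1/pushapk-v1',  # invented
            'staging': 'scriptworker-prov-v1/dep-pushapk',    # invented
        }},
    }
    resolve_keyed_by(job, 'worker-type', item_name=job['name'],
                     **{'release-level': 'staging'})
    assert job['worker-type'] == 'scriptworker-prov-v1/dep-pushapk'
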
 
 
--- a/taskcluster/taskgraph/transforms/push_apk_checks.py
+++ b/taskcluster/taskgraph/transforms/push_apk_checks.py
@@ -2,42 +2,41 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform the push-apk-checks kind into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.transforms.task import task_description_schema
 from taskgraph.transforms.push_apk import (
     validate_dependent_tasks,
     generate_dependencies,
     delete_non_required_fields,
 )
 from taskgraph.transforms.google_play_strings import set_worker_data
 from taskgraph.transforms.job.mozharness_test import get_artifact_url
 from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by, Schema
 
 from voluptuous import Required
 
 transforms = TransformSequence()
 transforms.add_validate(Schema({
     Required('dependent-tasks'): object,
-    Required('name'): text_type,
+    Required('name'): basestring,
     Required('label'): task_description_schema['label'],
     Required('description'): task_description_schema['description'],
     Required('job-from'): task_description_schema['job-from'],
     Required('attributes'): task_description_schema['attributes'],
     Required('treeherder'): task_description_schema['treeherder'],
-    Required('package-name'): optionally_keyed_by('project', text_type),
+    Required('package-name'): optionally_keyed_by('project', basestring),
     Required('run-on-projects'): task_description_schema['run-on-projects'],
-    Required('worker-type'): text_type,
+    Required('worker-type'): basestring,
     Required('worker'): object,
     Required('shipping-phase'): task_description_schema['shipping-phase'],
     Required('shipping-product'): task_description_schema['shipping-product'],
 }))
 transforms.add(validate_dependent_tasks)
 transforms.add(set_worker_data)
 
 
--- a/taskcluster/taskgraph/transforms/raptor.py
+++ b/taskcluster/taskgraph/transforms/raptor.py
@@ -1,16 +1,15 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 from copy import deepcopy
-from six import text_type
 
 from voluptuous import (
     Any,
     Optional,
     Required,
     Extra,
 )
 
@@ -21,27 +20,27 @@ from taskgraph.util.treeherder import sp
 
 transforms = TransformSequence()
 
 
 raptor_description_schema = Schema({
     # Raptor specific configs.
     Optional('apps'): optionally_keyed_by(
         'test-platform',
-        [text_type]
+        [basestring]
     ),
-    Optional('raptor-test'): text_type,
-    Optional('raptor-subtests'): [text_type],
+    Optional('raptor-test'): basestring,
+    Optional('raptor-subtests'): [basestring],
     Optional('activity'): optionally_keyed_by(
         'app',
-        text_type
+        basestring
     ),
     Optional('binary-path'): optionally_keyed_by(
         'app',
-        text_type
+        basestring
     ),
     Optional('pageload'): optionally_keyed_by(
         'test-platform', 'app',
         Any('cold', 'warm', 'both'),
     ),
     # Configs defined in the 'test_description_schema'.
     Optional('max-run-time'): optionally_keyed_by(
         'app',
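
The pageload field above may be keyed by either of two axes before resolving to a literal. A self-contained sketch of how optionally_keyed_by accepts both the plain and the keyed spellings (app names invented):

    from voluptuous import Any, Optional, Schema
    from taskgraph.util.schema import optionally_keyed_by

    pageload_schema = Schema({
        Optional('pageload'): optionally_keyed_by(
            'test-platform', 'app', Any('cold', 'warm', 'both')),
    })
    pageload_schema({'pageload': 'warm'})                 # plain value
    pageload_schema({'pageload': {'by-app': {             # keyed value
        'fenix': 'cold', 'default': 'both'}}})
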
--- a/taskcluster/taskgraph/transforms/release_beetmover_signed_addons.py
+++ b/taskcluster/taskgraph/transforms/release_beetmover_signed_addons.py
@@ -2,17 +2,16 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform the beetmover task into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.transforms.beetmover import craft_release_properties
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by
 from taskgraph.util.scriptworker import (get_beetmover_bucket_scope,
                                          get_beetmover_action_scope,
                                          generate_beetmover_upstream_artifacts,
@@ -27,35 +26,35 @@ import copy
 logger = logging.getLogger(__name__)
 
 
 transforms = TransformSequence()
 
 
 beetmover_description_schema = schema.extend({
     # depname is used in taskrefs to identify the taskID of the unsigned things

-    Required('depname', default='build'): text_type,
+    Required('depname', default='build'): basestring,
 
     # attributes is used for enabling artifact-map by declarative artifacts
-    Required('attributes'): {text_type: object},
+    Required('attributes'): {basestring: object},
 
     # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
-    Optional('label'): text_type,
+    Optional('label'): basestring,
 
     # treeherder is allowed here to override any defaults we use for beetmover.  See
     # taskcluster/taskgraph/transforms/task.py for the schema details, and the
     # below transforms for defaults of various values.
     Optional('treeherder'): task_description_schema['treeherder'],
 
-    Required('description'): text_type,
-    Required('worker-type'): optionally_keyed_by('release-level', text_type),
+    Required('description'): basestring,
+    Required('worker-type'): optionally_keyed_by('release-level', basestring),
     Required('run-on-projects'): [],
 
     # locale is passed only for l10n beetmoving
-    Optional('locale'): text_type,
+    Optional('locale'): basestring,
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
     Optional('shipping-product'): task_description_schema['shipping-product'],
 })
 
 
 transforms.add_validate(beetmover_description_schema)
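
All of these kinds start from the single_dep loader's base schema and layer kind-specific fields on top via voluptuous Schema.extend. A self-contained sketch of that layering (Python 2, fields invented):

    from voluptuous import Optional, Required, Schema

    base = Schema({Required('depname'): basestring})           # loader level
    beetmover = base.extend({Optional('locale'): basestring})  # kind level

    beetmover({'depname': 'build', 'locale': 'de'})  # validated against both layers
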
 
 
--- a/taskcluster/taskgraph/transforms/release_generate_checksums_beetmover.py
+++ b/taskcluster/taskgraph/transforms/release_generate_checksums_beetmover.py
@@ -1,17 +1,16 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform the `release-generate-checksums-beetmover` task to also append `build` as a dependency
 """
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.scriptworker import (generate_beetmover_artifact_map,
                                          generate_beetmover_upstream_artifacts,
                                          get_beetmover_bucket_scope,
                                          get_beetmover_action_scope,
                                          get_worker_type_for_scope)
@@ -19,20 +18,20 @@ from taskgraph.transforms.beetmover impo
 from taskgraph.transforms.task import task_description_schema
 from voluptuous import Required, Optional
 
 transforms = TransformSequence()
 
 
 release_generate_checksums_beetmover_schema = schema.extend({
     # depname is used in taskrefs to identify the taskID of the unsigned things
-    Required('depname', default='build'): text_type,
+    Required('depname', default='build'): basestring,
 
     # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
-    Optional('label'): text_type,
+    Optional('label'): basestring,
 
     # treeherder is allowed here to override any defaults we use for beetmover.  See
     # taskcluster/taskgraph/transforms/task.py for the schema details, and the
     # below transforms for defaults of various values.
     Optional('treeherder'): task_description_schema['treeherder'],
 
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
     Optional('shipping-product'): task_description_schema['shipping-product'],
--- a/taskcluster/taskgraph/transforms/release_generate_checksums_signing.py
+++ b/taskcluster/taskgraph/transforms/release_generate_checksums_signing.py
@@ -2,30 +2,29 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform the release-generate-checksums-signing task into a task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.scriptworker import (
     get_signing_cert_scope,
 )
 from taskgraph.util.taskcluster import get_artifact_path
 from taskgraph.transforms.task import task_description_schema
 from voluptuous import Required, Optional
 
 release_generate_checksums_signing_schema = schema.extend({
-    Required('depname', default='release-generate-checksums'): text_type,
-    Optional('label'): text_type,
+    Required('depname', default='release-generate-checksums'): basestring,
+    Optional('label'): basestring,
     Optional('treeherder'): task_description_schema['treeherder'],
     Optional('shipping-product'): task_description_schema['shipping-product'],
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
 })
 
 transforms = TransformSequence()
 transforms.add_validate(release_generate_checksums_signing_schema)
 
@@ -44,17 +43,19 @@ def make_release_generate_checksums_sign
                               "{}/opt".format(dep_th_platform))
         treeherder.setdefault('tier', 1)
         treeherder.setdefault('kind', 'build')
 
         job_template = "{}-{}".format(dep_job.label, "signing")
         label = job.get("label", job_template)
         description = "Signing of the overall release-related checksums"
 
-        dependencies = {dep_job.kind: dep_job.label}
+        dependencies = {
+            str(dep_job.kind): dep_job.label
+        }
 
         upstream_artifacts = [{
             "taskId": {"task-reference": "<{}>".format(str(dep_job.kind))},
             "taskType": "build",
             "paths": [
                 get_artifact_path(dep_job, "SHA256SUMS"),
                 get_artifact_path(dep_job, "SHA512SUMS"),
             ],
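
The {"task-reference": "<kind>"} wrapper above defers taskId substitution until the graph is materialized; taskgraph performs that substitution itself, but the rule is roughly this (toy sketch, not the real resolver):

    import re

    def resolve_task_reference(value, dependencies):
        # '<name>' is replaced with the taskId of the dependency registered
        # under that name, e.g. dependencies['release-generate-checksums'].
        return re.sub(r'<([^>]+)>', lambda m: dependencies[m.group(1)], value)

    deps = {'release-generate-checksums': 'abc123DEFtaskId'}
    print(resolve_task_reference('<release-generate-checksums>', deps))
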
--- a/taskcluster/taskgraph/transforms/release_sign_and_push_langpacks.py
+++ b/taskcluster/taskgraph/transforms/release_sign_and_push_langpacks.py
@@ -2,41 +2,40 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform the release-sign-and-push task into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.schema import resolve_keyed_by, optionally_keyed_by
 from taskgraph.util.treeherder import inherit_treeherder_from_dep
 from taskgraph.transforms.task import task_description_schema
 from voluptuous import Any, Required
 
 transforms = TransformSequence()
 
 langpack_sign_push_description_schema = schema.extend({
-    Required('label'): text_type,
-    Required('description'): text_type,
-    Required('worker-type'): optionally_keyed_by('release-level', text_type),
+    Required('label'): basestring,
+    Required('description'): basestring,
+    Required('worker-type'): optionally_keyed_by('release-level', basestring),
     Required('worker'): {
         Required('implementation'): 'push-addons',
         Required('channel'): optionally_keyed_by(
             'project',
             optionally_keyed_by('platform', Any('listed', 'unlisted'))),
         Required('upstream-artifacts'): None,   # Processed here below
     },
 
     Required('run-on-projects'): [],
-    Required('scopes'): optionally_keyed_by('release-level', [text_type]),
+    Required('scopes'): optionally_keyed_by('release-level', [basestring]),
     Required('shipping-phase'): task_description_schema['shipping-phase'],
     Required('shipping-product'): task_description_schema['shipping-product'],
 })
 
 
 @transforms.add
 def set_label(config, jobs):
     for job in jobs:
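
The nested optionally_keyed_by above lets the channel be keyed first by project and then by platform. A sketch of a value that satisfies it (project and platform names invented):

    from voluptuous import Any, Required, Schema
    from taskgraph.util.schema import optionally_keyed_by

    channel_schema = Schema({
        Required('channel'): optionally_keyed_by(
            'project', optionally_keyed_by(
                'platform', Any('listed', 'unlisted'))),
    })
    channel_schema({'channel': {'by-project': {
        'mozilla-release': {'by-platform': {'linux64': 'listed',
                                            'default': 'unlisted'}},
        'default': 'unlisted',
    }}})
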
--- a/taskcluster/taskgraph/transforms/release_snap_push.py
+++ b/taskcluster/taskgraph/transforms/release_snap_push.py
@@ -2,34 +2,33 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform the release-snap-push kind into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.transforms.task import task_description_schema
 from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by, Schema
 from taskgraph.util.scriptworker import add_scope_prefix
 
 from voluptuous import Optional, Required
 
 push_snap_description_schema = Schema({
-    Required('name'): text_type,
+    Required('name'): basestring,
     Required('job-from'): task_description_schema['job-from'],
     Required('dependencies'): task_description_schema['dependencies'],
     Required('description'): task_description_schema['description'],
     Required('treeherder'): task_description_schema['treeherder'],
     Required('run-on-projects'): task_description_schema['run-on-projects'],
-    Required('worker-type'): optionally_keyed_by('release-level', text_type),
+    Required('worker-type'): optionally_keyed_by('release-level', basestring),
     Required('worker'): object,
-    Optional('scopes'): [text_type],
+    Optional('scopes'): [basestring],
     Required('shipping-phase'): task_description_schema['shipping-phase'],
     Required('shipping-product'): task_description_schema['shipping-product'],
     Optional('extra'): task_description_schema['extra'],
     Optional('attributes'): task_description_schema['attributes'],
 })
 
 transforms = TransformSequence()
 transforms.add_validate(push_snap_description_schema)
--- a/taskcluster/taskgraph/transforms/repackage.py
+++ b/taskcluster/taskgraph/transforms/repackage.py
@@ -4,74 +4,73 @@
 """
 Transform the repackage task into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import copy
 
-from six import text_type
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.schema import (
     optionally_keyed_by,
     resolve_keyed_by,
 )
 from taskgraph.util.taskcluster import get_artifact_prefix
 from taskgraph.util.platforms import archive_format, architecture
 from taskgraph.util.workertypes import worker_type_implementation
 from taskgraph.transforms.job import job_description_schema
 from voluptuous import Required, Optional, Extra
 
 
 packaging_description_schema = schema.extend({
     # depname is used in taskrefs to identify the taskID of the signed things
-    Required('depname', default='build'): text_type,
+    Required('depname', default='build'): basestring,
 
     # unique label to describe this repackaging task
-    Optional('label'): text_type,
+    Optional('label'): basestring,
 
-    Optional('worker-type'): text_type,
+    Optional('worker-type'): basestring,
     Optional('worker'): object,
 
     # treeherder is allowed here to override any defaults we use for repackaging.  See
     # taskcluster/taskgraph/transforms/task.py for the schema details, and the
     # below transforms for defaults of various values.
     Optional('treeherder'): job_description_schema['treeherder'],
 
     # If a l10n task, the corresponding locale
-    Optional('locale'): text_type,
+    Optional('locale'): basestring,
 
     # Routes specific to this task, if defined
-    Optional('routes'): [text_type],
+    Optional('routes'): [basestring],
 
     # passed through directly to the job description
     Optional('extra'): job_description_schema['extra'],
 
     # passed through to job description
     Optional('fetches'): job_description_schema['fetches'],
 
     # Shipping product and phase
     Optional('shipping-product'): job_description_schema['shipping-product'],
     Optional('shipping-phase'): job_description_schema['shipping-phase'],
 
     Required('package-formats'): optionally_keyed_by(
-        'build-platform', 'release-type', [text_type]),
+        'build-platform', 'release-type', [basestring]),
 
     # All l10n jobs use mozharness
     Required('mozharness'): {
         Extra: object,
         # Config files passed to the mozharness script
-        Required('config'): optionally_keyed_by('build-platform', [text_type]),
+        Required('config'): optionally_keyed_by('build-platform', [basestring]),
 
         # Additional paths to look for mozharness configs in. These should be
         # relative to the base of the source checkout
-        Optional('config-paths'): [text_type],
+        Optional('config-paths'): [basestring],
 
         # if true, perform a checkout of a comm-central based branch inside the
         # gecko checkout
         Required('comm-checkout', default=False): bool,
     }
 })
 
 # The configuration passed to the mozharness repackage script. This defines the
@@ -284,17 +283,17 @@ def make_job_description(config, jobs):
             'chain-of-trust': True,
             'max-run-time': 7200 if build_platform.startswith('win') else 3600,
             # Don't add generic artifact directory.
             'skip-artifacts': True,
         })
 
         if locale:
             # Make sure we specify the locale-specific upload dir
-            worker.setdefault('env', {})['LOCALE'] = locale
+            worker.setdefault('env', {}).update(LOCALE=locale)
 
         worker['artifacts'] = _generate_task_output_files(
             dep_job, worker_type_implementation(config.graph_config, worker_type),
             repackage_config=repackage_config,
             locale=locale,
         )
 
         description = (
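
The LOCALE hunk above swaps item assignment for dict.update with a keyword argument; both spellings create the env dict on demand and set the same key. A minimal sketch:

    worker = {}
    worker.setdefault('env', {})['LOCALE'] = 'de'        # landed version
    worker.setdefault('env', {}).update(LOCALE='en-GB')  # backed-out version
    print(worker)  # {'env': {'LOCALE': 'en-GB'}}
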
--- a/taskcluster/taskgraph/transforms/repackage_partner.py
+++ b/taskcluster/taskgraph/transforms/repackage_partner.py
@@ -4,17 +4,16 @@
 """
 Transform the repackage task into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import copy
 
-from six import text_type
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.schema import (
     optionally_keyed_by,
     resolve_keyed_by,
 )
 from taskgraph.util.taskcluster import get_artifact_prefix
@@ -33,41 +32,41 @@ def _by_platform(arg):
 # When repacking the stub installer we need to pass a zip file and package name to the
 # repackage task. This is not needed for vanilla stub but analogous to the full installer.
 PACKAGE_FORMATS = copy.deepcopy(PACKAGE_FORMATS_VANILLA)
 PACKAGE_FORMATS['installer-stub']['inputs']['package'] = 'target-stub{archive_format}'
 PACKAGE_FORMATS['installer-stub']['args'].extend(["--package-name", "{package-name}"])
 
 packaging_description_schema = schema.extend({
     # depname is used in taskrefs to identify the taskID of the signed things
-    Required('depname', default='build'): text_type,
+    Required('depname', default='build'): basestring,
 
     # unique label to describe this repackaging task
-    Optional('label'): text_type,
+    Optional('label'): basestring,
 
     # Routes specific to this task, if defined
-    Optional('routes'): [text_type],
+    Optional('routes'): [basestring],
 
     # passed through directly to the job description
     Optional('extra'): task_description_schema['extra'],
 
     # Shipping product and phase
     Optional('shipping-product'): task_description_schema['shipping-product'],
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
 
-    Required('package-formats'): _by_platform([text_type]),
+    Required('package-formats'): _by_platform([basestring]),
 
     # All l10n jobs use mozharness
     Required('mozharness'): {
         # Config files passed to the mozharness script
-        Required('config'): _by_platform([text_type]),
+        Required('config'): _by_platform([basestring]),
 
         # Additional paths to look for mozharness configs in. These should be
         # relative to the base of the source checkout
-        Optional('config-paths'): [text_type],
+        Optional('config-paths'): [basestring],
 
         # if true, perform a checkout of a comm-central based branch inside the
         # gecko checkout
         Required('comm-checkout', default=False): bool,
     },
 
     # Override the default priority for the project
     Optional('priority'): task_description_schema['priority'],
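
The PACKAGE_FORMATS hunk above clones the vanilla format table and then specializes the stub-installer entry without mutating the original. A self-contained sketch of the pattern (table contents invented):

    import copy

    PACKAGE_FORMATS_VANILLA = {
        'installer-stub': {'inputs': {}, 'args': ['--tool', 'mbsdiff']},
    }
    PACKAGE_FORMATS = copy.deepcopy(PACKAGE_FORMATS_VANILLA)
    PACKAGE_FORMATS['installer-stub']['inputs']['package'] = 'target-stub{archive_format}'
    PACKAGE_FORMATS['installer-stub']['args'].extend(['--package-name', '{package-name}'])
    # deepcopy keeps the vanilla table untouched:
    assert 'package' not in PACKAGE_FORMATS_VANILLA['installer-stub']['inputs']
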
--- a/taskcluster/taskgraph/transforms/repackage_signing.py
+++ b/taskcluster/taskgraph/transforms/repackage_signing.py
@@ -4,29 +4,28 @@
 """
 Transform the repackage signing task into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import os
 
-from six import text_type
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.scriptworker import (
     get_signing_cert_scope_per_platform,
 )
 from taskgraph.transforms.task import task_description_schema
 from voluptuous import Required, Optional
 
 repackage_signing_description_schema = schema.extend({
-    Required('depname', default='repackage'): text_type,
-    Optional('label'): text_type,
+    Required('depname', default='repackage'): basestring,
+    Optional('label'): basestring,
     Optional('treeherder'): task_description_schema['treeherder'],
     Optional('shipping-product'): task_description_schema['shipping-product'],
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
 })
 
 SIGNING_FORMATS = {
     "target.installer.exe": ["autograph_authenticode"],
     "target.stub-installer.exe": ["autograph_authenticode_stub"],
--- a/taskcluster/taskgraph/transforms/repackage_signing_partner.py
+++ b/taskcluster/taskgraph/transforms/repackage_signing_partner.py
@@ -2,33 +2,32 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform the repackage signing task into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.partners import check_if_partners_enabled, get_partner_config_by_kind
 from taskgraph.util.scriptworker import (
     get_signing_cert_scope_per_platform,
 )
 from taskgraph.util.taskcluster import get_artifact_path
 from taskgraph.transforms.task import task_description_schema
 from voluptuous import Required, Optional
 
 transforms = TransformSequence()
 
 repackage_signing_description_schema = schema.extend({
-    Required('depname', default='repackage'): text_type,
-    Optional('label'): text_type,
+    Required('depname', default='repackage'): basestring,
+    Optional('label'): basestring,
     Optional('extra'): object,
     Optional('shipping-product'): task_description_schema['shipping-product'],
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
     Optional('priority'): task_description_schema['priority'],
 })
 
 transforms.add(check_if_partners_enabled)
 transforms.add_validate(repackage_signing_description_schema)
--- a/taskcluster/taskgraph/transforms/signing.py
+++ b/taskcluster/taskgraph/transforms/signing.py
@@ -2,17 +2,16 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform the signing task into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.keyed_by import evaluate_keyed_by
 from taskgraph.util.schema import taskref_or_string
 from taskgraph.util.scriptworker import (
     get_signing_cert_scope_per_platform,
 )
@@ -25,48 +24,48 @@ transforms = TransformSequence()
 signing_description_schema = schema.extend({
     # Artifacts from dep task to sign - Sync with taskgraph/transforms/task.py
     # because this is passed directly into the signingscript worker
     Required('upstream-artifacts'): [{
         # taskId of the task with the artifact
         Required('taskId'): taskref_or_string,
 
         # type of signing task (for CoT)
-        Required('taskType'): text_type,
+        Required('taskType'): basestring,
 
         # Paths to the artifacts to sign
-        Required('paths'): [text_type],
+        Required('paths'): [basestring],
 
         # Signing formats to use on each of the paths
-        Required('formats'): [text_type],
+        Required('formats'): [basestring],
     }],
 
     # depname is used in taskrefs to identify the taskID of the unsigned things
-    Required('depname'): text_type,
+    Required('depname'): basestring,
 
     # attributes for this task
-    Optional('attributes'): {text_type: object},
+    Optional('attributes'): {basestring: object},
 
     # unique label to describe this signing task, defaults to {dep.label}-signing
-    Optional('label'): text_type,
+    Optional('label'): basestring,
 
     # treeherder is allowed here to override any defaults we use for signing.  See
     # taskcluster/taskgraph/transforms/task.py for the schema details, and the
     # below transforms for defaults of various values.
     Optional('treeherder'): task_description_schema['treeherder'],
 
     # Routes specific to this task, if defined
-    Optional('routes'): [text_type],
+    Optional('routes'): [basestring],
 
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
     Optional('shipping-product'): task_description_schema['shipping-product'],
 
     # Optional control for how long a task may run (aka maxRunTime)
     Optional('max-run-time'): int,
-    Optional('extra'): {text_type: object},
+    Optional('extra'): {basestring: object},
 
     # Max number of partner repacks per chunk
     Optional('repacks-per-chunk'): int,
 
     # Override the default priority for the project
     Optional('priority'): task_description_schema['priority'],
 })
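
An illustrative upstream-artifacts entry matching the schema at the top of this file; the signingscript worker receives, per artifact, its source task, the paths to sign, and the formats to apply (values invented):

    upstream_artifacts = [{
        'taskId': {'task-reference': '<build>'},   # taskref_or_string
        'taskType': 'build',                       # used for CoT verification
        'paths': ['public/build/target.tar.bz2'],
        'formats': ['autograph_gpg'],
    }]
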
 
--- a/taskcluster/taskgraph/transforms/source_checksums_signing.py
+++ b/taskcluster/taskgraph/transforms/source_checksums_signing.py
@@ -1,29 +1,28 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 """
 Transform the checksums signing task into an actual task description.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
-from six import text_type
 from taskgraph.loader.single_dep import schema
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.attributes import copy_attributes_from_dependent_job
 from taskgraph.util.scriptworker import (
     get_signing_cert_scope,
 )
 from taskgraph.transforms.task import task_description_schema
 from voluptuous import Required, Optional
 
 checksums_signing_description_schema = schema.extend({
-    Required('depname', default='beetmover'): text_type,
-    Optional('label'): text_type,
+    Required('depname', default='beetmover'): basestring,
+    Optional('label'): basestring,
     Optional('treeherder'): task_description_schema['treeherder'],
     Optional('shipping-product'): task_description_schema['shipping-product'],
     Optional('shipping-phase'): task_description_schema['shipping-phase'],
 })
 
 transforms = TransformSequence()
 transforms.add_validate(checksums_signing_description_schema)
 
--- a/taskcluster/taskgraph/transforms/source_test.py
+++ b/taskcluster/taskgraph/transforms/source_test.py
@@ -5,18 +5,16 @@ Source-test jobs can run on multiple pla
 with either `platform` or a list of `platforms`, and set the appropriate
 treeherder configuration and attributes for that platform.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import copy
 import os
-import six
-from six import text_type
 
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.transforms.job import job_description_schema
 from taskgraph.util.attributes import keymatch
 from taskgraph.util.schema import (
     resolve_keyed_by,
     optionally_keyed_by,
 )
@@ -33,17 +31,17 @@ from voluptuous import (
 source_test_description_schema = Schema({
     # most fields are passed directly through as job fields, and are not
     # repeated here
     Extra: object,
 
     # The platform on which this task runs.  This will be used to set up attributes
     # (for try selection) and treeherder metadata (for display).  If given as a list,
     # the job will be "split" into multiple tasks, one with each platform.
-    Required('platform'): Any(text_type, [text_type]),
+    Required('platform'): Any(basestring, [basestring]),
 
     # Whether the job requires a build artifact or not. If True, the task will
     # depend on a build task and the installer url will be saved to the
     # GECKO_INSTALLER_URL environment variable. Build labels are determined by the
     # `dependent-build-platforms` config in kind.yml.
     Required('require-build'): bool,
 
     # These fields can be keyed by "platform", and are otherwise identical to
@@ -55,18 +53,18 @@ source_test_description_schema = Schema(
         'platform', job_description_schema['worker']),
 
     Optional('python-version'): [int],
     # If true, the DECISION_TASK_ID env will be populated.
     Optional('require-decision-task-id'): bool,
 
     # A list of artifacts to install from 'fetch' tasks.
     Optional('fetches'): {
-        text_type: optionally_keyed_by(
-            'platform', job_description_schema['fetches'][text_type]),
+        basestring: optionally_keyed_by(
+            'platform', job_description_schema['fetches'][basestring]),
     },
 })
 
 transforms = TransformSequence()
 
 
 @transforms.add
 def set_defaults(config, jobs):
@@ -85,17 +83,17 @@ def set_job_name(config, jobs):
             from_name = os.path.splitext(job['job-from'])[0]
             job['name'] = '{}-{}'.format(from_name, job['name'])
         yield job
 
 
 @transforms.add
 def expand_platforms(config, jobs):
     for job in jobs:
-        if isinstance(job['platform'], text_type):
+        if isinstance(job['platform'], basestring):
             yield job
             continue
 
         for platform in job['platform']:
             pjob = copy.deepcopy(job)
             pjob['platform'] = platform
 
             if 'name' in pjob:
@@ -232,17 +230,17 @@ def add_decision_task_id_to_env(config, 
     `require-decision-task-id` config.
     """
     for job in jobs:
         if not job.pop('require-decision-task-id', False):
             yield job
             continue
 
         env = job['worker'].setdefault('env', {})
-        env['DECISION_TASK_ID'] = six.ensure_text(os.environ.get('TASK_ID', ''))
+        env['DECISION_TASK_ID'] = os.environ.get('TASK_ID', '')
         yield job
 
 
 @transforms.add
 def set_code_review_env(config, jobs):
     """
     Add a CODE_REVIEW environment variable when running in code-review bot mode
     """
--- a/taskcluster/taskgraph/transforms/task.py
+++ b/taskcluster/taskgraph/transforms/task.py
@@ -63,66 +63,66 @@ def _compute_geckoview_version(app_versi
     version_without_milestone = re.sub(r'a[0-9]', '', app_version, 1)
     parts = version_without_milestone.split('.')
     return "%s.%s.%s" % (parts[0], parts[1], moz_build_date)
 
 
 # A task description is a general description of a TaskCluster task
 task_description_schema = Schema({
     # the label for this task
-    Required('label'): text_type,
+    Required('label'): basestring,
 
     # description of the task (for metadata)
-    Required('description'): text_type,
+    Required('description'): basestring,
 
     # attributes for this task
-    Optional('attributes'): {text_type: object},
+    Optional('attributes'): {basestring: object},
 
     # relative path (from config.path) to the file task was defined in
-    Optional('job-from'): text_type,
+    Optional('job-from'): basestring,
 
     # dependencies of this task, keyed by name; these are passed through
     # verbatim and subject to the interpretation of the Task's get_dependencies
     # method.
-    Optional('dependencies'): {text_type: object},
+    Optional('dependencies'): {basestring: object},
 
     # Soft dependencies of this task, as a list of tasks labels
-    Optional('soft-dependencies'): [text_type],
+    Optional('soft-dependencies'): [basestring],
 
     Optional('requires'): Any('all-completed', 'all-resolved'),
 
     # expiration and deadline times, relative to task creation, with units
     # (e.g., "14 days").  Defaults are set based on the project.
-    Optional('expires-after'): text_type,
-    Optional('deadline-after'): text_type,
+    Optional('expires-after'): basestring,
+    Optional('deadline-after'): basestring,
 
     # custom routes for this task; the default treeherder routes will be added
     # automatically
-    Optional('routes'): [text_type],
+    Optional('routes'): [basestring],
 
     # custom scopes for this task; any scopes required for the worker will be
     # added automatically. The following parameters will be substituted in each
     # scope:
     #  {level} -- the scm level of this push
     #  {project} -- the project of this push
-    Optional('scopes'): [text_type],
+    Optional('scopes'): [basestring],
 
     # Tags
-    Optional('tags'): {text_type: text_type},
+    Optional('tags'): {basestring: basestring},
 
     # custom "task.extra" content
-    Optional('extra'): {text_type: object},
+    Optional('extra'): {basestring: object},
 
     # treeherder-related information; see
     # https://schemas.taskcluster.net/taskcluster-treeherder/v1/task-treeherder-config.json
     # If not specified, no treeherder extra information or routes will be
     # added to the task
     Optional('treeherder'): {
         # either a bare symbol, or "grp(sym)".
-        'symbol': text_type,
+        'symbol': basestring,
 
         # the job kind
         'kind': Any('build', 'test', 'other'),
 
         # tier for this task
         'tier': int,
 
         # task platform, in the form platform/collection, used to set
@@ -130,20 +130,20 @@ task_description_schema = Schema({
         # treeherder.labels
         'platform': Match('^[A-Za-z0-9_-]{1,50}/[A-Za-z0-9_-]{1,50}$'),
     },
 
     # information for indexing this build so its artifacts can be discovered;
     # if omitted, the build will not be indexed.
     Optional('index'): {
         # the name of the product this build produces
-        'product': text_type,
+        'product': basestring,
 
         # the names to use for this job in the TaskCluster index
-        'job-name': text_type,
+        'job-name': basestring,
 
         # Type of gecko v2 index to use
         'type': Any('generic', 'nightly', 'l10n', 'nightly-with-multi-l10n',
                     'nightly-l10n', 'shippable', 'shippable-l10n',
                     'android-nightly', 'android-nightly-with-multi-l10n'),
 
         # The rank that the task will receive in the TaskCluster
         # index.  A newly completed task supersedes the currently
@@ -164,48 +164,48 @@ task_description_schema = Schema({
             # for non-tier-1 tasks.
             'build_date',
         ),
     },
 
     # The `run_on_projects` attribute, defaulting to "all".  This dictates the
     # projects on which this task should be included in the target task set.
     # See the attributes documentation for details.
-    Optional('run-on-projects'): optionally_keyed_by('build-platform', [text_type]),
+    Optional('run-on-projects'): optionally_keyed_by('build-platform', [basestring]),
 
     # Like `run_on_projects`, `run-on-hg-branches` defaults to "all".
-    Optional('run-on-hg-branches'): optionally_keyed_by('project', [text_type]),
+    Optional('run-on-hg-branches'): optionally_keyed_by('project', [basestring]),
 
     # The `shipping_phase` attribute, defaulting to None. This specifies the
     # release promotion phase that this task belongs to.
     Required('shipping-phase'): Any(
         None,
         'build',
         'promote',
         'push',
         'ship',
     ),
 
     # The `shipping_product` attribute, defaulting to None. This specifies the
     # release promotion product that this task belongs to.
     Required('shipping-product'): Any(
         None,
-        text_type
+        basestring
     ),
 
     # Coalescing provides the facility for tasks to be superseded by the same
     # task in a subsequent commit, if the current task backlog reaches an
     # explicit threshold. Both age and size thresholds need to be met in order
     # for coalescing to be triggered.
     Optional('coalesce'): {
         # A unique identifier per job (typically a hash of the job label) in
         # order to partition tasks into appropriate sets for coalescing. This
         # is combined with the project in order to generate a unique coalescing
         # key for the coalescing service.
-        'job-identifier': text_type,
+        'job-identifier': basestring,
 
         # The minimum amount of time in seconds between two pending tasks with
         # the same coalescing key, before the coalescing service will return
         # tasks.
         'age': int,
 
         # The minimum number of backlogged tasks with the same coalescing key,
         # before the coalescing service will return tasks.
@@ -221,32 +221,32 @@ task_description_schema = Schema({
 
     # Optimization to perform on this task during the optimization phase.
     # Optimizations are defined in taskcluster/taskgraph/optimize.py.
     Required('optimization'): OptimizationSchema,
 
     # the provisioner-id/worker-type for the task.  The following parameters will
     # be substituted in this string:
     #  {level} -- the scm level of this push
-    'worker-type': text_type,
+    'worker-type': basestring,
 
     # Whether the job should use sccache compiler caching.
     Required('needs-sccache'): bool,
 
     # Set of artifacts relevant to release tasks
-    Optional('release-artifacts'): [text_type],
+    Optional('release-artifacts'): [basestring],
 
     # information specific to the worker implementation that will run this task
     Optional('worker'): {
-        Required('implementation'): text_type,
+        Required('implementation'): basestring,
         Extra: object,
     },
 
     # Override the default priority for the project
-    Optional('priority'): text_type,
+    Optional('priority'): basestring,
 })
 
 TC_TREEHERDER_SCHEMA_URL = 'https://github.com/taskcluster/taskcluster-treeherder/' \
                            'blob/master/schemas/task-treeherder-config.yml'
 
 
 UNKNOWN_GROUP_NAME = "Treeherder group {} (from {}) has no name; " \
                      "add it to taskcluster/ci/config.yml"
@@ -397,21 +397,21 @@ def verify_index(config, index):
     Required('os'): 'linux',
 
     # For tasks that will run in docker-worker, this is the
     # name of the docker image or in-tree docker image to run the task in.  If
     # in-tree, then a dependency will be created automatically.  This is
     # generally `desktop-test`, or an image that acts an awful lot like it.
     Required('docker-image'): Any(
         # a raw Docker image path (repo/image:tag)
-        text_type,
+        basestring,
         # an in-tree generated docker image (from `taskcluster/docker/<name>`)
-        {'in-tree': text_type},
+        {'in-tree': basestring},
         # an indexed docker image
-        {'indexed': text_type},
+        {'indexed': basestring},
     ),
 
     # worker features that should be enabled
     Required('chain-of-trust'): bool,
     Required('taskcluster-proxy'): bool,
     Required('allow-ptrace'): bool,
     Required('loopback-video'): bool,
     Required('loopback-audio'): bool,
@@ -423,50 +423,50 @@ def verify_index(config, index):
     # For in-tree Docker images, volumes can be parsed from Dockerfile.
     # This only works for the Dockerfile itself: if a volume is defined in
     # a base image, it will need to be declared here. Out-of-tree Docker
     # images will also require explicit volume annotation.
     #
     # Caches are often mounted to the same path as Docker volumes. In this
     # case, they take precedence over a Docker volume. But a volume still
     # needs to be declared for the path.
-    Optional('volumes'): [text_type],
+    Optional('volumes'): [basestring],
 
     # caches to set up for the task
     Optional('caches'): [{
         # only one type is supported by any of the workers right now
         'type': 'persistent',
 
         # name of the cache, allowing re-use by subsequent tasks naming the
         # same cache
-        'name': text_type,
+        'name': basestring,
 
         # location in the task image where the cache will be mounted
-        'mount-point': text_type,
+        'mount-point': basestring,
 
         # If true, the cache is skipped in untrusted environments
         # (like the Try repo).
         Optional('skip-untrusted'): bool,
     }],
 
     # artifacts to extract from the task image after completion
     Optional('artifacts'): [{
         # type of artifact -- simple file, or recursive directory
         'type': Any('file', 'directory'),
 
         # task image path from which to read artifact
-        'path': text_type,
+        'path': basestring,
 
         # name of the produced artifact (root of the names for
         # type=directory)
-        'name': text_type,
+        'name': basestring,
     }],
 
     # environment variables
-    Required('env'): {text_type: taskref_or_string},
+    Required('env'): {basestring: taskref_or_string},
 
     # the command to run; if not given, docker-worker will default to the
     # command in the docker image
     Optional('command'): [taskref_or_string],
 
     # the maximum time to run, in seconds
     Required('max-run-time'): int,
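
The three accepted docker-image spellings from the schema above, side by side (image names invented):

    worker_raw     = {'docker-image': 'taskcluster/image_builder:0.1.5'}   # raw repo/image:tag
    worker_in_tree = {'docker-image': {'in-tree': 'desktop-build'}}        # built from taskcluster/docker/
    worker_indexed = {'docker-image': {'indexed': 'some.index.route.latest'}}  # index lookup
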
 
@@ -596,17 +596,17 @@ def build_docker_worker_payload(config, 
         for artifact in worker['artifacts']:
             artifacts[artifact['name']] = {
                 'path': artifact['path'],
                 'type': artifact['type'],
                 'expires': task_def['expires'],  # always expire with the task
             }
         payload['artifacts'] = artifacts
 
-    if isinstance(worker.get('docker-image'), text_type):
+    if isinstance(worker.get('docker-image'), basestring):
         out_of_tree_image = worker['docker-image']
         run_task = run_task or out_of_tree_image.startswith(
             'taskcluster/image_builder')
     else:
         out_of_tree_image = None
         image = worker.get('docker-image', {}).get('in-tree')
         run_task = run_task or image == 'image_builder'
 
@@ -710,69 +710,69 @@ def build_docker_worker_payload(config, 
 
     # artifacts to extract from the task image after completion; note that artifacts
     # for the generic worker cannot have names
     Optional('artifacts'): [{
         # type of artifact -- simple file, or recursive directory
         'type': Any('file', 'directory'),
 
         # filesystem path from which to read artifact
-        'path': text_type,
+        'path': basestring,
 
         # if not specified, path is used for artifact name
-        Optional('name'): text_type
+        Optional('name'): basestring
     }],
 
     # Directories and/or files to be mounted.
     # The actual allowed combinations are stricter than the model below,
     # but this provides a simple starting point.
     # See https://docs.taskcluster.net/reference/workers/generic-worker/payload
     Optional('mounts'): [{
         # A unique name for the cache volume, implies writable cache directory
         # (otherwise mount is a read-only file or directory).
-        Optional('cache-name'): text_type,
+        Optional('cache-name'): basestring,
         # Optional content for pre-loading cache, or mandatory content for
         # read-only file or directory. Pre-loaded content can come from either
         # a task artifact or from a URL.
         Optional('content'): {
 
             # *** Either (artifact and task-id) or url must be specified. ***
 
             # Artifact name that contains the content.
-            Optional('artifact'): text_type,
+            Optional('artifact'): basestring,
             # Task ID that has the artifact that contains the content.
             Optional('task-id'): taskref_or_string,
             # URL that supplies the content in response to an unauthenticated
             # GET request.
-            Optional('url'): text_type
+            Optional('url'): basestring
         },
 
         # *** Either file or directory must be specified. ***
 
         # If mounting a cache or read-only directory, the filesystem location of
         # the directory should be specified as a relative path to the task
         # directory here.
-        Optional('directory'): text_type,
+        Optional('directory'): basestring,
         # If mounting a file, specify the relative path within the task
         # directory to mount the file (the file will be read only).
-        Optional('file'): text_type,
+        Optional('file'): basestring,
         # Required if and only if `content` is specified and mounting a
         # directory (not a file). This should be the archive format of the
         # content (either pre-loaded cache or read-only directory).
         Optional('format'): Any('rar', 'tar.bz2', 'tar.gz', 'zip')
     }],
 
     # environment variables
-    Required('env'): {text_type: taskref_or_string},
+    Required('env'): {basestring: taskref_or_string},
 
     # the maximum time to run, in seconds
     Required('max-run-time'): int,
 
     # os user groups for test task workers
-    Optional('os-groups'): [text_type],
+    Optional('os-groups'): [basestring],
 
     # feature for test task to run as administrator
     Optional('run-as-administrator'): bool,
 
     # optional features
     Required('chain-of-trust'): bool,
     Optional('taskcluster-proxy'): bool,
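
Two illustrative generic-worker mount entries that satisfy the schema above: a named writable cache directory, and a read-only file pre-loaded from a URL (paths and URL invented):

    mounts = [
        {'cache-name': 'checkouts',   # writable cache volume...
         'directory': 'checkouts'},   # ...mounted at this task-relative path
        {'content': {'url': 'https://example.com/manifest.tt'},
         'file': 'manifest.tt'},      # read-only single file
    ]
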
 
@@ -894,30 +894,30 @@ def build_generic_worker_payload(config,
     Required('max-run-time'): int,
 
     # list of artifact URLs for the artifacts that should be signed
     Required('upstream-artifacts'): [{
         # taskId of the task with the artifact
         Required('taskId'): taskref_or_string,
 
         # type of signing task (for CoT)
-        Required('taskType'): text_type,
+        Required('taskType'): basestring,
 
         # Paths to the artifacts to sign
-        Required('paths'): [text_type],
+        Required('paths'): [basestring],
 
         # Signing formats to use on each of the paths
-        Required('formats'): [text_type],
+        Required('formats'): [basestring],
     }],
 
     # behavior for mac iscript
     Optional('mac-behavior'): Any(
         "mac_notarize", "mac_sign", "mac_sign_and_pkg", "mac_geckodriver",
     ),
-    Optional('entitlements-url'): text_type,
+    Optional('entitlements-url'): basestring,
 })
 def build_scriptworker_signing_payload(config, task, task_def):
     worker = task['worker']
 
     task_def['payload'] = {
         'maxRunTime': worker['max-run-time'],
         'upstreamArtifacts':  worker['upstream-artifacts']
     }
@@ -936,42 +936,42 @@ def build_scriptworker_signing_payload(c
     task['release-artifacts'] = list(artifacts)
 
 
 @payload_builder('beetmover', schema={
     # the maximum time to run, in seconds
     Required('max-run-time', default=600): int,
 
     # locale key, if this is a locale beetmover job
-    Optional('locale'): text_type,
+    Optional('locale'): basestring,
 
     Optional('partner-public'): bool,
 
     Required('release-properties'): {
-        'app-name': text_type,
-        'app-version': text_type,
-        'branch': text_type,
-        'build-id': text_type,
-        'hash-type': text_type,
-        'platform': text_type,
+        'app-name': basestring,
+        'app-version': basestring,
+        'branch': basestring,
+        'build-id': basestring,
+        'hash-type': basestring,
+        'platform': basestring,
     },
 
     # list of artifact URLs for the artifacts that should be beetmoved
     Required('upstream-artifacts'): [{
         # taskId of the task with the artifact
         Required('taskId'): taskref_or_string,
 
         # type of signing task (for CoT)
-        Required('taskType'): text_type,
+        Required('taskType'): basestring,
 
         # Paths to the artifacts to sign
-        Required('paths'): [text_type],
+        Required('paths'): [basestring],
 
         # locale is used to map upload path and allow for duplicate simple names
-        Required('locale'): text_type,
+        Required('locale'): basestring,
     }],
     Optional('artifact-map'): object,
 })
 def build_beetmover_payload(config, task, task_def):
     worker = task['worker']
     release_config = get_release_config(config)
     release_properties = worker['release-properties']
 
@@ -996,17 +996,17 @@ def build_beetmover_payload(config, task
         task_def['payload']['is_partner_repack_public'] = worker['partner-public']
     if release_config:
         task_def['payload'].update(release_config)
 
 
 @payload_builder('beetmover-push-to-release', schema={
     # the maximum time to run, in seconds
     Required('max-run-time'): int,
-    Required('product'): text_type,
+    Required('product'): basestring,
 })
 def build_beetmover_push_to_release_payload(config, task, task_def):
     worker = task['worker']
     release_config = get_release_config(config)
     partners = ['{}/{}'.format(p, s) for p, s, _ in get_partners_to_be_published(config)]
 
     task_def['payload'] = {
         'maxRunTime': worker['max-run-time'],
@@ -1015,29 +1015,29 @@ def build_beetmover_push_to_release_payl
         'build_number': release_config['build_number'],
         'partners': partners,
     }
 
 
 @payload_builder('beetmover-maven', schema={
     Required('max-run-time', default=600): int,
     Required('release-properties'): {
-        'app-name': text_type,
-        'app-version': text_type,
-        'branch': text_type,
-        'build-id': text_type,
-        'artifact-id': text_type,
-        'hash-type': text_type,
-        'platform': text_type,
+        'app-name': basestring,
+        'app-version': basestring,
+        'branch': basestring,
+        'build-id': basestring,
+        'artifact-id': basestring,
+        'hash-type': basestring,
+        'platform': basestring,
     },
 
     Required('upstream-artifacts'): [{
         Required('taskId'): taskref_or_string,
-        Required('taskType'): text_type,
-        Required('paths'): [text_type],
+        Required('taskType'): basestring,
+        Required('paths'): [basestring],
         Required('zipExtract', default=False): bool,
     }],
     Optional('artifact-map'): object,
 })
 def build_beetmover_maven_payload(config, task, task_def):
     build_beetmover_payload(config, task, task_def)
 
     task_def['payload']['artifact_id'] = task['worker']['release-properties']['artifact-id']
@@ -1045,47 +1045,47 @@ def build_beetmover_maven_payload(config
         task_def['payload']['artifactMap'] = task['worker']['artifact-map']
 
     del task_def['payload']['releaseProperties']['hashType']
     del task_def['payload']['releaseProperties']['platform']
 
 
 @payload_builder('balrog', schema={
     Required('balrog-action'): Any(*BALROG_ACTIONS),
-    Optional('product'): text_type,
-    Optional('platforms'): [text_type],
-    Optional('release-eta'): text_type,
-    Optional('channel-names'): optionally_keyed_by('release-type', [text_type]),
+    Optional('product'): basestring,
+    Optional('platforms'): [basestring],
+    Optional('release-eta'): basestring,
+    Optional('channel-names'): optionally_keyed_by('release-type', [basestring]),
     Optional('require-mirrors'): bool,
     Optional('publish-rules'): optionally_keyed_by('release-type', 'release-level', [int]),
     Optional('rules-to-update'): optionally_keyed_by(
-        'release-type', 'release-level', [text_type]),
-    Optional('archive-domain'): optionally_keyed_by('release-level', text_type),
-    Optional('download-domain'): optionally_keyed_by('release-level', text_type),
-    Optional('blob-suffix'): text_type,
-    Optional('complete-mar-filename-pattern'): text_type,
-    Optional('complete-mar-bouncer-product-pattern'): text_type,
+        'release-type', 'release-level', [basestring]),
+    Optional('archive-domain'): optionally_keyed_by('release-level', basestring),
+    Optional('download-domain'): optionally_keyed_by('release-level', basestring),
+    Optional('blob-suffix'): basestring,
+    Optional('complete-mar-filename-pattern'): basestring,
+    Optional('complete-mar-bouncer-product-pattern'): basestring,
     Optional('update-line'): object,
-    Optional('suffixes'): [text_type],
+    Optional('suffixes'): [basestring],
     Optional('background-rate'): optionally_keyed_by(
         'release-type', 'beta-number', Any(int, None)),
     Optional('force-fallback-mapping-update'): optionally_keyed_by(
         'release-type', 'beta-number', bool),
 
 
     # list of artifact URLs for the artifacts that should be beetmoved
     Optional('upstream-artifacts'): [{
         # taskId of the task with the artifact
         Required('taskId'): taskref_or_string,
 
         # type of signing task (for CoT)
-        Required('taskType'): text_type,
+        Required('taskType'): basestring,
 
         # Paths to the artifacts to sign
-        Required('paths'): [text_type],
+        Required('paths'): [basestring],
     }],
 })
 def build_balrog_payload(config, task, task_def):
     worker = task['worker']
     release_config = get_release_config(config)
     beta_number = None
     if 'b' in release_config['version']:
         beta_number = release_config['version'].split('b')[-1]
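
The beta-number probe above is a plain string split; a sketch of how it behaves for release versus beta version strings:

    def beta_number(version):
        # Mirrors the logic above: '71.0b13' -> '13'; '71.0' -> None.
        if 'b' in version:
            return version.split('b')[-1]
        return None

    print(beta_number('71.0b13'))  # 13
    print(beta_number('71.0'))     # None
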
@@ -1149,47 +1149,47 @@ def build_bouncer_aliases_payload(config
 
     task_def['payload'] = {
         'aliases_entries': worker['entries']
     }
 
 
 @payload_builder('bouncer-locations', schema={
     Required('implementation'): 'bouncer-locations',
-    Required('bouncer-products'): [text_type],
+    Required('bouncer-products'): [basestring],
 })
 def build_bouncer_locations_payload(config, task, task_def):
     worker = task['worker']
     release_config = get_release_config(config)
 
     task_def['payload'] = {
         'bouncer_products': worker['bouncer-products'],
         'version': release_config['version'],
         'product': task['shipping-product'],
     }
 
 
 @payload_builder('bouncer-submission', schema={
-    Required('locales'): [text_type],
+    Required('locales'): [basestring],
     Required('entries'): object,
 })
 def build_bouncer_submission_payload(config, task, task_def):
     worker = task['worker']
 
     task_def['payload'] = {
         'locales':  worker['locales'],
         'submission_entries': worker['entries']
     }
 
 
 @payload_builder('push-apk', schema={
     Required('upstream-artifacts'): [{
         Required('taskId'): taskref_or_string,
-        Required('taskType'): text_type,
-        Required('paths'): [text_type],
+        Required('taskType'): basestring,
+        Required('paths'): [basestring],
         Optional('optional', default=False): bool,
     }],
 
     # "Invalid" is a noop for try and other non-supported branches
     Required('google-play-track'): Any('production', 'beta', 'alpha', 'rollout', 'internal'),
     Required('commit'): bool,
     Optional('rollout-percentage'): Any(int, None),
 })
@@ -1202,45 +1202,45 @@ def build_push_apk_payload(config, task,
         'google_play_track': worker['google-play-track'],
     }
 
     if worker.get('rollout-percentage', None):
         task_def['payload']['rollout_percentage'] = worker['rollout-percentage']
 
 
 @payload_builder('push-snap', schema={
-    Required('channel'): text_type,
+    Required('channel'): basestring,
     Required('upstream-artifacts'): [{
         Required('taskId'): taskref_or_string,
-        Required('taskType'): text_type,
-        Required('paths'): [text_type],
+        Required('taskType'): basestring,
+        Required('paths'): [basestring],
     }],
 })
 def build_push_snap_payload(config, task, task_def):
     worker = task['worker']
 
     task_def['payload'] = {
         'channel': worker['channel'],
         'upstreamArtifacts':  worker['upstream-artifacts'],
     }
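
For illustration, a hypothetical worker section that satisfies the push-snap schema above, together with the payload the builder derives from it ('<build>' and the artifact path are placeholders; taskId uses the task-reference form permitted by taskref_or_string):

    worker = {
        'channel': 'beta',
        'upstream-artifacts': [{
            'taskId': {'task-reference': '<build>'},
            'taskType': 'build',
            'paths': ['public/build/firefox.snap'],  # hypothetical artifact path
        }],
    }
    # What build_push_snap_payload produces from it:
    payload = {
        'channel': worker['channel'],
        'upstreamArtifacts': worker['upstream-artifacts'],
    }
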
 
 
 @payload_builder('shipit-shipped', schema={
-    Required('release-name'): text_type,
+    Required('release-name'): basestring,
 })
 def build_ship_it_shipped_payload(config, task, task_def):
     worker = task['worker']
 
     task_def['payload'] = {
         'release_name': worker['release-name']
     }
 
 
 @payload_builder('shipit-maybe-release', schema={
-    Required('phase'): text_type,
+    Required('phase'): basestring,
 })
 def build_ship_it_maybe_release_payload(config, task, task_def):
     # expect branch name, including path
     branch = config.params['head_repository'][len('https://hg.mozilla.org/'):]
     # 'version' is e.g. '71.0b13' (app_version doesn't have beta number)
     version = config.params['version']
 
     task_def['payload'] = {
@@ -1251,49 +1251,49 @@ def build_ship_it_maybe_release_payload(
         'cron_revision': config.params['head_rev'],
     }
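
The branch computation above is a plain prefix strip on head_repository. A worked example with a hypothetical repository URL:

    head_repository = 'https://hg.mozilla.org/releases/mozilla-beta'  # hypothetical
    branch = head_repository[len('https://hg.mozilla.org/'):]
    assert branch == 'releases/mozilla-beta'
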
 
 
 @payload_builder('push-addons', schema={
     Required('channel'): Any('listed', 'unlisted'),
     Required('upstream-artifacts'): [{
         Required('taskId'): taskref_or_string,
-        Required('taskType'): text_type,
-        Required('paths'): [text_type],
+        Required('taskType'): basestring,
+        Required('paths'): [basestring],
     }],
 })
 def build_push_addons_payload(config, task, task_def):
     worker = task['worker']
 
     task_def['payload'] = {
         'channel': worker['channel'],
         'upstreamArtifacts': worker['upstream-artifacts'],
     }
 
 
 @payload_builder('treescript', schema={
     Required('tags'): [Any('buildN', 'release', None)],
     Required('bump'): bool,
-    Optional('bump-files'): [text_type],
-    Optional('repo-param-prefix'): text_type,
+    Optional('bump-files'): [basestring],
+    Optional('repo-param-prefix'): basestring,
     Optional('dontbuild'): bool,
     Optional('ignore-closed-tree'): bool,
     Required('force-dry-run', default=True): bool,
     Required('push', default=False): bool,
-    Optional('source-repo'): text_type,
+    Optional('source-repo'): basestring,
     Optional('l10n-bump-info'): {
-        Required('name'): text_type,
-        Required('path'): text_type,
-        Required('version-path'): text_type,
-        Optional('revision-url'): text_type,
+        Required('name'): basestring,
+        Required('path'): basestring,
+        Required('version-path'): basestring,
+        Optional('revision-url'): basestring,
         Optional('ignore-config'): object,
         Required('platform-configs'): [{
-            Required('platforms'): [text_type],
-            Required('path'): text_type,
-            Optional('format'): text_type,
+            Required('platforms'): [basestring],
+            Required('path'): basestring,
+            Optional('format'): basestring,
         }],
     },
 })
 def build_treescript_payload(config, task, task_def):
     worker = task['worker']
     release_config = get_release_config(config)
 
     task_def['payload'] = {'actions': []}
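
As a reading aid, a hypothetical 'l10n-bump-info' value that the treescript schema above would accept; the file paths are illustrative, not taken from any real kind definition:

    l10n_bump_info = {
        'name': 'Firefox l10n changesets',
        'path': 'browser/locales/l10n-changesets.json',
        'version-path': 'browser/config/version.txt',
        'platform-configs': [{
            'platforms': ['linux64', 'macosx64', 'win64'],
            'path': 'browser/locales/shipped-locales',
        }],
    }
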
@@ -1368,40 +1368,40 @@ def build_invalid_payload(config, task, 
 def build_dummy_payload(config, task, task_def):
     task_def['payload'] = {}
 
 
 @payload_builder('script-engine-autophone', schema={
     Required('os'): Any('macosx', 'linux'),
 
     # A link for an executable to download
-    Optional('context'): text_type,
+    Optional('context'): basestring,
 
     # Tells the worker whether machine should reboot
     # Tells the worker whether the machine should reboot
     # after the task is finished.
     Optional('reboot'):
     Any(False, 'always', 'never', 'on-exception', 'on-failure'),
 
     # the command to run
     Optional('command'): [taskref_or_string],
 
     # environment variables
-    Optional('env'): {text_type: taskref_or_string},
+    Optional('env'): {basestring: taskref_or_string},
 
     # artifacts to extract from the task image after completion
     Optional('artifacts'): [{
         # type of artifact -- simple file, or recursive directory
         Required('type'): Any('file', 'directory'),
 
         # task image path from which to read artifact
-        Required('path'): text_type,
+        Required('path'): basestring,
 
         # name of the produced artifact (root of the names for
         # type=directory)
-        Required('name'): text_type,
+        Required('name'): basestring,
     }],
 })
 def build_script_engine_autophone_payload(config, task, task_def):
     worker = task['worker']
     artifacts = map(lambda artifact: {
         'name': artifact['name'],
         'path': artifact['path'],
         'type': artifact['type'],
@@ -1950,17 +1950,17 @@ def build_task(config, tasks):
         # Resolve run-on-projects
         build_platform = attributes.get('build_platform')
         resolve_keyed_by(task, 'run-on-projects', item_name=task['label'],
                          **{'build-platform': build_platform})
         attributes['run_on_projects'] = task.get('run-on-projects', ['all'])
         attributes['always_target'] = task['always-target']
         # This logic is here since downstream tasks don't always match their
         # upstream dependency's shipping_phase.
-        # A text_type task['shipping-phase'] takes precedence, then
+        # A basestring task['shipping-phase'] takes precedence, then
         # an existing attributes['shipping_phase'], then fall back to None.
         if task.get('shipping-phase') is not None:
             attributes['shipping_phase'] = task['shipping-phase']
         else:
             attributes.setdefault('shipping_phase', None)
         # shipping_product will always match the upstream task's
         # shipping_product, so a pre-set existing attributes['shipping_product']
         # takes precedence over task['shipping-product']. However, make sure
@@ -2088,25 +2088,25 @@ def check_run_task_caches(config, tasks)
     )
 
     suffix = _run_task_suffix()
 
     for task in tasks:
         payload = task['task'].get('payload', {})
         command = payload.get('command') or ['']
 
-        main_command = command[0] if isinstance(command[0], text_type) else ''
+        main_command = command[0] if isinstance(command[0], basestring) else ''
         run_task = main_command.endswith('run-task')
 
         require_sparse_cache = False
         have_sparse_cache = False
 
         if run_task:
             for arg in command[1:]:
-                if not isinstance(arg, text_type):
+                if not isinstance(arg, basestring):
                     continue
 
                 if arg == '--':
                     break
 
                 if arg.startswith('--gecko-sparse-profile'):
                     if '=' not in arg:
                         raise Exception(
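
The hunk cuts off inside the error branch; in outline, the scan walks the run-task arguments, skips non-string entries (task references), and stops at the '--' separator that ends run-task's own options. A reduced, runnable sketch under those assumptions (str standing in for the py2 basestring above; the real check also raises when the '=' form is not used):

    def find_sparse_profile(command):
        """Return the value of --gecko-sparse-profile, or None."""
        for arg in command[1:]:
            if not isinstance(arg, str):  # basestring in the py2 code above
                continue
            if arg == '--':
                break  # everything after '--' belongs to the wrapped command
            if arg.startswith('--gecko-sparse-profile='):
                return arg.split('=', 1)[1]
        return None

    cmd = ['/builds/worker/bin/run-task',
           '--gecko-sparse-profile=build/sparse-profiles/taskgraph',
           '--', 'bash', '-c', 'true']
    assert find_sparse_profile(cmd) == 'build/sparse-profiles/taskgraph'
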
--- a/taskcluster/taskgraph/transforms/tests.py
+++ b/taskcluster/taskgraph/transforms/tests.py
@@ -16,17 +16,16 @@ transforms, and these transforms should 
 what should run where. this is the wrong place for special-casing platforms,
 for example - use `all_tests.py` instead.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import copy
 import logging
-from six import text_type
 
 from mozbuild.schedules import INCLUSIVE_COMPONENTS
 from moztest.resolve import TEST_SUITES
 from voluptuous import (
     Any,
     Optional,
     Required,
     Exclusive,
@@ -210,70 +209,70 @@ transforms = TransformSequence()
 #
 # This is a great place for baffling cruft to accumulate, and that makes
 # everyone move more slowly.  Be considerate of your fellow hackers!
 # See the warnings in taskcluster/docs/how-tos.rst
 #
 # *****WARNING*****
 test_description_schema = Schema({
     # description of the suite, for the task metadata
-    'description': text_type,
+    'description': basestring,
 
     # test suite category and name
     Optional('suite'): Any(
-        text_type,
-        {Optional('category'): text_type, Optional('name'): text_type},
+        basestring,
+        {Optional('category'): basestring, Optional('name'): basestring},
     ),
 
     # base work directory used to set up the task.
     Optional('workdir'): optionally_keyed_by(
         'test-platform',
-        Any(text_type, 'default')),
+        Any(basestring, 'default')),
 
     # the name by which this test suite is addressed in try syntax; defaults to
     # the test-name.  This will translate to the `unittest_try_name` or
     # `talos_try_name` attribute.
-    Optional('try-name'): text_type,
+    Optional('try-name'): basestring,
 
     # additional tags to mark up this type of test
-    Optional('tags'): {text_type: object},
+    Optional('tags'): {basestring: object},
 
     # the symbol, or group(symbol), under which this task should appear in
     # treeherder.
-    'treeherder-symbol': text_type,
+    'treeherder-symbol': basestring,
 
     # the value to place in task.extra.treeherder.machine.platform; ideally
     # this is the same as build-platform, and that is the default, but in
     # practice it's not always a match.
-    Optional('treeherder-machine-platform'): text_type,
+    Optional('treeherder-machine-platform'): basestring,
 
     # attributes to appear in the resulting task (later transforms will add the
     # common attributes)
-    Optional('attributes'): {text_type: object},
+    Optional('attributes'): {basestring: object},
 
     # relative path (from config.path) to the file task was defined in
-    Optional('job-from'): text_type,
+    Optional('job-from'): basestring,
 
     # The `run_on_projects` attribute, defaulting to "all".  This dictates the
     # projects on which this task should be included in the target task set.
     # See the attributes documentation for details.
     #
     # Note that the special case 'built-projects', the default, uses the parent
     # build task's run-on-projects, meaning that tests run only on platforms
     # that are built.
     Optional('run-on-projects'): optionally_keyed_by(
         'test-platform',
-        Any([text_type], 'built-projects')),
+        Any([basestring], 'built-projects')),
 
     # Same as `run-on-projects` except it only applies to Fission tasks. Fission
     # tasks will ignore `run_on_projects` and non-Fission tasks will ignore
     # `fission-run-on-projects`.
     Optional('fission-run-on-projects'): optionally_keyed_by(
         'test-platform',
-        Any([text_type], 'built-projects')),
+        Any([basestring], 'built-projects')),
 
     # the sheriffing tier for this task (default: set based on test platform)
     Optional('tier'): optionally_keyed_by(
         'test-platform',
         Any(int, 'default')),
 
     # Same as `tier` except it only applies to Fission tasks. Fission tasks
     # will ignore `tier` and non-Fission tasks will ignore `fission-tier`.
@@ -285,17 +284,17 @@ test_description_schema = Schema({
     # platform by passing a dictionary in the `by-test-platform` key.  If the
     # test platform is not found, the key 'default' will be tried.
     Required('chunks'): optionally_keyed_by(
         'test-platform',
         int),
 
     # the time (with unit) after which this task is deleted; default depends on
     # the branch (see below)
-    Optional('expires-after'): text_type,
+    Optional('expires-after'): basestring,
 
     # The different configurations that should be run against this task, defined
     # in the TEST_VARIANTS object.
     Optional('variants'): optionally_keyed_by(
         'test-platform', 'project',
         Any(TEST_VARIANTS.keys())),
 
     # Whether to run this task with e10s.  If false, run
@@ -333,21 +332,21 @@ test_description_schema = Schema({
     # For tasks that will run in docker-worker, this is the
     # name of the docker image or in-tree docker image to run the task in.  If
     # in-tree, then a dependency will be created automatically.  This is
     # generally `desktop-test`, or an image that acts an awful lot like it.
     Required('docker-image'): optionally_keyed_by(
         'test-platform',
         Any(
             # a raw Docker image path (repo/image:tag)
-            text_type,
+            basestring,
             # an in-tree generated docker image (from `taskcluster/docker/<name>`)
-            {'in-tree': text_type},
+            {'in-tree': basestring},
             # an indexed docker image
-            {'indexed': text_type},
+            {'indexed': basestring},
         )
     ),
 
     # seconds of runtime after which the task will be killed.  Like 'chunks',
     # this can be keyed by test platform.
     Required('max-run-time'): optionally_keyed_by(
         'test-platform',
         int),
@@ -362,39 +361,39 @@ test_description_schema = Schema({
     Optional('reboot'):
         Any(False, 'always', 'on-exception', 'on-failure'),
 
     # What to run
     Required('mozharness'): {
         # the mozharness script used to run this task
         Required('script'): optionally_keyed_by(
             'test-platform',
-            text_type),
+            basestring),
 
         # the config files required for the task
         Required('config'): optionally_keyed_by(
             'test-platform',
-            [text_type]),
+            [basestring]),
 
         # mochitest flavor for mochitest runs
-        Optional('mochitest-flavor'): text_type,
+        Optional('mochitest-flavor'): basestring,
 
         # any additional actions to pass to the mozharness command
-        Optional('actions'): [text_type],
+        Optional('actions'): [basestring],
 
         # additional command-line options for mozharness, beyond those
         # automatically added
         Required('extra-options'): optionally_keyed_by(
             'test-platform',
-            [text_type]),
+            [basestring]),
 
         # the artifact name (including path) to test on the build task; this is
         # generally set in a per-kind transformation
-        Optional('build-artifact-name'): text_type,
-        Optional('installer-url'): text_type,
+        Optional('build-artifact-name'): basestring,
+        Optional('installer-url'): basestring,
 
         # If not false, tooltool downloads will be enabled via relengAPIProxy
         # for either just public files, or all files.  Not supported on Windows
         Required('tooltool-downloads'): Any(
             False,
             'public',
             'internal',
         ),
@@ -419,97 +418,97 @@ test_description_schema = Schema({
             bool),
 
         Required('requires-signed-builds'): optionally_keyed_by(
             'test-platform',
             bool),
     },
 
     # The set of test manifests to run.
-    Optional('test-manifests'): [text_type],
+    Optional('test-manifests'): [basestring],
 
     # The current chunk (if chunking is enabled).
     Optional('this-chunk'): int,
 
     # os user groups for test task workers; required scopes, will be
     # added automatically
     Optional('os-groups'): optionally_keyed_by(
         'test-platform',
-        [text_type]),
+        [basestring]),
 
     Optional('run-as-administrator'): optionally_keyed_by(
         'test-platform',
         bool),
 
     # -- values supplied by the task-generation infrastructure
 
     # the platform of the build this task is testing
-    'build-platform': text_type,
+    'build-platform': basestring,
 
     # the label of the build task generating the materials to test
-    'build-label': text_type,
+    'build-label': basestring,
 
     # the label of the signing task generating the materials to test.
     # Signed builds are used in xpcshell tests on Windows, for instance.
-    Optional('build-signing-label'): text_type,
+    Optional('build-signing-label'): basestring,
 
     # the build's attributes
-    'build-attributes': {text_type: object},
+    'build-attributes': {basestring: object},
 
     # the platform on which the tests will run
-    'test-platform': text_type,
+    'test-platform': basestring,
 
     # limit the test-platforms (as defined in test-platforms.yml)
     # that the test will run on
     Optional('limit-platforms'): optionally_keyed_by(
         'app',
-        [text_type]
+        [basestring]
     ),
 
     # the name of the test (the key in tests.yml)
-    'test-name': text_type,
+    'test-name': basestring,
 
     # the product name, defaults to firefox
-    Optional('product'): text_type,
+    Optional('product'): basestring,
 
     # conditional files to determine when these tests should be run
     Exclusive(Optional('when'), 'optimization'): {
-        Optional('files-changed'): [text_type],
+        Optional('files-changed'): [basestring],
     },
 
     # Optimization to perform on this task during the optimization phase.
     # Optimizations are defined in taskcluster/taskgraph/optimize.py.
     Exclusive(Optional('optimization'), 'optimization'): OptimizationSchema,
 
     # The SCHEDULES component for this task; this defaults to the suite
     # (not including the flavor) but can be overridden here.
-    Exclusive(Optional('schedules-component'), 'optimization'): text_type,
+    Exclusive(Optional('schedules-component'), 'optimization'): basestring,
 
     Optional('worker-type'): optionally_keyed_by(
         'test-platform',
-        Any(text_type, None),
+        Any(basestring, None),
     ),
 
     Optional(
         'require-signed-extensions',
         description="Whether the build being tested requires extensions be signed.",
     ): optionally_keyed_by('release-type', 'test-platform', bool),
 
     # The target name, specifying the build artifact to be tested.
     # If None or not specified, a transform sets the target based on OS:
     # target.dmg (Mac), target.apk (Android), target.tar.bz2 (Linux),
     # or target.zip (Windows).
     Optional('target'): optionally_keyed_by(
         'test-platform',
-        Any(text_type, None, {'index': text_type, 'name': text_type}),
+        Any(basestring, None, {'index': basestring, 'name': basestring}),
     ),
 
     # A list of artifacts to install from 'fetch' tasks.
     Optional('fetches'): {
-        text_type: optionally_keyed_by('test-platform', [text_type])
+        basestring: optionally_keyed_by('test-platform', [basestring])
     },
 }, required=True)
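
Among the fields above, 'docker-image' accepts three shapes. A small self-check with voluptuous, restated with str so the sketch also runs on Python 3 (the image names and the index path are hypothetical):

    import voluptuous

    docker_image = voluptuous.Schema(voluptuous.Any(
        str,                  # raw Docker image path (repo/image:tag)
        {'in-tree': str},     # built from taskcluster/docker/<name>
        {'indexed': str},     # resolved through the task index
    ))

    docker_image('taskcluster/image_builder:2.0.0')
    docker_image({'in-tree': 'desktop1604-test'})
    docker_image({'indexed': 'gecko.cache.level-3.docker-images.v2.desktop-test.latest'})
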
 
 
 @transforms.add
 def handle_keyed_by_mozharness(config, tests):
     """Resolve a mozharness field if it is keyed by something"""
     fields = [
@@ -631,17 +630,17 @@ def limit_platforms(config, tests):
 transforms.add_validate(test_description_schema)
 
 
 @transforms.add
 def handle_suite_category(config, tests):
     for test in tests:
         test.setdefault('suite', {})
 
-        if isinstance(test['suite'], text_type):
+        if isinstance(test['suite'], basestring):
             test['suite'] = {'name': test['suite']}
 
         suite = test['suite'].setdefault('name', test['test-name'])
         category = test['suite'].setdefault('category', suite)
 
         test.setdefault('attributes', {})
         test['attributes']['unittest_suite'] = suite
         test['attributes']['unittest_category'] = category
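
Traced in isolation, the normalization above turns a bare string into the dict form and then backfills the missing keys. A toy equivalent (str standing in for the py2 basestring):

    def normalize_suite(test):
        suite = test.setdefault('suite', {})
        if isinstance(suite, str):  # basestring in the py2 code above
            test['suite'] = suite = {'name': suite}
        name = suite.setdefault('name', test['test-name'])
        suite.setdefault('category', name)
        return test

    t = normalize_suite({'test-name': 'mochitest-chrome', 'suite': 'mochitest'})
    assert t['suite'] == {'name': 'mochitest', 'category': 'mochitest'}
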
--- a/taskcluster/taskgraph/transforms/update_verify.py
+++ b/taskcluster/taskgraph/transforms/update_verify.py
@@ -20,18 +20,19 @@ def add_command(config, tasks):
     config_tasks = {}
     for dep in config.kind_dependencies_tasks:
         if 'update-verify-config' in dep.kind or 'update-verify-next-config' in dep.kind:
             config_tasks[dep.name] = dep
 
     for task in tasks:
         config_task = config_tasks[task['name']]
         total_chunks = task["extra"]["chunks"]
-        task['worker'].setdefault('env', {})['CHANNEL'] = (
-            config_task.task['extra']['channel'])
+        task['worker'].setdefault('env', {}).update(
+            CHANNEL=config_task.task['extra']['channel'],
+        )
         task.setdefault('fetches', {})[config_task.label] = [
             "update-verify.cfg",
         ]
         task['treeherder'] = inherit_treeherder_from_dep(task, config_task)
 
         for this_chunk in range(1, total_chunks+1):
             chunked = deepcopy(task)
             chunked["treeherder"]["symbol"] = add_suffix(
--- a/taskcluster/taskgraph/util/docker.py
+++ b/taskcluster/taskgraph/util/docker.py
@@ -5,17 +5,16 @@
 from __future__ import absolute_import, print_function, unicode_literals
 
 import hashlib
 import json
 import os
 import re
 import requests
 import requests_unixsocket
-import six
 import sys
 import urllib
 import urlparse
 
 from mozbuild.util import memoize
 from mozpack.files import GeneratedFile
 from mozpack.archive import (
     create_tar_gz_from_files,
@@ -165,33 +164,32 @@ class VoidWriter(object):
     data."""
     def write(self, buf):
         pass
 
 
 def generate_context_hash(topsrcdir, image_path, image_name, args=None):
     """Generates a sha256 hash for context directory used to build an image."""
 
-    return stream_context_tar(
-        topsrcdir, image_path, VoidWriter(), image_name, args)
+    return stream_context_tar(topsrcdir, image_path, VoidWriter(), image_name, args)
 
 
 class HashingWriter(object):
     """A file object with write capabilities that hashes the written data at
     the same time it passes down to a real file object."""
     def __init__(self, writer):
         self._hash = hashlib.sha256()
         self._writer = writer
 
     def write(self, buf):
         self._hash.update(buf)
         self._writer.write(buf)
 
     def hexdigest(self):
-        return six.ensure_text(self._hash.hexdigest())
+        return self._hash.hexdigest()
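
Usage sketch for HashingWriter: wrap any writable file object, and the sha256 of everything written through it is available afterwards. This is how generate_context_hash above obtains a digest without keeping the tarball around (VoidWriter discards the bytes). Assuming the class is importable from its real home, taskgraph.util.docker:

    import io
    from taskgraph.util.docker import HashingWriter

    buf = io.BytesIO()
    writer = HashingWriter(buf)
    writer.write(b'FROM ubuntu:18.04\n')
    writer.write(b'RUN apt-get update\n')
    digest = writer.hexdigest()  # sha256 of both writes, hex-encoded
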
 
 
 def create_context_tar(topsrcdir, context_dir, out_path, prefix, args=None):
     """Create a context tarball.
 
     A directory ``context_dir`` containing a Dockerfile will be assembled into
     a gzipped tar file at ``out_path``. Files inside the archive will be
     prefixed by directory ``prefix``.
--- a/taskcluster/taskgraph/util/hg.py
+++ b/taskcluster/taskgraph/util/hg.py
@@ -2,17 +2,16 @@
 
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import requests
-import six
 import subprocess
 from redo import retry
 
 PUSHLOG_TMPL = '{}/json-pushes?version=2&changeset={}&tipsonly=1&full=1'
 
 
 def find_hg_revision_push_info(repository, revision):
     """Given the parameters for this action and a revision, find the
@@ -44,21 +43,21 @@ def find_hg_revision_push_info(repositor
         'pushid': pushid,
         'user': pushes[pushid]['user'],
     }
 
 
 def get_hg_revision_branch(root, revision):
     """Given the parameters for a revision, find the hg_branch (aka
     relbranch) of the revision."""
-    return six.ensure_text(subprocess.check_output([
+    return subprocess.check_output([
         'hg', 'identify',
         '-T', '{branch}',
         '--rev', revision,
-    ], cwd=root, universal_newlines=True))
+    ], cwd=root)
 
 
 # For these functions, we assume that run-task has correctly checked out the
 # revision indicated by GECKO_HEAD_REF, so all that remains is to see what the
 # current revision is.  Mercurial refers to that as `.`.
 def get_hg_commit_message(root):
     return subprocess.check_output(['hg', 'log', '-r', '.', '-T', '{desc}'], cwd=root)
 
--- a/taskcluster/taskgraph/util/schema.py
+++ b/taskcluster/taskgraph/util/schema.py
@@ -51,17 +51,17 @@ def optionally_keyed_by(*arguments):
     fields = arguments[:-1]
 
     # build the nestable schema by generating schema = Any(schema,
     # by-fld1, by-fld2, by-fld3) once for each field.  So we don't allow
     # infinite nesting, but one level of nesting for each field.
     for _ in arguments:
         options = [schema]
         for field in fields:
-            options.append({'by-' + field: {text_type: schema}})
+            options.append({'by-' + field: {basestring: schema}})
         schema = voluptuous.Any(*options)
     return schema
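
To make the by-field shape concrete: a field built with optionally_keyed_by('test-platform', schema) accepts either a literal value or a {'by-test-platform': {...}} dict whose keys are matched as regexes against the item's attribute, with a 'default' fallback. A toy resolver under those assumptions; the real resolve_keyed_by below is more general:

    import re

    def resolve(value, **attrs):
        """Collapse one {'by-<field>': {...}} level against attrs."""
        if not isinstance(value, dict) or len(value) != 1:
            return value
        (key,) = value
        field = key[len('by-'):].replace('-', '_')
        for pattern, subvalue in value[key].items():
            if pattern != 'default' and re.match(pattern + '$', attrs[field]):
                return subvalue
        return value[key].get('default')

    worker_type = {'by-test-platform': {
        'windows10-64.*': 'win10-worker',  # hypothetical worker aliases
        'default': 'linux-worker',
    }}
    assert resolve(worker_type, test_platform='windows10-64/opt') == 'win10-worker'
    assert resolve(worker_type, test_platform='linux64/debug') == 'linux-worker'
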
 
 
 def resolve_keyed_by(item, field, item_name, **extra_values):
     """
     For values which can either accept a literal value, or be keyed by some
     other attribute of the item, perform that lookup and replacement in-place
@@ -137,19 +137,19 @@ WHITELISTED_SCHEMA_IDENTIFIERS = [
 def check_schema(schema):
     identifier_re = re.compile('^[a-z][a-z0-9-]*$')
 
     def whitelisted(path):
         return any(f(path) for f in WHITELISTED_SCHEMA_IDENTIFIERS)
 
     def iter(path, sch):
         def check_identifier(path, k):
-            if k in (text_type, text_type, voluptuous.Extra):
+            if k in (basestring, text_type, voluptuous.Extra):
                 pass
-            elif isinstance(k, text_type):
+            elif isinstance(k, basestring):
                 if not identifier_re.match(k) and not whitelisted(path):
                     raise RuntimeError(
                         'YAML schemas should use dashed lower-case identifiers, '
                         'not {!r} @ {}'.format(k, path))
             elif isinstance(k, (voluptuous.Optional, voluptuous.Required)):
                 check_identifier(path, k.schema)
             elif isinstance(k, voluptuous.Any):
                 for v in k.validators:
@@ -193,27 +193,27 @@ class Schema(voluptuous.Schema):
         return self.schema[item]
 
 
 OptimizationSchema = voluptuous.Any(
     # always run this task (default)
     None,
     # search the index for the given index namespaces, and replace this task if found
     # the search occurs in order, with the first match winning
-    {'index-search': [text_type]},
+    {'index-search': [basestring]},
     # consult SETA and skip this task if it is low-value
     {'seta': None},
     # skip this task if none of the given file patterns match
-    {'skip-unless-changed': [text_type]},
+    {'skip-unless-changed': [basestring]},
     # skip this task unless the changed files' SCHEDULES contains any of these components
     {'skip-unless-schedules': list(schedules.ALL_COMPONENTS)},
     # optimize strategy aliases for the test kind
     {'test': list(schedules.ALL_COMPONENTS)},
     {'test-inclusive': list(schedules.ALL_COMPONENTS)},
     {'test-try': list(schedules.ALL_COMPONENTS)},
 )
 
 # shortcut for a string where task references are allowed
 taskref_or_string = voluptuous.Any(
-    text_type,
-    {voluptuous.Required('task-reference'): text_type},
-    {voluptuous.Required('artifact-reference'): text_type},
+    basestring,
+    {voluptuous.Required('task-reference'): basestring},
+    {voluptuous.Required('artifact-reference'): basestring},
 )
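
The shortcut accepts a bare string or a single-key task-/artifact-reference dict. A quick self-check, restated with str so it runs on Python 3 ('<build>' is a placeholder dependency alias):

    import voluptuous

    taskref_or_string = voluptuous.Schema(voluptuous.Any(
        str,
        {voluptuous.Required('task-reference'): str},
        {voluptuous.Required('artifact-reference'): str},
    ))

    taskref_or_string('public/build/target.tar.bz2')
    taskref_or_string({'task-reference': '<build>'})
    taskref_or_string({'artifact-reference': '<build/public/build/target.tar.bz2>'})
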
--- a/taskcluster/taskgraph/util/scriptworker.py
+++ b/taskcluster/taskgraph/util/scriptworker.py
@@ -352,20 +352,20 @@ def get_release_config(config):
         partial_updates = json.loads(partial_updates)
         release_config['partial_versions'] = ', '.join([
             '{}build{}'.format(v, info['buildNumber'])
             for v, info in partial_updates.items()
         ])
         if release_config['partial_versions'] == "{}":
             del release_config['partial_versions']
 
-    release_config['version'] = config.params['version']
-    release_config['appVersion'] = config.params['app_version']
+    release_config['version'] = str(config.params['version'])
+    release_config['appVersion'] = str(config.params['app_version'])
 
-    release_config['next_version'] = config.params['next_version']
+    release_config['next_version'] = str(config.params['next_version'])
     release_config['build_number'] = config.params['build_number']
     return release_config
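
A worked example of the partial_versions join above, with made-up data; sorted() is added here only to keep the sketch deterministic, while the code above preserves dict order:

    import json

    partial_updates = json.loads(
        '{"69.0b9": {"buildNumber": 1}, "69.0b10": {"buildNumber": 2}}')
    partial_versions = ', '.join(
        '{}build{}'.format(v, info['buildNumber'])
        for v, info in sorted(partial_updates.items()))
    assert partial_versions == '69.0b10build2, 69.0b9build1'
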
 
 
 def get_signing_cert_scope_per_platform(build_platform, is_nightly, config):
     if 'devedition' in build_platform:
         return get_devedition_signing_cert_scope(config)
     elif is_nightly:
--- a/taskcluster/taskgraph/util/taskcluster.py
+++ b/taskcluster/taskgraph/util/taskcluster.py
@@ -5,17 +5,16 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import os
 import datetime
 import functools
 import requests
-import six
 import logging
 import taskcluster_urls as liburls
 from mozbuild.util import memoize
 from requests.packages.urllib3.util.retry import Retry
 from taskgraph.task import Task
 from taskgraph.util import yaml
 
 logger = logging.getLogger(__name__)
@@ -35,17 +34,17 @@ CONCURRENCY = 50
 def get_root_url(use_proxy):
     """Get the current TASKCLUSTER_ROOT_URL.  When running in a task, this must
     come from $TASKCLUSTER_ROOT_URL; when run on the command line, we apply a
     default that points to the production deployment of Taskcluster.  If use_proxy
     is set, this attempts to get TASKCLUSTER_PROXY_URL instead, failing if it
     is not set."""
     if use_proxy:
         try:
-            return six.ensure_text(os.environ['TASKCLUSTER_PROXY_URL'])
+            return os.environ['TASKCLUSTER_PROXY_URL']
         except KeyError:
             if 'TASK_ID' not in os.environ:
                 raise RuntimeError(
                     'taskcluster-proxy is not available when not executing in a task')
             else:
                 raise RuntimeError(
                     'taskcluster-proxy is not enabled for this task')
 
@@ -53,17 +52,17 @@ def get_root_url(use_proxy):
         if 'TASK_ID' in os.environ:
             raise RuntimeError('$TASKCLUSTER_ROOT_URL must be set when running in a task')
         else:
             logger.debug('Using default TASKCLUSTER_ROOT_URL (Firefox CI production)')
             return PRODUCTION_TASKCLUSTER_ROOT_URL
     logger.debug('Running in Taskcluster instance {}{}'.format(
         os.environ['TASKCLUSTER_ROOT_URL'],
         ' with taskcluster-proxy' if 'TASKCLUSTER_PROXY_URL' in os.environ else ''))
-    return six.ensure_text(os.environ['TASKCLUSTER_ROOT_URL'])
+    return os.environ['TASKCLUSTER_ROOT_URL']
 
 
 @memoize
 def get_session():
     session = requests.Session()
 
     retry = Retry(total=5, backoff_factor=0.1,
                   status_forcelist=[500, 502, 503, 504])
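
The hunk ends before the Retry object is attached to the session. For reference, the standard way to finish this setup is to mount it through an HTTPAdapter; a plausible completion, not necessarily the exact code that follows:

    import requests
    from requests.adapters import HTTPAdapter
    from requests.packages.urllib3.util.retry import Retry

    session = requests.Session()
    retry = Retry(total=5, backoff_factor=0.1,
                  status_forcelist=[500, 502, 503, 504])
    adapter = HTTPAdapter(max_retries=retry)
    session.mount('http://', adapter)
    session.mount('https://', adapter)
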
@@ -103,28 +102,28 @@ def _handle_artifact(path, response):
     response.raw.read = functools.partial(response.raw.read,
                                           decode_content=True)
     return response.raw
 
 
 def get_artifact_url(task_id, path, use_proxy=False):
     artifact_tmpl = liburls.api(get_root_url(False), 'queue', 'v1',
                                 'task/{}/artifacts/{}')
-    data = six.ensure_text(artifact_tmpl.format(task_id, path))
+    data = artifact_tmpl.format(task_id, path)
     if use_proxy:
         # Until Bug 1405889 is deployed, we can't download directly
         # from the taskcluster-proxy.  Work around by using the /bewit
         # endpoint instead.
         # The bewit URL is the body of a 303 redirect, which we don't
         # want to follow (which fetches a potentially large resource).
         response = _do_request(
             os.environ['TASKCLUSTER_PROXY_URL'] + '/bewit',
             data=data,
             allow_redirects=False)
-        return six.ensure_text(response.text)
+        return response.text
     return data
 
 
 def get_artifact(task_id, path, use_proxy=False):
     """
     Returns the artifact with the given path for the given task id.
 
     If the path ends with ".json" or ".yml", the content is deserialized as,
--- a/tools/tryselect/selectors/coverage.py
+++ b/tools/tryselect/selectors/coverage.py
@@ -4,17 +4,16 @@
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import collections
 import json
 import hashlib
 import os
 import shutil
-import six
 import sqlite3
 import subprocess
 import requests
 import datetime
 
 
 from mozboot.util import get_state_dir
 from mozbuild.base import MozbuildObject
@@ -375,17 +374,16 @@ def run(try_config={}, full=False, param
         test_count=len(test_files),
         test_plural='' if len(test_files) == 1 else 's',
         test_singular='s' if len(test_files) == 1 else '',
         task_count=len(tasks),
         task_plural='' if len(tasks) == 1 else 's')
     print('Found ' + test_count_message)
 
     # Set the test paths to be run by setting MOZHARNESS_TEST_PATHS.
-    path_env = {'MOZHARNESS_TEST_PATHS': six.ensure_text(
-        json.dumps(resolve_tests_by_suite(test_files)))}
+    path_env = {'MOZHARNESS_TEST_PATHS': json.dumps(resolve_tests_by_suite(test_files))}
     try_config.setdefault('env', {}).update(path_env)
 
     # Build commit message.
     msg = 'try coverage - ' + test_count_message
     return push_to_try('coverage', message.format(msg=msg),
                        try_task_config=generate_try_task_config('coverage', tasks, try_config),
                        push=push, closed_tree=closed_tree)
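
Net effect of the env plumbing above, with a toy mapping standing in for resolve_tests_by_suite():

    import json

    tests_by_suite = {'mochitest': ['dom/tests/mochitest']}  # hypothetical result
    try_config = {}
    try_config.setdefault('env', {})['MOZHARNESS_TEST_PATHS'] = json.dumps(tests_by_suite)
    assert json.loads(try_config['env']['MOZHARNESS_TEST_PATHS']) == tests_by_suite
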
--- a/tools/tryselect/task_config.py
+++ b/tools/tryselect/task_config.py
@@ -6,17 +6,16 @@
 Templates provide a way of modifying the task definition of selected tasks.
 They are added to 'try_task_config.json' and processed by the transforms.
 """
 
 from __future__ import absolute_import, print_function, unicode_literals
 
 import json
 import os
-import six
 import subprocess
 import sys
 from abc import ABCMeta, abstractmethod, abstractproperty
 from argparse import Action, SUPPRESS
 from textwrap import dedent
 
 import mozpack.path as mozpath
 import voluptuous
@@ -167,18 +166,17 @@ class Path(TryConfig):
         for p in paths:
             if not os.path.exists(p):
                 print("error: '{}' is not a valid path.".format(p), file=sys.stderr)
                 sys.exit(1)
 
         paths = [mozpath.relpath(mozpath.join(os.getcwd(), p), build.topsrcdir) for p in paths]
         return {
             'env': {
-                'MOZHARNESS_TEST_PATHS': six.ensure_text(
-                    json.dumps(resolve_tests_by_suite(paths))),
+                'MOZHARNESS_TEST_PATHS': json.dumps(resolve_tests_by_suite(paths)),
             }
         }
 
 
 class Environment(TryConfig):
 
     arguments = [
         [['--env'],