Bug 1068653 - Part 2 add taskcluster trygraph commands r=wcosta, garndt, mdas
authorJames Lal <james@lightsofapollo.com>
Wed, 26 Nov 2014 10:11:28 -0800
changeset 254435 f6a7135ba01290e9b982b14ac5238d3b340567c8
parent 254434 5edda558c6555e518d3579f89854b556c453c66a
child 254436 bb927ed931e07d42f1e58c684c0bafe79c95bb7b
push id721
push userjlund@mozilla.com
push dateTue, 21 Apr 2015 23:03:33 +0000
treeherdermozilla-release@d27c9211ebb3 [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewerswcosta, garndt, mdas
bugs1068653
milestone38.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1068653 - Part 2 add taskcluster trygraph commands r=wcosta, garndt, mdas
build/mach_bootstrap.py
testing/docker/README.md
testing/docker/REGISTRY
testing/docker/base/Dockerfile
testing/docker/base/VERSION
testing/docker/base/system-setup.sh
testing/docker/build.sh
testing/docker/builder/Dockerfile
testing/docker/builder/VERSION
testing/docker/builder/build-b2g-desktop.sh
testing/docker/builder/build-setup.sh
testing/docker/builder/build.sh
testing/docker/builder/get_gaia_repo.js
testing/docker/builder/get_gaia_revision.js
testing/docker/builder/mozconfigs/b2g-desktop
testing/docker/builder/mozconfigs/opt-firefox
testing/docker/tester/Dockerfile
testing/docker/tester/VERSION
testing/docker/tester/b2g-desktop-config.py
testing/docker/tester/bin/entrypoint
testing/docker/tester/dot-config/user-dirs.dirs
testing/docker/tester/dot-config/user-dirs.locale
testing/docker/tester/dot-pulse/default.pa
testing/docker/tester/emulator_automation_config.py
testing/docker/tester/test-setup.sh
testing/taskcluster/README.md
testing/taskcluster/design.md
testing/taskcluster/job_flags.yml
testing/taskcluster/mach_commands.py
testing/taskcluster/requirements.txt
testing/taskcluster/setup.py
testing/taskcluster/taskcluster_graph/__init__.py
testing/taskcluster/taskcluster_graph/build_task.py
testing/taskcluster/taskcluster_graph/commit_parser.py
testing/taskcluster/taskcluster_graph/from_now.py
testing/taskcluster/taskcluster_graph/slugid.py
testing/taskcluster/taskcluster_graph/try_test_parser.py
testing/taskcluster/tasks/b2g_desktop.json
testing/taskcluster/tasks/builds/b2g_desktop.yml
testing/taskcluster/tasks/mochitest.json
testing/taskcluster/tasks/tests/b2g_mochitest.yml
testing/taskcluster/tests/test_build_task.py
testing/taskcluster/tests/test_commit_parser.py
testing/taskcluster/tests/test_from_now.py
testing/taskcluster/tests/test_try_test_parser.py
--- a/build/mach_bootstrap.py
+++ b/build/mach_bootstrap.py
@@ -29,24 +29,27 @@ SEARCH_PATHS = [
     'python/mozboot',
     'python/mozbuild',
     'python/mozversioncontrol',
     'python/blessings',
     'python/configobj',
     'python/jsmin',
     'python/psutil',
     'python/which',
+    'python/pystache',
+    'python/pyyaml/lib',
     'build/pymake',
     'config',
     'dom/bindings',
     'dom/bindings/parser',
     'layout/tools/reftest',
     'other-licenses/ply',
     'xpcom/idl-parser',
     'testing',
+    'testing/taskcluster',
     'testing/xpcshell',
     'testing/web-platform',
     'testing/web-platform/harness',
     'testing/marionette/client/marionette',
     'testing/marionette/transport',
     'testing/mozbase/mozcrash',
     'testing/mozbase/mozdebug',
     'testing/mozbase/mozdevice',
@@ -74,16 +77,17 @@ MACH_MODULES = [
     'python/mach_commands.py',
     'python/mach/mach/commands/commandinfo.py',
     'python/mozboot/mozboot/mach_commands.py',
     'python/mozbuild/mozbuild/mach_commands.py',
     'python/mozbuild/mozbuild/backend/mach_commands.py',
     'python/mozbuild/mozbuild/frontend/mach_commands.py',
     'services/common/tests/mach_commands.py',
     'testing/mach_commands.py',
+    'testing/taskcluster/mach_commands.py',
     'testing/marionette/mach_commands.py',
     'testing/mochitest/mach_commands.py',
     'testing/xpcshell/mach_commands.py',
     'testing/talos/mach_commands.py',
     'testing/web-platform/mach_commands.py',
     'testing/xpcshell/mach_commands.py',
     'tools/docs/mach_commands.py',
     'tools/mercurial/mach_commands.py',
@@ -103,16 +107,21 @@ CATEGORIES = {
         'long': 'Common actions performed after completing a build.',
         'priority': 70,
     },
     'testing': {
         'short': 'Testing',
         'long': 'Run tests.',
         'priority': 60,
     },
+    'ci': {
+        'short': 'CI',
+        'long': 'Taskcluster commands',
+        'priority': 59
+    },
     'devenv': {
         'short': 'Development Environment',
         'long': 'Set up and configure your development environment.',
         'priority': 50,
     },
     'build-dev': {
         'short': 'Low-level Build System Interaction',
         'long': 'Interact with specific parts of the build system.',
new file mode 100644
--- /dev/null
+++ b/testing/docker/README.md
@@ -0,0 +1,33 @@
+# Docker Images for Gecko
+
+This folder contains various docker images used in [taskcluster](http://docs.taskcluster.net/) as well as other misc docker images which may be useful for
+hacking on gecko.
+
+## Building images
+
+Generally images can pull from the [registry](./REGISTRY) rather than
+build locally, but for developing new images it's often helpful to hack on
+them locally.
+
+```sh
+# Example: ./build.sh base
+./build.sh <FOLDER>
+```
+
+This is a tiny wrapper around building the docker images via `docker
+build -t $REGISTRY/$FOLDER:$FOLDER_VERSION`
+
+## Adding a new image
+
+The docker image primitives are very basic building blocks for
+constructing an "image" but generally don't help much with tagging it
+for deployment so we have a wrapper (./build.sh) which adds some sugar
+to help with tagging/versioning... Each folder should look something
+like this:
+
+  - your_amazing_image/
+    - your_amazing_image/Dockerfile: Standard docker file syntax
+    - your_amazing_image/VERSION: The version of the docker file
+      (required* used during tagging)
+    - your_amazing_image/REGISTRY: Override default registry
+      (useful for secret registries)
new file mode 100644
--- /dev/null
+++ b/testing/docker/REGISTRY
@@ -0,0 +1,1 @@
+quay.io/mozilla
new file mode 100644
--- /dev/null
+++ b/testing/docker/base/Dockerfile
@@ -0,0 +1,19 @@
+FROM          quay.io/mozilla/ubuntu:12.04
+MAINTAINER    Jonas Finnemann Jensen <jopsen@gmail.com>
+
+# Run system setup script
+ADD           system-setup.sh   /tmp/system-setup.sh
+RUN           ["/tmp/system-setup.sh"]
+
+# Set variables normally configured at login by the shell's parent process;
+# these are taken from the GNU su manual.
+ENV           HOME          /home/worker
+ENV           SHELL         /bin/bash
+ENV           USER          worker
+ENV           LOGNAME       worker
+
+# Declare default working folder
+WORKDIR       /home/worker
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
new file mode 100644
--- /dev/null
+++ b/testing/docker/base/VERSION
@@ -0,0 +1,1 @@
+0.0.1
new file mode 100755
--- /dev/null
+++ b/testing/docker/base/system-setup.sh
@@ -0,0 +1,77 @@
+#!/bin/bash -ve
+
+################################### setup.sh ###################################
+
+### Check that we are running as root
+test `whoami` == 'root';
+
+### Add worker user
+# Minimize the number of things which the build script can do, security-wise
+# it's not a problem to let the build script install things with apt-get. But it
+# really shouldn't do this, so let's forbid root access.
+useradd -d /home/worker -s /bin/bash -m worker;
+
+### Install Useful Packages
+# First we update and upgrade to latest versions.
+apt-get update;
+apt-get upgrade -y;
+
+# Let's install some goodies, ca-certificates is needed for https with hg.
+# sudo will be required anyway, but let's make it explicit. It's nice to have
+# sudo around. We'll also install nano; this is pure bloat, I know, but it's
+# a useful text editor.
+apt-get install -y                  \
+  ca-certificates                   \
+  sudo                              \
+  nano                              \
+  ;
+
+# Then let's install all firefox build dependencies, these are extracted from
+# mozboot. See python/mozboot/bin/bootstrap.py in mozilla-central.
+apt-get install -y                  \
+  autoconf2.13                      \
+  build-essential                   \
+  ccache                            \
+  libasound2-dev                    \
+  libcurl4-openssl-dev              \
+  libdbus-1-dev                     \
+  libdbus-glib-1-dev                \
+  libgconf2-dev                     \
+  libgstreamer0.10-dev              \
+  libgstreamer-plugins-base0.10-dev \
+  libgtk2.0-dev                     \
+  libiw-dev                         \
+  libnotify-dev                     \
+  libpulse-dev                      \
+  libxt-dev                         \
+  mercurial                         \
+  mesa-common-dev                   \
+  python-dev                        \
+  unzip                             \
+  uuid                              \
+  yasm                              \
+  xvfb                              \
+  zip                               \
+  software-properties-common        \
+  ;
+
+# Install some utilities; we'll be using nodejs in automation scripts. Maybe
+# we shouldn't — we can clean this up later.
+apt-get install -y                  \
+  screen                            \
+  vim                               \
+  wget                              \
+  curl                              \
+  nodejs                            \
+  ;
+
+### Clean up from setup
+# Remove cached .deb packages. Cached package takes up a lot of space and
+# distributing them to workers is wasteful.
+apt-get clean;
+
+# Remove the setup.sh setup, we don't really need this script anymore, deleting
+# it keeps the image as clean as possible.
+rm $0; echo "Deleted $0";
+
+################################### setup.sh ###################################
new file mode 100755
--- /dev/null
+++ b/testing/docker/build.sh
@@ -0,0 +1,81 @@
+#! /bin/bash -e
+
+# This file is a wrapper around docker build with specific concerns around image
+# versions and registry deployment... It also attempts to detect any potential
+# missing dependencies and warns you about them.
+
+usage() {
+  echo "Build a docker image in the given folder (and tag it)"
+  echo
+  echo "$0 <folder>"
+  echo
+  echo "  For more see: $PWD/README.md"
+  echo
+}
+
+usage_err() {
+  echo $1
+  echo
+  usage
+  exit 1
+}
+
+find_registry() {
+  local reg="$1/REGISTRY"
+
+  if [ -f $reg ];
+  then
+    echo $folder
+    return
+  fi
+}
+
+build() {
+  local folder=$1
+  local folder_reg="$1/REGISTRY"
+  local folder_ver="$1/VERSION"
+
+  if [ "$folder" == "" ];
+  then
+    usage
+    return
+  fi
+
+  test -d "$folder" || usage_err "Unknown folder: $folder"
+  test -f "$folder_ver" || usage_err "$folder must contain VERSION file"
+
+  # Fallback to default registry if one is not in the folder...
+  if [ ! -f "$folder_reg" ]; then
+    folder_reg=$PWD/REGISTRY
+  fi
+
+  local registry=$(cat $folder_reg)
+  local version=$(cat $folder_ver)
+
+  test -n "$registry" || usage_err "$folder_reg is empty aborting..."
+  test -n "$version" || usage_err "$folder_ver is empty aborting..."
+
+  local tag="$registry/$folder:$version"
+  docker build -t $tag $folder
+  echo "Success built $folder and tagged with $tag"
+  echo "If deploying now you can run 'docker push $tag'"
+}
+
+if ! which docker > /dev/null; then
+  echo "Docker must be installed read installation instructions at docker.com"
+  echo
+  usage
+  exit 1
+fi
+
+# TODO: In the future we should check minimum docker version it does matter.
+if ! docker version > /dev/null;
+then
+  echo "Docker server is unresponsive run 'docker ps' and check that docker is"
+  echo "running"
+  echo
+  usage
+  exit 1
+fi
+
+build $1
new file mode 100644
--- /dev/null
+++ b/testing/docker/builder/Dockerfile
@@ -0,0 +1,16 @@
+FROM          quay.io/mozilla/gecko-base
+MAINTAINER    Jonas Finnemann Jensen <jopsen@gmail.com>
+
+ENV PATH $PATH:/home/worker/bin/
+
+# Add utilities and configuration
+ADD           mozconfigs/           /home/worker/mozconfigs/
+RUN           mkdir -p              /home/worker/bin
+ADD           build.sh              /home/worker/bin/build.sh
+ADD           build-b2g-desktop.sh  /home/worker/bin/build-b2g-desktop.sh
+ADD           get_gaia_repo.js      /home/worker/bin/get_gaia_repo.js
+ADD           get_gaia_revision.js  /home/worker/bin/get_gaia_revision.js
+ADD           build-setup.sh        /home/worker/bin/build-setup.sh
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
new file mode 100644
--- /dev/null
+++ b/testing/docker/builder/VERSION
@@ -0,0 +1,1 @@
+0.0.11
new file mode 100755
--- /dev/null
+++ b/testing/docker/builder/build-b2g-desktop.sh
@@ -0,0 +1,50 @@
+#!/bin/bash -live
+
+################################### build.sh ###################################
+
+build-setup.sh
+
+### Check that required variables are defined
+test $REPOSITORY  # Should be an hg repository url to pull from
+test $REVISION    # Should be an hg revision to pull down
+test $MOZCONFIG   # Should be a mozconfig file from mozconfig/ folder
+
+### Pull and update mozilla-central
+cd /home/worker/mozilla-central/source;
+hg pull -r $REVISION $REPOSITORY;
+hg update $REVISION;
+
+### Pull and update gaia
+cd /home/worker/gaia/source;
+GAIA_REV=$(get_gaia_revision.js)
+GAIA_REPO="https://hg.mozilla.org$(get_gaia_repo.js)"
+hg pull -r $GAIA_REV $GAIA_REPO;
+hg update $GAIA_REV;
+
+cd /home/worker/mozilla-central/source;
+./mach build;
+
+### Make package
+cd /home/worker/object-folder;
+make package package-tests;
+
+### Extract artifacts
+# Navigate to dist/ folder
+cd /home/worker/object-folder/dist;
+
+ls -lah /home/worker/object-folder/dist/
+
+
+# Target names are cached so make sure we discard them first if found.
+rm -f target.linux-x86_64.tar.bz2 target.linux-x86_64.json target.tests.zip
+
+# Artifacts folder is outside of the cache.
+mkdir -p /home/worker/artifacts/
+
+# Discard version numbers from packaged files, they just make it hard to write
+# the right filename in the task payload where artifacts are declared
+mv *.linux-x86_64.tar.bz2   /home/worker/artifacts/target.linux-x86_64.tar.bz2
+mv *.linux-x86_64.json      /home/worker/artifacts/target.linux-x86_64.json
+mv *.tests.zip              /home/worker/artifacts/target.tests.zip
+
+################################### build.sh ###################################
new file mode 100755
--- /dev/null
+++ b/testing/docker/builder/build-setup.sh
@@ -0,0 +1,24 @@
+#!/bin/bash -vex
+
+gecko_dir=/home/worker/mozilla-central/source
+gaia_dir=/home/worker/gaia/source
+
+
+### Firefox Build Setup
+# Clone mozilla-central
+if [ ! -d "$gecko_dir" ];
+then
+  hg clone https://hg.mozilla.org/mozilla-central/ $gecko_dir
+fi
+
+# Create .mozbuild so mach doesn't complain about this
+mkdir -p /home/worker/.mozbuild/
+
+# Ensure the object-folder exists
+mkdir -p /home/worker/object-folder/
+
+### Clone gaia in too
+if [ ! -d "$gaia_dir" ];
+then
+  hg clone https://hg.mozilla.org/integration/gaia-central/ $gaia_dir
+fi
new file mode 100755
--- /dev/null
+++ b/testing/docker/builder/build.sh
@@ -0,0 +1,35 @@
+#!/bin/bash -live
+
+################################### build.sh ###################################
+
+build-setup.sh
+
+### Check that we are running as worker
+test `whoami` == 'worker';
+
+### Check that required variables are defined
+test $REPOSITORY  # Should be an hg repository url to pull from
+test $REVISION    # Should be an hg revision to pull down
+test $MOZCONFIG   # Should be a mozconfig file from mozconfig/ folder
+
+### Pull, Update and Build
+cd /home/worker/mozilla-central;
+hg pull -r $REVISION $REPOSITORY;
+hg update $REVISION;
+./mach build;
+
+### Make package
+cd /home/worker/object-folder;
+make package package-tests;
+
+### Extract artifacts
+# Navigate to dist/ folder
+cd /home/worker/object-folder/dist;
+# Discard version numbers from packaged files, they just make it hard to write
+# the right filename in the task payload where artifacts are declared
+mv *.linux-x86_64.tar.bz2   target.linux-x86_64.tar.bz2
+mv *.linux-x86_64.json      target.linux-x86_64.json
+mv *.tests.zip              target.tests.zip
+
+
+################################### build.sh ###################################
new file mode 100755
--- /dev/null
+++ b/testing/docker/builder/get_gaia_repo.js
@@ -0,0 +1,4 @@
+#! /usr/bin/env node
+var fs = require('fs');
+var data = fs.readFileSync('/home/worker/mozilla-central/source/b2g/config/gaia.json');
+console.log(JSON.parse(data).repo_path);
new file mode 100755
--- /dev/null
+++ b/testing/docker/builder/get_gaia_revision.js
@@ -0,0 +1,4 @@
+#! /usr/bin/env node
+var fs = require('fs');
+var data = fs.readFileSync('/home/worker/mozilla-central/source/b2g/config/gaia.json');
+console.log(JSON.parse(data).revision);
new file mode 100644
--- /dev/null
+++ b/testing/docker/builder/mozconfigs/b2g-desktop
@@ -0,0 +1,48 @@
+. "$topsrcdir/b2g/config/mozconfigs/common"
+ac_add_options --enable-stdcxx-compat
+
+ac_add_options --enable-update-channel=${MOZ_UPDATE_CHANNEL}
+ac_add_options --enable-update-packaging
+ac_add_options --enable-signmar
+ac_add_options --enable-debug
+
+# Nightlies only since this has a cost in performance
+#ac_add_options --enable-js-diagnostics
+
+# This will overwrite the default of stripping everything and keep the symbol table.
+# This is useful for profiling and debugging and only increases the package size
+# by 2 MBs.
+STRIP_FLAGS="--strip-debug"
+
+# Needed to enable breakpad in application.ini
+export MOZILLA_OFFICIAL=1
+
+export MOZ_TELEMETRY_REPORTING=1
+
+# Treat warnings as errors in directories with FAIL_ON_WARNINGS.
+# DISABLED WHILE NOT ON TRY ac_add_options --enable-warnings-as-errors
+
+# Use ccache
+#. "$topsrcdir/build/mozconfig.cache"
+
+#B2G options
+ac_add_options --enable-application=b2g
+ENABLE_MARIONETTE=1
+ac_add_options --disable-elf-hack
+export CXXFLAGS=-DMOZ_ENABLE_JS_DUMP
+
+GAIADIR=/home/worker/gaia/source
+
+. "$topsrcdir/b2g/config/mozconfigs/common.override"
+
+
+mk_add_options MOZ_OBJDIR=/home/worker/object-folder/
+# Enable parallel compiling
+CONCURRENCY=$(( `grep processor /proc/cpuinfo | wc -l` + 2 ))
+mk_add_options MOZ_MAKE_FLAGS="-j$CONCURRENCY"
+
+# Not sure about this one...
+ac_add_options --disable-libjpeg-turbo
+
+# This option is required if you want to be able to run Gaia's tests
+ac_add_options --enable-tests
new file mode 100644
--- /dev/null
+++ b/testing/docker/builder/mozconfigs/opt-firefox
@@ -0,0 +1,8 @@
+ac_add_options --enable-application=browser
+ac_add_options --enable-optimize
+mk_add_options MOZ_OBJDIR=/home/worker/object-folder/
+
+# Enable parallel compiling
+CONCURRENCY=$(( `grep processor /proc/cpuinfo | wc -l` + 2 ))
+mk_add_options MOZ_MAKE_FLAGS="-j$CONCURRENCY"
+
new file mode 100644
--- /dev/null
+++ b/testing/docker/tester/Dockerfile
@@ -0,0 +1,23 @@
+FROM          quay.io/mozilla/gecko-base
+MAINTAINER    Jonas Finnemann Jensen <jopsen@gmail.com>
+
+# Add utilities and configuration
+COPY           fetch-artifacts.py            /home/worker/fetch-artifacts.py
+COPY           b2g-desktop-tests.sh          /home/worker/b2g-desktop-tests.sh
+COPY           b2g-emulator-tests.sh         /home/worker/b2g-emulator-tests.sh
+COPY           b2g-desktop-config.py         /home/worker/b2g-desktop-config.py
+COPY           test-setup.sh                 /tmp/test-setup.sh
+COPY           dot-config                    /home/worker/.config
+COPY           dot-pulse                     /home/worker/.pulse
+COPY           emulator_automation_config.py /home/worker/emulator_automation_config.py
+COPY           bin                           /home/worker/bin
+
+# Run test setup script
+USER          root
+RUN           ["/tmp/test-setup.sh"]
+ENV           PATH $PATH:/home/worker/bin
+
+USER          worker
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
new file mode 100644
--- /dev/null
+++ b/testing/docker/tester/VERSION
@@ -0,0 +1,1 @@
+0.0.2
new file mode 100644
--- /dev/null
+++ b/testing/docker/tester/b2g-desktop-config.py
@@ -0,0 +1,29 @@
+# This is a template config file for b2g desktop unittest production.
+import os
+
+config = {
+    # mozharness options
+    "application": "b2g",
+
+    "find_links": [
+        "http://pypi.pub.build.mozilla.org/pub",
+    ],
+    "pip_index": False,
+
+    "default_actions": [
+        'clobber',
+        'read-buildbot-config',
+        'download-and-extract',
+        'create-virtualenv',
+        'install',
+        'run-tests',
+    ],
+    "download_symbols": "ondemand",
+    "download_minidump_stackwalk": True,
+
+    "run_file_names": {
+        "mochitest": "runtestsb2g.py",
+        "reftest": "runreftestb2g.py",
+    },
+   "in_tree_config": "config/mozharness/b2g_desktop_config.py",
+}
new file mode 100755
--- /dev/null
+++ b/testing/docker/tester/bin/entrypoint
@@ -0,0 +1,12 @@
+#! /bin/bash -vex
+
+# This file is intended to be used as the "entrypoint" or first command in the
+# docker image. It uses "eval" so it takes place of the usual /bin/bash -c
+# <stuff> conventions you need when running a docker image. Primarily it
+# bootstraps any processes that need to run outside of mozharness (like Xvfb)
+
+# XXX: Note that all tests could be faster at smaller bit depth
+Xvfb :0 -nolisten tcp -screen 0 1600x1200x24 &
+export DISPLAY=:0
+
+eval $@
new file mode 100644
--- /dev/null
+++ b/testing/docker/tester/dot-config/user-dirs.dirs
@@ -0,0 +1,15 @@
+# This file is written by xdg-user-dirs-update
+# If you want to change or add directories, just edit the line you're
+# interested in. All local changes will be retained on the next run
+# Format is XDG_xxx_DIR="$HOME/yyy", where yyy is a shell-escaped
+# homedir-relative path, or XDG_xxx_DIR="/yyy", where /yyy is an
+# absolute path. No other format is supported.
+# 
+XDG_DESKTOP_DIR="$HOME/Desktop"
+XDG_DOWNLOAD_DIR="$HOME/Downloads"
+XDG_TEMPLATES_DIR="$HOME/Templates"
+XDG_PUBLICSHARE_DIR="$HOME/Public"
+XDG_DOCUMENTS_DIR="$HOME/Documents"
+XDG_MUSIC_DIR="$HOME/Music"
+XDG_PICTURES_DIR="$HOME/Pictures"
+XDG_VIDEOS_DIR="$HOME/Videos"
new file mode 100644
--- /dev/null
+++ b/testing/docker/tester/dot-config/user-dirs.locale
@@ -0,0 +1,1 @@
+en_US
new file mode 100644
--- /dev/null
+++ b/testing/docker/tester/dot-pulse/default.pa
@@ -0,0 +1,164 @@
+#!/usr/bin/pulseaudio -nF
+#
+# This file is part of PulseAudio.
+#
+# PulseAudio is free software; you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# PulseAudio is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with PulseAudio; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
+
+# This startup script is used only if PulseAudio is started per-user
+# (i.e. not in system mode)
+
+.nofail
+
+### Load something into the sample cache
+#load-sample-lazy x11-bell /usr/share/sounds/gtk-events/activate.wav
+#load-sample-lazy pulse-hotplug /usr/share/sounds/startup3.wav
+#load-sample-lazy pulse-coldplug /usr/share/sounds/startup3.wav
+#load-sample-lazy pulse-access /usr/share/sounds/generic.wav
+
+.fail
+
+### Automatically restore the volume of streams and devices
+load-module module-device-restore
+load-module module-stream-restore
+load-module module-card-restore
+
+### Automatically augment property information from .desktop files
+### stored in /usr/share/application
+load-module module-augment-properties
+
+### Load audio drivers statically
+### (it's probably better to not load these drivers manually, but instead
+### use module-udev-detect -- see below -- for doing this automatically)
+#load-module module-alsa-sink
+#load-module module-alsa-source device=hw:1,0
+#load-module module-oss device="/dev/dsp" sink_name=output source_name=input
+#load-module module-oss-mmap device="/dev/dsp" sink_name=output source_name=input
+#load-module module-null-sink
+#load-module module-pipe-sink
+
+### Automatically load driver modules depending on the hardware available
+.ifexists module-udev-detect.so
+load-module module-udev-detect
+.else
+### Use the static hardware detection module (for systems that lack udev/hal support)
+load-module module-detect
+.endif
+
+### Automatically connect sink and source if JACK server is present
+.ifexists module-jackdbus-detect.so
+.nofail
+load-module module-jackdbus-detect
+.fail
+.endif
+
+### Automatically load driver modules for Bluetooth hardware
+# This module causes a pulseaudio startup failure on "gecko-tester"
+#.ifexists module-bluetooth-discover.so
+#load-module module-bluetooth-discover
+#.endif
+
+### Load several protocols
+.ifexists module-esound-protocol-unix.so
+load-module module-esound-protocol-unix
+.endif
+load-module module-native-protocol-unix
+
+### Network access (may be configured with paprefs, so leave this commented
+### here if you plan to use paprefs)
+#load-module module-esound-protocol-tcp
+#load-module module-native-protocol-tcp
+#load-module module-zeroconf-publish
+
+### Load the RTP receiver module (also configured via paprefs, see above)
+#load-module module-rtp-recv
+
+### Load the RTP sender module (also configured via paprefs, see above)
+#load-module module-null-sink sink_name=rtp format=s16be channels=2 rate=44100 sink_properties="device.description='RTP Multicast Sink'"
+#load-module module-rtp-send source=rtp.monitor
+
+### Load additional modules from GConf settings. This can be configured with the paprefs tool.
+### Please keep in mind that the modules configured by paprefs might conflict with manually
+### loaded modules.
+.ifexists module-gconf.so
+.nofail
+load-module module-gconf
+.fail
+.endif
+
+### Automatically restore the default sink/source when changed by the user
+### during runtime
+### NOTE: This should be loaded as early as possible so that subsequent modules
+### that look up the default sink/source get the right value
+load-module module-default-device-restore
+
+### Automatically move streams to the default sink if the sink they are
+### connected to dies, similar for sources
+load-module module-rescue-streams
+
+### Make sure we always have a sink around, even if it is a null sink.
+load-module module-always-sink
+
+### Honour intended role device property
+load-module module-intended-roles
+
+### Automatically suspend sinks/sources that become idle for too long
+load-module module-suspend-on-idle
+
+### If autoexit on idle is enabled we want to make sure we only quit
+### when no local session needs us anymore.
+# This module causes a pulseaudio startup failure on "gecko-tester"
+#.ifexists module-console-kit.so
+#load-module module-console-kit
+#.endif
+
+### Enable positioned event sounds
+load-module module-position-event-sounds
+
+### Cork music streams when a phone stream is active
+#load-module module-cork-music-on-phone
+
+### Modules to allow autoloading of filters (such as echo cancellation)
+### on demand. module-filter-heuristics tries to determine what filters
+### make sense, and module-filter-apply does the heavy-lifting of
+### loading modules and rerouting streams.
+load-module module-filter-heuristics
+load-module module-filter-apply
+
+### Load DBus protocol
+#.ifexists module-dbus-protocol.so
+#load-module module-dbus-protocol
+#.endif
+
+# X11 modules should not be started from default.pa so that one daemon
+# can be shared by multiple sessions.
+
+### Load X11 bell module
+#load-module module-x11-bell sample=bell-windowing-system
+
+### Register ourselves in the X11 session manager
+#load-module module-x11-xsmp
+
+### Publish connection data in the X11 root window
+#.ifexists module-x11-publish.so
+#.nofail
+#load-module module-x11-publish
+#.fail
+#.endif
+
+load-module module-switch-on-port-available
+
+### Make some devices default
+#set-default-sink output
+#set-default-source input
new file mode 100644
--- /dev/null
+++ b/testing/docker/tester/emulator_automation_config.py
@@ -0,0 +1,49 @@
+# This is a template config file for b2g emulator unittest production.
+import os
+
+config = {
+    # mozharness options
+    "application": "b2g",
+    "busybox_url": "http://runtime-binaries.pvt.build.mozilla.org/tooltool/sha512/0748e900821820f1a42e2f1f3fa4d9002ef257c351b9e6b78e7de0ddd0202eace351f440372fbb1ae0b7e69e8361b036f6bd3362df99e67fc585082a311fc0df",
+    "xre_url": "http://runtime-binaries.pvt.build.mozilla.org/tooltool/sha512/263f4e8796c25543f64ba36e53d5c4ab8ed4d4e919226037ac0988761d34791b038ce96a8ae434f0153f9c2061204086decdbff18bdced42f3849156ae4dc9a4",
+    "tooltool_servers": ["http://runtime-binaries.pvt.build.mozilla.org/tooltool/"],
+
+    "exes": {
+        'python': '/usr/bin/python',
+        'tooltool.py': "mozharness/mozharness/mozilla/tooltool.py",
+    },
+
+    "find_links": [
+        "http://pypi.pvt.build.mozilla.org/pub",
+        "http://pypi.pub.build.mozilla.org/pub",
+    ],
+    "pip_index": False,
+
+    "buildbot_json_path": "buildprops.json",
+
+    "default_actions": [
+        'clobber',
+        'read-buildbot-config',
+        'download-and-extract',
+        'create-virtualenv',
+        'install',
+        'run-tests',
+    ],
+    "download_symbols": "ondemand",
+    "download_minidump_stackwalk": True,
+    "default_blob_upload_servers": [
+        "https://blobupload.elasticbeanstalk.com",
+    ],
+    "blob_uploader_auth_file": os.path.join(os.getcwd(), "oauth.txt"),
+
+    "run_file_names": {
+        "jsreftest": "runreftestb2g.py",
+        "mochitest": "runtestsb2g.py",
+        "reftest": "runreftestb2g.py",
+        "crashtest": "runreftestb2g.py",
+        "xpcshell": "runtestsb2g.py"
+    },
+    # test harness options are located in the gecko tree
+    "in_tree_config": "config/mozharness/b2g_emulator_config.py",
+    "vcs_output_timeout": 1760,
+}
new file mode 100755
--- /dev/null
+++ b/testing/docker/tester/test-setup.sh
@@ -0,0 +1,78 @@
+#!/bin/bash -ve
+
+### Firefox Test Setup
+
+apt-get update
+apt-get install -y \
+        alsa-base \
+        alsa-utils \
+        bluez-alsa \
+        bluez-alsa:i386 \
+        bluez-cups \
+        bluez-gstreamer \
+        g++-multilib \
+        gcc-multilib \
+        gir1.2-gnomebluetooth-1.0 \
+        gstreamer0.10-alsa \
+        libasound2-plugins:i386 \
+        libcanberra-pulse \
+        libdrm-intel1:i386 \
+        libdrm-nouveau1a:i386 \
+        libdrm-radeon1:i386 \
+        libdrm2:i386 \
+        libexpat1:i386 \
+        libgnome-bluetooth8 \
+        libllvm2.9 \
+        libllvm3.0:i386 \
+        libncurses5:i386          \
+        libpulse-mainloop-glib0:i386 \
+        libpulsedsp:i386 \
+        libsdl1.2debian:i386 \
+        libsox-fmt-alsa \
+        libx11-xcb1:i386 \
+        libxcb-glx0:i386 \
+        libxcb-glx0 \
+        libxdamage1:i386 \
+        libxfixes3:i386 \
+        libxxf86vm1:i386 \
+        libxxf86vm1 \
+        llvm \
+        llvm-2.9 \
+        llvm-2.9-dev \
+        llvm-2.9-runtime \
+        llvm-dev \
+        llvm-runtime \
+        pulseaudio-module-bluetooth \
+        pulseaudio-module-gconf \
+        pulseaudio-module-X11 \
+        pulseaudio \
+        python-pip
+
+# Mozilla-patched mesa libs required for many reftests -- see bug 975034
+wget http://puppetagain.pub.build.mozilla.org/data/repos/apt/releng/pool/main/m/mesa/libgl1-mesa-dri_8.0.4-0ubuntu0.6mozilla1_i386.deb
+wget http://puppetagain.pub.build.mozilla.org/data/repos/apt/releng/pool/main/m/mesa/libgl1-mesa-dri_8.0.4-0ubuntu0.6mozilla1_amd64.deb
+wget http://puppetagain.pub.build.mozilla.org/data/repos/apt/releng/pool/main/m/mesa/libgl1-mesa-glx_8.0.4-0ubuntu0.6mozilla1_i386.deb
+wget http://puppetagain.pub.build.mozilla.org/data/repos/apt/releng/pool/main/m/mesa/libgl1-mesa-glx_8.0.4-0ubuntu0.6mozilla1_amd64.deb
+wget http://puppetagain.pub.build.mozilla.org/data/repos/apt/releng/pool/main/m/mesa/libglapi-mesa_8.0.4-0ubuntu0.6mozilla1_i386.deb
+wget http://puppetagain.pub.build.mozilla.org/data/repos/apt/releng/pool/main/m/mesa/libglapi-mesa_8.0.4-0ubuntu0.6mozilla1_amd64.deb
+wget http://puppetagain.pub.build.mozilla.org/data/repos/apt/releng/pool/main/m/mesa/libglu1-mesa_8.0.4-0ubuntu0.6mozilla1_i386.deb
+wget http://puppetagain.pub.build.mozilla.org/data/repos/apt/releng/pool/main/m/mesa/libglu1-mesa_8.0.4-0ubuntu0.6mozilla1_amd64.deb
+dpkg -i libgl1-mesa-dri_8.0.4-0ubuntu0.6mozilla1_amd64.deb
+dpkg -i libgl1-mesa-dri_8.0.4-0ubuntu0.6mozilla1_i386.deb
+dpkg -i libglapi-mesa_8.0.4-0ubuntu0.6mozilla1_amd64.deb
+dpkg -i libglapi-mesa_8.0.4-0ubuntu0.6mozilla1_i386.deb
+dpkg -i libgl1-mesa-glx_8.0.4-0ubuntu0.6mozilla1_i386.deb
+dpkg -i libgl1-mesa-glx_8.0.4-0ubuntu0.6mozilla1_amd64.deb
+dpkg -i libglu1-mesa_8.0.4-0ubuntu0.6mozilla1_i386.deb
+dpkg -i libglu1-mesa_8.0.4-0ubuntu0.6mozilla1_amd64.deb
+
+pip install virtualenv;
+mkdir Documents; mkdir Pictures; mkdir Music; mkdir Videos;
+hg clone http://hg.mozilla.org/build/mozharness/
+echo 'Xvfb :0 -nolisten tcp -screen 0 1600x1200x24 &> /dev/null &' >> .bashrc
+chown -R worker:worker /home/worker/* /home/worker/.*
+
+### Clean up from setup
+# Remove the setup.sh setup, we don't really need this script anymore, deleting
+# it keeps the image as clean as possible.
+#rm $0; echo "Deleted $0";
new file mode 100644
--- /dev/null
+++ b/testing/taskcluster/README.md
@@ -0,0 +1,132 @@
+# Taskcluster + Gecko Integration
+
+## Task conventions
+
+In order to properly enable task reuse there are a small number of
+conventions and parameters that are specialized for build tasks vs test
+tasks. The goal here should be to provide as much of the power of
+taskcluster as possible, but without making it hard to support the
+current model of build/test.
+
+
+All tasks are in the YAML format and are also processed via mustache to
+allow for greater customizations. All tasks have the following
+templates variables:
+
+
+  - `docker_image`: Helper for always using the latest version of a docker
+    image that exist in tree.
+
+    ```
+    {{#docker_image}}base{{/docker_image}}
+    ```
+
+    Will produce something like (see the docker folder):
+
+    ```
+    quay.io/mozilla.com/base:0.11
+    ```
+
+  - `from_now`: Helper for crafting a JSON date in the future.
+
+    ```
+    {{#from_now}}1 year{{/from_now}}
+    ```
+
+    Will produce:
+
+    ```
+    2014-10-19T22:45:45.655Z
+    ```
+
+  - `now`: Current time as a json formatted date.
+
+
+### Build tasks
+
+By convention, build tasks are stored in `tasks/builds/`; the location of
+each particular type of build is specified in `job_flags.yml` (and more
+locations in the future).
+
+#### Task format
+
+To facilitate better reuse of tasks there are some expectations of the
+build tasks. These are required for the test tasks to interact with the
+builds correctly but may not affect the builds or indexing services.
+
+```yaml
+
+# This is an example of just the special fields. Other fields that are
+# required by taskcluster are omitted and documented on http://docs.taskcluster.net/
+task:
+
+  payload:
+    # Builders usually create at least two important artifacts: the build
+    # and the tests. These can be anywhere in the task and may also have
+    # different path names to include things like arch and extension.
+    artifacts:
+      # The build artifact; this can be anything as long as it's referenced
+      # in locations.
+      'public/name_i_made_up.tar.gz': '/path/to/build'
+      'public/some_tests.zip': '/path/to/tests'
+
+  extra:
+    # Build tasks may put their artifacts anywhere but there are common
+    # resources that test tasks need to do their job correctly so we
+    # need to provide an easy way to look up the correct artifact path.
+    locations:
+      build: 'public/name_i_made_up.tar.gz'
+      tests: 'public/some_tests.zip'
+```
+
+#### Templates properties
+
+  - repository: Target HG repository (ex:
+    https://hg.mozilla.org/mozilla-central)
+
+  - revision: Target HG revision for gecko
+
+  - owner: Email address of the committer
+
+### Test Tasks
+
+By convention, test tasks are stored in `tasks/tests/`; the location of
+each particular type of test is specified in `job_flags.yml` (and more
+locations in the future).
+
+
+#### Template properties
+
+  - repository: Target HG repository (ex:
+    https://hg.mozilla.org/mozilla-central)
+
+  - revision: Target HG revision for gecko
+
+  - owner: Email address of the committer
+
+  - build_url: Location of the build
+
+  - tests_url: Location of the tests.zip package
+
+  - chunk: Current chunk
+
+  - total_chunks: Total number of chunks
+
+## Developing
+
+Running commands via mach is the best way to invoke commands. Testing
+works a little differently (I have not figured out how to invoke
+python-test without running install steps first).
+
+
+```sh
+mach python-test tests/
+```
+
+## Examples:
+
+Requires [taskcluster-cli](https://github.com/taskcluster/taskcluster-cli).
+
+```sh
+mach trygraph --message 'try: -b do -p all' --revision=33c0181c4a25 --repository=http://hg.mozilla.org/mozilla-central --owner=jlal@mozilla.com | taskcluster run-graph
+```
new file mode 100644
--- /dev/null
+++ b/testing/taskcluster/design.md
@@ -0,0 +1,28 @@
+Problems:
+
+Not all tests work on all platforms
+Many tests work on N+1 platforms
+
+Goals:
+
+Tests and builds should be loosely coupled (you probably need a build
+but you don't always need a build!)
+
+Workflows:
+
+1. Try: decide upon a set of builds and tests from a matrix of checkboxes
+
+2. Branch: decide upon a set of builds based on in tree configuration
+   (essentially a "fixed" version of try flags)
+
+3. One-off builds / one-off tests (which require a build we created
+   earlier)
+
+## Build tasks
+
+No special logic needed but convention of generating artifacts should be followed!
+
+## Test Tasks
+
+Always need a build (and likely also need the tests.zip). Should know
+what potential builds they can run on.
new file mode 100644
--- /dev/null
+++ b/testing/taskcluster/job_flags.yml
@@ -0,0 +1,76 @@
+# This file contains the list of "job flags"/"try flags" for tests and builds.
+---
+
+# List of all possible flags for each category of tests used in the case where
+# "all" is specified.
+flags:
+  builds:
+    - emulator
+    - emulator-jb
+    - emulator-kk
+    - linux32_gecko  # b2g desktop linux 32 bit
+    - linux64_gecko  # b2g desktop linux 64 bit
+    - macosx64_gecko # b2g desktop osx 64 bit
+    - win32_gecko    # b2g desktop win 32 bit
+
+  tests:
+    - reftest
+    - reftest-ipc
+    - reftest-no-accel
+    - crashtest
+    - crashtest-ipc
+    - xpcshell
+    - jsreftest
+    - jetpack
+    - marionette
+    - mozmill
+    - cppunit
+    - jittests
+    - mochitests
+    - web-platform-tests
+    - marionette-webapi
+
+# Build section covers the -b[uild] and -p[latform] options that try provides.
+builds:
+  # The format for registering a new build flag -> task looks like this:
+  #
+  # <flag name>
+  #   # Platforms are primarily used to restrict test runs to only X platform
+  #   # but the information is stored on the build to indicate which platform(s)
+  #   # the build belongs to. Note that `platforms` here is the term used by the
+  #   # try chooser meaning "some group of tests" examples of platforms are
+  #   # things like "b2g", "win32"
+  #   platforms:
+  #     - <platform name>
+  #   # note that for sanity o -> means opt and d -> means debug if additional
+  #   # flags are passed we will attempt to match them up to an option here if
+  #   # available
+  #   types:
+  #     opt: <path to opt task>
+  #     debug: <path to debug task>
+  #
+  linux64_gecko:
+    platforms:
+      - b2g
+    types:
+      opt: tasks/builds/b2g_desktop.yml
+
+# Test section covers the -u options in the try flags
+tests:
+  # The format for registering a new test flag -> task looks like this:
+  #
+  # <flag name>
+  #   task: <path to test task>
+  #   # Note that the total number of chunks affects more than just
+  #   # performance; we need to schedule specific chunks in some cases!
+  #   chunks: <total number of chunks>
+  #   # Not all tests can run on all builds and we may not want to run some
+  #   # tests on all build variants so we use "allowed tasks" instead of
+  #   # "allowed platforms" here.
+  #   allowed_build_tasks:
+  #     - builds/b2g_desktop.yml
+  mochitests:
+    allowed_build_tasks:
+      - tasks/builds/b2g_desktop.yml
+    task: tasks/tests/b2g_mochitest.yml
+    chunks: 5
new file mode 100644
--- /dev/null
+++ b/testing/taskcluster/mach_commands.py
@@ -0,0 +1,167 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import os.path
+import json
+import copy
+import datetime
+
+import pystache
+import yaml
+
+from mach.decorators import (
+    CommandArgument,
+    CommandProvider,
+    Command,
+)
+
+
+from taskcluster_graph.commit_parser import parse_commit
+from taskcluster_graph.slugid import slugid
+from taskcluster_graph.from_now import json_time_from_now
+
+import taskcluster_graph.build_task
+
+ROOT = os.path.dirname(os.path.realpath(__file__))
+DOCKER_ROOT = os.path.join(ROOT, '..', 'docker')
+LOCAL_WORKER_TYPES = ['b2gtest', 'b2gbuild']
+
+# XXX: If/when we have the taskcluster queue use construct url instead
+ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'
+REGISTRY = open(os.path.join(DOCKER_ROOT, 'REGISTRY')).read().strip()
+
+
+def import_yaml(path, variables=None):
+    ''' Load a yml file relative to the root of this file'''
+    content = open(os.path.join(ROOT, path)).read()
+    if variables is not None:
+        content = pystache.render(content, variables)
+    task = yaml.load(content)
+    return task
+
+def docker_image(name):
+    ''' Determine the docker tag/revision from an in tree docker file '''
+    repository_path = os.path.join(DOCKER_ROOT, name, 'REPOSITORY')
+    repository = REGISTRY
+
+    version = open(os.path.join(DOCKER_ROOT, name, 'VERSION')).read().strip()
+
+    if os.path.isfile(repository_path):
+        repository = open(repository_path).read().strip()
+
+    return '{}/{}:{}'.format(repository, name, version)
+
+@CommandProvider
+class TryGraph(object):
+    @Command('trygraph', category="ci",
+        description="Create taskcluster try server graph")
+    @CommandArgument('--revision',
+        help='revision in gecko to use in sub tasks')
+    @CommandArgument('--message',
+        required=True,
+        help='Commit message to be parsed')
+    @CommandArgument('--repository',
+        help='full path to hg repository to use in sub tasks')
+    @CommandArgument('--owner',
+        help='email address of who owns this graph')
+    @CommandArgument('--extend-graph',
+        action="store_true", dest="ci", help='Omit create graph arguments')
+    def create_graph(self, revision="", message="", repository="", owner="",
+            ci=False):
+        """ Create the taskcluster graph from the try commit message.
+
+        :param message: Commit message (ex: "try: -b o -p linux64_gecko -u gaia-unit -t none")
+        """
+        jobs = import_yaml('job_flags.yml')
+        job_graph = parse_commit(message, jobs)
+
+        # Template parameters used when expanding the graph
+        parameters = {
+            'docker_image': docker_image,
+            'repository': repository,
+            'revision': revision,
+            'owner': owner,
+            'from_now': json_time_from_now,
+            'now': datetime.datetime.now().isoformat()
+        }
+
+        # Task graph we are generating for taskcluster...
+        graph = {
+            'tasks': []
+        }
+
+        if ci is False:
+            # XXX: We need to figure out a less ugly way to store these for
+            # local testing.
+            graph['scopes'] = [
+                "docker-worker:cache:sources-mozilla-central",
+                "docker-worker:cache:sources-gaia",
+                "docker-worker:cache:build-b2g-desktop-objects"
+            ]
+
+            # XXX: This is a hack figure out how to do this correctly or sanely
+            # at least so we don't need to keep track of all worker types in
+            # existence.
+            for worker_type in LOCAL_WORKER_TYPES:
+                graph['scopes'].append(
+                    'queue:define-task:{}/{}'.format('aws-provisioner',
+                        worker_type)
+                )
+
+                graph['scopes'].append(
+                    'queue:create-task:{}/{}'.format('aws-provisioner',
+                        worker_type)
+                )
+
+            graph['metadata'] = {
+                'source': 'http://todo.com/what/goes/here',
+                'owner': owner,
+                # TODO: Add full mach commands to this example?
+                'description': 'Try task graph generated via ./mach trygraph',
+                'name': 'trygraph local'
+            }
+
+        for build in job_graph:
+            build_parameters = copy.copy(parameters)
+            build_parameters['build_slugid'] = slugid()
+            build_task = import_yaml(build['task'], build_parameters)
+
+            # Ensure each build graph is valid after construction.
+            taskcluster_graph.build_task.validate(build_task)
+            graph['tasks'].append(build_task)
+
+            tests_url = ARTIFACT_URL.format(
+                build_parameters['build_slugid'],
+                build_task['task']['extra']['locations']['tests']
+            )
+
+            build_url = ARTIFACT_URL.format(
+                build_parameters['build_slugid'],
+                build_task['task']['extra']['locations']['build']
+            )
+
+            for test in build['dependents']:
+                test_parameters = copy.copy(build_parameters)
+                test_parameters['build_url'] = build_url
+                test_parameters['tests_url'] = tests_url
+                test_parameters['total_chunks'] = 1
+
+                if 'chunks' in test:
+                    test_parameters['total_chunks'] = test['chunks']
+
+                for chunk in range(1, test_parameters['total_chunks'] + 1):
+                    test_parameters['chunk'] = chunk
+                    test_task = import_yaml(test['task'], test_parameters)
+                    test_task['taskId'] = slugid()
+
+                    if 'requires' not in test_task:
+                        test_task['requires'] = []
+
+                    test_task['requires'].append(test_parameters['build_slugid'])
+                    graph['tasks'].append(test_task)
+
+        print(json.dumps(graph, indent=4))
new file mode 100644
--- /dev/null
+++ b/testing/taskcluster/requirements.txt
@@ -0,0 +1,2 @@
+pystache == 0.5.4
+PyYAML == 3.11
new file mode 100644
--- /dev/null
+++ b/testing/taskcluster/setup.py
@@ -0,0 +1,19 @@
+import os
+from setuptools import setup, find_packages
+import sys
+
+version = '0.0.0'
+
+# dependencies
+with open('requirements.txt') as f:
+    deps = f.read().splitlines()
+
+setup(name='taskcluster_graph',
+      version=version,
+      description='',
+      classifiers=[],
+      keywords='mozilla',
+      license='MPL',
+      packages=['taskcluster_graph'],
+      install_requires=deps,
+      )
new file mode 100644
new file mode 100644
--- /dev/null
+++ b/testing/taskcluster/taskcluster_graph/build_task.py
@@ -0,0 +1,33 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# This module contains logic related to build tasks
+
+class BuildTaskValidationException(Exception):
+    pass
+
+# XXX: Consider using JSON Schema here like we do elsewhere...
+def validate(task):
+    '''
+    The build tasks have some required fields in extra this function ensures
+    they are there.
+    '''
+    if 'task' not in task:
+        raise BuildTaskValidationException('must have task field')
+
+    task_def = task['task']
+
+    if 'extra' not in task_def:
+        raise BuildTaskValidationException('build task must have task.extra props')
+
+    if 'locations' not in task_def['extra']:
+        raise BuildTaskValidationException('task.extra.locations missing')
+
+    locations = task_def['extra']['locations']
+
+    if 'build' not in locations:
+        raise BuildTaskValidationException('task.extra.locations.build missing')
+
+    if 'tests' not in locations:
+        raise BuildTaskValidationException('task.extra.locations.tests missing')
new file mode 100644
--- /dev/null
+++ b/testing/taskcluster/taskcluster_graph/commit_parser.py
@@ -0,0 +1,176 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+import shlex
+import argparse
+import functools
+import copy
+from try_test_parser import parse_test_opts
+
+TRY_DELIMITER='try:'
+
+# The build type aliases are very cryptic and only used in try flags these are
+# mappings from the single char alias to a longer more recognizable form.
+BUILD_TYPE_ALIASES = {
+    'o': 'opt',
+    'd': 'debug'
+}
+
+class InvalidCommitException(Exception):
+    pass
+
+def normalize_platform_list(all_builds, build_list):
+    if build_list == 'all':
+        return all_builds
+
+    return [ build.strip() for build in build_list.split(',') ]
+
+def normalize_test_list(all_tests, job_list):
+    '''
+    Normalize a set of jobs (builds or tests). There are three common cases:
+
+        - job_list is == 'none' (meaning an empty list)
+        - job_list is == 'all' (meaning use the list of jobs for that job type)
+        - job_list is comma delimited string which needs to be split
+
+    :param list all_tests: test flags from job_flags.yml structure.
+    :param str job_list: see above examples.
+    :returns: List of jobs
+    '''
+
+    # Empty job list case...
+    if job_list is None or job_list == 'none':
+        return []
+
+    tests = parse_test_opts(job_list)
+
+    if not tests:
+        return []
+
+    # Special case where tests is 'all' and must be expanded
+    if tests[0]['test'] == 'all':
+        results = []
+        all_entry = tests[0]
+        for test in all_tests:
+            entry = { 'test': test }
+            # If there are platform restrictions copy them across the list.
+            if 'platforms' in all_entry:
+                entry['platforms'] = list(all_entry['platforms'])
+            results.append(entry)
+        return results
+    else:
+        return tests
+
+def extract_tests_from_platform(test_jobs, build_platform, build_task, tests):
+    '''
+    Build the list of tests from the current build.
+
+    :param dict test_jobs: Entire list of tests (from job_flags.yml).
+    :param dict build_platform: Current build platform.
+    :param str build_task: Build task path.
+    :param list tests: Test flags.
+    :return: List of tasks (ex: [{ task: 'test_task.yml' }])
+    '''
+    if tests is None:
+        return []
+
+    results = []
+
+    for test_entry in tests:
+        if test_entry['test'] not in test_jobs:
+            continue
+
+        test_job = test_jobs[test_entry['test']]
+
+        # Verify that this job can actually be run on this build task...
+        if 'allowed_build_tasks' in test_job and build_task not in test_job['allowed_build_tasks']:
+            continue
+
+        if 'platforms' in test_entry:
+            # The default here is _exclusive_ rather than inclusive, so if the
+            # build platform does not specify what platform(s) it belongs to
+            # then we must skip it.
+            if 'platforms' not in build_platform:
+                continue
+
+            # Sorta hack to see if the two lists intersect at all if they do not
+            # then we must skip this set.
+            common_platforms = set(test_entry['platforms']) & set(build_platform['platforms'])
+            if not common_platforms:
+                # Tests should not run on this platform...
+                continue
+
+        # Add the job to the list and ensure to copy it so we don't accidentally
+        # mutate the state of the test job in the future...
+        results.append(copy.deepcopy(test_job))
+
+    return results
+
+'''
+This module exists to deal with parsing the options flags that try uses. We do
+not try to build a graph or anything here but match up build flags to tasks via
+the "jobs" datastructure (see job_flags.yml)
+'''
+
+def parse_commit(message, jobs):
+    '''
+    :param message: Commit message that is typical to a try push.
+    :param jobs: Dict (see job_flags.yml)
+    '''
+
+    # shlex used to ensure we split correctly when giving values to argparse.
+    parts = shlex.split(message)
+
+    if parts[0] != TRY_DELIMITER:
+        raise InvalidCommitException('Invalid commit format must start with ' +
+                TRY_DELIMITER)
+
+    # Argument parser based on try flag flags
+    parser = argparse.ArgumentParser()
+    parser.add_argument('-b', dest='build_types')
+    parser.add_argument('-p', dest='platforms')
+    parser.add_argument('-u', dest='tests')
+    args, unknown = parser.parse_known_args(parts[1:])
+
+    # Sanity check platforms...
+    if args.platforms is None:
+        return []
+
+    # Then builds...
+    if args.build_types is None:
+        return []
+
+    build_types = [ BUILD_TYPE_ALIASES.get(build_type, build_type) for
+            build_type in args.build_types ]
+
+    platforms = normalize_platform_list(jobs['flags']['builds'], args.platforms)
+    tests = normalize_test_list(jobs['flags']['tests'], args.tests)
+
+    result = []
+
+    # Expand the matrix of things!
+    for platform in platforms:
+        # Silently skip unknown platforms.
+        if platform not in jobs['builds']:
+            continue
+
+        platform_builds = jobs['builds'][platform]
+
+        for build_type in build_types:
+            # Not all platforms have debug builds, etc...
+            if build_type not in platform_builds['types']:
+                continue
+
+            build_task = platform_builds['types'][build_type]
+
+            # Node for this particular build type
+            result.append({
+                'task': build_task,
+                'dependents': extract_tests_from_platform(
+                    jobs['tests'], platform_builds, build_task, tests
+                )
+            })
+
+    return result
new file mode 100644
--- /dev/null
+++ b/testing/taskcluster/taskcluster_graph/from_now.py
@@ -0,0 +1,79 @@
+# Python port of the ms.js node module. This is not a direct port; some things
+# are more complicated or less precise, and we lean on timedelta here.
+
+import re
+import datetime
+
+PATTERN=re.compile(
+    '((?:\d+)?\.?\d+) *([a-z])'
+)
+
+def seconds(value):
+    return datetime.timedelta(seconds=int(value))
+
+def minutes(value):
+    return datetime.timedelta(minutes=int(value))
+
+def hours(value):
+    return datetime.timedelta(hours=int(value))
+
+def days (value):
+    return datetime.timedelta(days=int(value))
+
+def years(value):
+    # Warning: "years" here are vague; don't use this for really sensitive date
+    # computation. The idea is to give you an absolute amount of time in the
+    # future, which is not the same thing as "precisely on this date next year".
+    return datetime.timedelta(days=int(value) * 365)
+
+ALIASES = {}
+ALIASES['seconds'] = ALIASES['second'] = ALIASES['s'] = seconds
+ALIASES['minutes'] = ALIASES['minute'] = ALIASES['m'] = minutes
+ALIASES['hours'] = ALIASES['hour'] = ALIASES['h'] = hours
+ALIASES['days'] = ALIASES['day'] = ALIASES['d'] = days
+ALIASES['years'] = ALIASES['year'] = ALIASES['y'] = years
+
+class InvalidString(Exception):
+    pass
+
+class UnknownTimeMeasurement(Exception):
+    pass
+
+def value_of(input_str):
+    '''
+    Convert a string describing an amount of time into a timedelta.
+    :param str input_str: (ex: 1d, 2d, 6years, 2 seconds)
+    :returns: datetime.timedelta for the given amount of time
+    '''
+
+    matches = PATTERN.search(input_str)
+
+    if matches is None or len(matches.groups()) < 2:
+        raise InvalidString("'{}' is invalid string".format(input_str))
+
+    value, unit = matches.groups()
+
+    if unit not in ALIASES:
+        raise UnknownTimeMeasurement(
+            '{} is not a valid time measure use one of {}'.format(unit,
+                sorted(ALIASES.keys()))
+        )
+
+    return ALIASES[unit](value)
+
+def json_time_from_now(input_str, now=None):
+    '''
+    :param str input_str: Input string (see value of)
+    :param datetime now: Optionally set the definition of `now`
+    :returns: JSON string representation of time in future.
+    '''
+
+    if now is None:
+        now = datetime.datetime.utcnow()
+
+    time = now + value_of(input_str)
+
+    # Sorta a big hack but the json schema validator for date does not like the
+    # ISO dates until 'Z' (for timezone) is added...
+    return time.isoformat() + 'Z'
+
new file mode 100644
--- /dev/null
+++ b/testing/taskcluster/taskcluster_graph/slugid.py
@@ -0,0 +1,14 @@
+import uuid
+import base64
+
+def slugid():
+    '''
+    Logic and rationale of this construct here:
+    https://github.com/jonasfj/slugid/blob/29be40074646b97e5ed02da257918467fac07c4a/slugid.js#L46
+    '''
+    encoded = base64.b64encode(str(uuid.uuid4().bytes))
+    encoded = encoded.replace('+', '-') # Replace + with - (see RFC 4648, sec. 5)
+    encoded = encoded.replace('/', '_') # Replace / with _ (see RFC 4648, sec. 5)
+    encoded = encoded.replace('=', '') # Drop '==' padding
+
+    return encoded
new file mode 100644
--- /dev/null
+++ b/testing/taskcluster/taskcluster_graph/try_test_parser.py
@@ -0,0 +1,57 @@
+def parse_test_opts(input_str):
+    '''
+    Test argument parsing is surprisingly complicated with the "restrictions"
+    logic; this function is responsible for parsing it out into an easier-to-
+    work-with structure like { test: '..', platforms: ['..'] }
+    '''
+
+    # Final results which we will return.
+    tests = []
+
+    cur_test = {}
+    token = ''
+    in_platforms = False
+
+    def add_test(value):
+        cur_test['test'] = value.strip()
+        tests.insert(0, cur_test)
+
+    def add_platform(value):
+        cur_test['platforms'].insert(0, value.strip())
+
+    # This might be somewhat confusing but we parse the string _backwards_ so
+    # there is no ambiguity over what state we are in.
+    for char in reversed(input_str):
+
+        # , indicates exiting a state
+        if char == ',':
+
+            # Exit a particular platform.
+            if in_platforms:
+                add_platform(token)
+
+            # Exit a particular test.
+            else:
+                add_test(token)
+                cur_test = {}
+
+            # Token must always be reset after we exit a state
+            token = ''
+        elif char == '[':
+            # Exiting platform state entering test state.
+            add_platform(token)
+            token = ''
+            in_platforms = False
+        elif char == ']':
+            # Entering platform state.
+            in_platforms = True
+            cur_test['platforms'] = []
+        else:
+            # Accumulator.
+            token = char + token
+
+    # Handle any left over tokens.
+    if token:
+        add_test(token)
+
+    return tests
new file mode 100644
--- /dev/null
+++ b/testing/taskcluster/tasks/b2g_desktop.json
@@ -0,0 +1,51 @@
+{
+  "metadata": {
+    "name": "Gaia unit tests in b2g-desktop chunk 5",
+    "description": "Full gaia unit test suite",
+    "source": "http://todo.com/soon",
+    "owner": "jlal@mozilla.com"
+  },
+  "workerType": "gaia-staging",
+  "provisionerId": "aws-provisioner",
+  "scopes": [
+    "docker-worker:cache:sources-mozilla-central",
+    "docker-worker:cache:sources-gaia",
+    "docker-worker:cache:build-b2g-desktop-objects"
+  ],
+  "payload": {
+    "cache": {
+      "sources-gaia": "/home/worker/gaia",
+      "sources-mozilla-central": "/home/worker/mozilla-central",
+      "build-b2g-desktop-objects": "/home/worker/object-folder"
+    },
+    "env": {
+      "MOZCONFIG": "/home/worker/mozconfigs/b2g-desktop",
+      "REPOSITORY": "https://hg.mozilla.org/mozilla-central/",
+      "REVISION": "cc7fc5918561"
+    },
+    "command": [
+      "build-b2g-desktop.sh"
+    ],
+    "artifacts": {
+      "public/build": {
+        "type": "directory",
+        "path": "/home/worker/artifacts/",
+        "expires": "2015-10-12T06:36:04.550Z"
+      }
+    },
+    "image": "quay.io/mozilla/builder:0.0.11",
+    "maxRunTime": 6000
+  },
+  "extra": {
+    "treeherder": {
+      "symbol": "B"
+    }
+  },
+  "created": "2014-10-12T06:36:04.766Z",
+  "schedulerId": "task-graph-scheduler",
+  "deadline": "2014-10-20T06:36:04.567Z",
+  "taskGroupId": "2XxDQycZQHKZbyDQUeXu0w",
+  "routes": [],
+  "retries": 5,
+  "tags": {}
+}
new file mode 100644
--- /dev/null
+++ b/testing/taskcluster/tasks/builds/b2g_desktop.yml
@@ -0,0 +1,50 @@
+taskId: {{build_slugid}}
+task:
+  metadata:
+    source: http://todo.com/soon
+    owner: {{owner}}
+    name: B2G Desktop Opt
+    description: B2G Desktop Opt
+
+  workerType: b2gbuild
+  provisionerId: aws-provisioner
+
+  scopes:
+    - 'docker-worker:cache:sources-mozilla-central'
+    - 'docker-worker:cache:sources-gaia'
+    - 'docker-worker:cache:build-b2g-desktop-objects'
+
+  payload:
+    cache:
+      sources-gaia: '/home/worker/gaia'
+      sources-mozilla-central: '/home/worker/mozilla-central'
+      build-b2g-desktop-objects: '/home/worker/object-folder'
+
+    env:
+      MOZCONFIG: '/home/worker/mozconfigs/b2g-desktop'
+      # revision/project params defined originally here https://github.com/taskcluster/taskcluster-try/blob/master/try/instantiate.js
+      REVISION: '{{revision}}'
+      REPOSITORY: '{{repository}}'
+
+    image: '{{#docker_image}}builder{{/docker_image}}'
+    maxRunTime: 3600
+
+    command:
+      - build-b2g-desktop.sh
+
+    artifacts:
+      'public/build':
+        type: directory
+        path: '/home/worker/artifacts/'
+        expires: '{{#from_now}}1 year{{/from_now}}'
+
+  extra:
+    # Rather than enforcing particular conventions we require that all build
+    # tasks provide the "build" extra field to specify where the build and tests
+    # files are located.
+    locations:
+      build: 'public/build/target.linux-x86_64.tar.bz2'
+      tests: 'public/build/target.tests.zip'
+
+    treeherder:
+      symbol: B
new file mode 100644
--- /dev/null
+++ b/testing/taskcluster/tasks/mochitest.json
@@ -0,0 +1,38 @@
+{
+  "workerType": "b2gtest",
+  "metadata": {
+    "owner": "jlal@mozilla.com",
+    "source": "http://todo.com/soon",
+    "name": "B2G Mochitests 1",
+    "description": "B2G Desktop Mochi test run 1"
+  },
+  "payload": {
+    "maxRunTime": 3600,
+    "artifacts": {
+      "public/build": {
+        "path": "/home/worker/artifacts/",
+        "expires": "2015-10-19T19:01:54.080641Z",
+        "type": "directory"
+      }
+    },
+    "image": "quay.io/mozilla/tester:0.0.2",
+    "command": [
+      "entrypoint",
+      "python ./mozharness/scripts/b2g_desktop_unittest.py --no-read-buildbot-config --config-file /home/worker/b2g-desktop-config.py --installer-url https://queue.taskcluster.net/v1/task/wFJY0lENTHibWYQJNzHcsw/runs/0/artifacts/public/build/target.linux-x86_64.tar.bz2 --test-url https://queue.taskcluster.net/v1/task/wFJY0lENTHibWYQJNzHcsw/runs/0/artifacts/public/build/target.tests.zip --download-symbols ondemand --test-suite mochitest --total-chunk=1 --this-chunk=1\n"
+    ]
+  },
+  "provisionerId": "aws-provisioner",
+  "extra": {
+    "treeherder": {
+      "symbol": "B"
+    }
+  },
+  "created": "2014-10-20T02:01:54.095Z",
+  "deadline": "2014-10-21T02:01:54.095Z",
+  "schedulerId": "task-graph-scheduler",
+  "taskGroupId": "ley4JgmQTSaFLr-lfH9O3w",
+  "routes": [],
+  "retries": 5,
+  "scopes": [],
+  "tags": {}
+}
new file mode 100644
--- /dev/null
+++ b/testing/taskcluster/tasks/tests/b2g_mochitest.yml
@@ -0,0 +1,38 @@
+---
+task:
+  metadata:
+    source: http://todo.com/soon
+    owner: {{owner}}
+    name: B2G Mochitests {{chunk}}
+    description: B2G Desktop Mochi test run {{chunk}}
+
+  workerType: b2gtest
+  provisionerId: aws-provisioner
+
+  payload:
+
+    image: '{{#docker_image}}tester{{/docker_image}}'
+    maxRunTime: 3600
+
+    command:
+      - entrypoint # entrypoint ensures we are running in xvfb
+      - >
+        python ./mozharness/scripts/b2g_desktop_unittest.py
+        --no-read-buildbot-config
+        --config-file /home/worker/b2g-desktop-config.py
+        --installer-url {{build_url}}
+        --test-url {{tests_url}}
+        --download-symbols ondemand
+        --test-suite mochitest
+        --total-chunk={{total_chunks}}
+        --this-chunk={{chunk}}
+    artifacts:
+      'public/build':
+        type: directory
+        path: '/home/worker/artifacts/'
+        expires: '{{#from_now}}1 year{{/from_now}}'
+
+  extra:
+    treeherder:
+      symbol: B
+
new file mode 100755
--- /dev/null
+++ b/testing/taskcluster/tests/test_build_task.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+import mozunit
+import taskcluster_graph.build_task as build_task
+
+class TestBuildTask(unittest.TestCase):
+
+    def test_validate_missing_extra(self):
+        with self.assertRaises(build_task.BuildTaskValidationException):
+            build_task.validate({})
+
+    def test_validate_valid(self):
+        with self.assertRaises(build_task.BuildTaskValidationException):
+            build_task.validate({
+                'extra': {
+                    'locations': {
+                        'build': '',
+                        'tests': ''
+                    }
+                }
+            })
+
+if __name__ == '__main__':
+    mozunit.main()
+
new file mode 100755
--- /dev/null
+++ b/testing/taskcluster/tests/test_commit_parser.py
@@ -0,0 +1,320 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+import mozunit
+from taskcluster_graph.commit_parser import (
+    parse_commit,
+    normalize_test_list,
+    InvalidCommitException
+)
+
+class TestCommitParser(unittest.TestCase):
+
+    def test_normalize_test_list_none(self):
+        self.assertEqual(
+            normalize_test_list(['woot'], 'none'), []
+        )
+
+    def test_normalize_test_list_all(self):
+        self.assertEqual(
+            normalize_test_list(['woot'], 'all'),
+            [{ 'test': 'woot' }]
+        )
+
+    def test_normalize_test_list_specific_tests(self):
+        self.assertEqual(
+            normalize_test_list(['woot'], 'a,b,c'),
+            [{ 'test': 'a' }, { 'test': 'b' }, { 'test': 'c' }]
+        )
+
+    def test_normalize_test_list_specific_tests_with_whitespace(self):
+        self.assertEqual(
+            normalize_test_list(['woot'], 'a, b, c'),
+            [{ 'test': 'a' }, { 'test': 'b' }, { 'test': 'c' }]
+        )
+
+    def test_invalid_commit(self):
+        '''
+        Disallow invalid commit messages from being parsed...
+        '''
+        with self.assertRaises(InvalidCommitException):
+            parse_commit("wootbarbaz", {})
+
+    def test_commit_no_tests(self):
+        '''
+        This test covers the case of builds but no tests passed -u none
+        '''
+        commit = 'try: -b o -p linux -u none -t none'
+        jobs = {
+            'flags': {
+                'builds': ['linux', 'linux64'],
+                'tests': ['web-platform-tests'],
+            },
+            'builds': {
+                'linux': {
+                    'types': {
+                        'opt': 'task/linux',
+                        'debug': 'task/linux-debug'
+                    }
+                },
+            },
+            'tests': {}
+        }
+
+        expected = [
+            {
+                'task': 'task/linux',
+                'dependents': []
+            }
+        ]
+
+        result = parse_commit(commit, jobs)
+        self.assertEqual(expected, result)
+
+    def test_commit_all_builds_no_tests(self):
+        '''
+        This test covers the case of all builds but no tests passed -u none
+        '''
+        commit = 'try: -b o -p all -u none -t none'
+        jobs = {
+            'flags': {
+                'builds': ['linux', 'linux64'],
+                'tests': ['web-platform-tests'],
+            },
+            'builds': {
+                'linux': {
+                    'types': {
+                        'opt': 'task/linux',
+                        'debug': 'task/linux-debug'
+                    }
+                },
+            },
+            'tests': {}
+        }
+
+        expected = [
+            {
+                'task': 'task/linux',
+                'dependents': []
+            }
+        ]
+
+        result = parse_commit(commit, jobs)
+        self.assertEqual(expected, result)
+
+    def test_some_test_tasks_restricted(self):
+        '''
+        This test covers the case where a test task is restricted to specific build tasks (-u all with allowed_build_tasks).
+        '''
+        commit = 'try: -b do -p all -u all -t none'
+        jobs = {
+            'flags': {
+                'builds': ['linux', 'linux64'],
+                'tests': ['web-platform-tests'],
+            },
+            'builds': {
+                'linux': {
+                    'types': {
+                        'opt': 'task/linux',
+                        'debug': 'task/linux-debug'
+                    }
+                },
+            },
+            'tests': {
+                'web-platform-tests': {
+                    'task': 'task/web-platform-tests',
+                    'allowed_build_tasks': [
+                        'task/linux'
+                    ]
+                }
+            }
+        }
+
+        expected = [
+            {
+                'task': 'task/linux-debug',
+                'dependents': []
+            },
+            {
+                'task': 'task/linux',
+                'dependents': [{
+                    'task': 'task/web-platform-tests',
+                    'allowed_build_tasks': ['task/linux']
+                }]
+            }
+        ]
+
+        result = parse_commit(commit, jobs)
+        self.assertEqual(expected, result)
+
+    def test_specific_test_platforms(self):
+        '''
+        This test case covers the platform specific test exclusion options.
+        '''
+        commit = 'try: -b od -p all -u all[windows,b2g] -t none'
+        jobs = {
+            'flags': {
+                'builds': ['linux', 'win32'],
+                'tests': ['web-platform-tests', 'mochitest'],
+            },
+            'builds': {
+                'linux': {
+                    'types': {
+                        'opt': 'task/linux',
+                        'debug': 'task/linux-debug'
+                    }
+                },
+                'win32': {
+                    'platforms': ['windows'],
+                    'types': {
+                        'opt': 'task/win32',
+                    }
+                },
+            },
+            'tests': {
+                'web-platform-tests': {
+                    'task': 'task/web-platform-tests',
+                },
+                'mochitest': {
+                    'task': 'task/mochitest',
+                }
+            }
+        }
+
+        expected = [
+            {
+                'task': 'task/linux',
+                'dependents': []
+            },
+            {
+                'task': 'task/linux-debug',
+                'dependents': []
+            },
+            {
+                'task': 'task/win32',
+                'dependents': [
+                    { 'task': 'task/web-platform-tests' },
+                    { 'task': 'task/mochitest' }
+                ]
+            }
+        ]
+
+        result = parse_commit(commit, jobs)
+        self.assertEqual(expected, result)
+
+    def test_specific_test_platforms_with_specific_platform(self):
+        '''
+        This test case covers the platform specific test exclusion options.
+        '''
+        commit = 'try: -b od -p win32 -u mochitest[windows] -t none'
+        jobs = {
+            'flags': {
+                'builds': ['linux', 'win32'],
+                'tests': ['web-platform-tests', 'mochitest'],
+            },
+            'builds': {
+                'linux': {
+                    'types': {
+                        'opt': 'task/linux',
+                        'debug': 'task/linux-debug'
+                    }
+                },
+                'win32': {
+                    'platforms': ['windows'],
+                    'types': {
+                        'opt': 'task/win32',
+                    }
+                },
+            },
+            'tests': {
+                'web-platform-tests': {
+                    'task': 'task/web-platform-tests',
+                },
+                'mochitest': {
+                    'task': 'task/mochitest',
+                }
+            }
+        }
+
+        expected = [
+            {
+                'task': 'task/win32',
+                'dependents': [
+                    { 'task': 'task/mochitest' }
+                ]
+            }
+        ]
+
+        result = parse_commit(commit, jobs)
+        self.assertEqual(expected, result)
+
+    def test_commit_with_builds_and_tests(self):
+        '''
+        This test covers the broad case of a commit which has both builds and
+        tests without any exclusions or other fancy logic.
+        '''
+        commit = 'try: -b od -p linux,linux64 -u web-platform-tests -t none'
+        jobs = {
+            'flags': {
+                'builds': ['linux', 'linux64'],
+                'tests': ['web-platform-tests'],
+            },
+            'builds': {
+                'linux': {
+                    'types': {
+                        'opt': 'task/linux',
+                        'debug': 'task/linux-debug'
+                    }
+                },
+                'linux64': {
+                    'types': {
+                        'opt': 'task/linux64',
+                        'debug': 'task/linux64-debug'
+                    }
+                }
+            },
+            'tests': {
+                'web-platform-tests': {
+                    'task': 'task/web-platform-tests'
+                }
+            }
+        }
+
+        expected = [
+            {
+                'task': 'task/linux',
+                'dependents': [
+                    { 'task': 'task/web-platform-tests' }
+                ]
+            },
+            {
+                'task': 'task/linux-debug',
+                'dependents': [
+                    { 'task': 'task/web-platform-tests' }
+                ]
+            },
+            {
+                'task': 'task/linux64',
+                'dependents': [
+                    { 'task': 'task/web-platform-tests' }
+                ]
+            },
+            {
+                'task': 'task/linux64-debug',
+                'dependents': [
+                    { 'task': 'task/web-platform-tests' }
+                ]
+            }
+        ]
+
+        result = parse_commit(commit, jobs)
+        self.assertEqual(expected, result)
+
+
+if __name__ == '__main__':
+    mozunit.main()
+
new file mode 100755
--- /dev/null
+++ b/testing/taskcluster/tests/test_from_now.py
@@ -0,0 +1,47 @@
+import unittest
+import mozunit
+from datetime import datetime
+from taskcluster_graph.from_now import (
+    InvalidString,
+    UnknownTimeMeasurement,
+    value_of,
+    json_time_from_now
+)
+
+class FromNowTest(unittest.TestCase):
+
+    def test_invalid_str(self):
+        with self.assertRaises(InvalidString):
+            value_of('wtfs')
+
+    def test_missing_unit(self):
+        with self.assertRaises(InvalidString):
+            value_of('1')
+
+    def test_missing_unknown_unit(self):
+        with self.assertRaises(UnknownTimeMeasurement):
+            value_of('1z')
+
+    def test_value_of(self):
+        self.assertEqual(value_of('1s').total_seconds(), 1)
+        self.assertEqual(value_of('1 second').total_seconds(), 1)
+        self.assertEqual(value_of('1m').total_seconds(), 60)
+        self.assertEqual(value_of('1h').total_seconds(), 3600)
+        self.assertEqual(value_of('1d').total_seconds(), 86400)
+        self.assertEqual(value_of('1y').total_seconds(), 31536000)
+
+    def test_json_from_now_utc_now(self):
+        # Just here to ensure we don't raise.
+        time = json_time_from_now('1 years')
+
+    def test_json_from_now(self):
+        now = datetime(2014, 1, 1)
+        self.assertEqual(json_time_from_now('1 years', now),
+                '2015-01-01T00:00:00Z')
+        self.assertEqual(json_time_from_now('6 days', now),
+                '2014-01-07T00:00:00Z')
+
+if __name__ == '__main__':
+    mozunit.main()
+
+
new file mode 100755
--- /dev/null
+++ b/testing/taskcluster/tests/test_try_test_parser.py
@@ -0,0 +1,39 @@
+import unittest
+import mozunit
+from taskcluster_graph.try_test_parser import parse_test_opts
+
+class TryTestParserTest(unittest.TestCase):
+
+    def test_parse_opts_valid(self):
+        self.assertEquals(
+            parse_test_opts('all[Amazing, Foobar woot,yeah]'),
+            [{ 'test': 'all', 'platforms': ['Amazing', 'Foobar woot', 'yeah'] }]
+        )
+
+        self.assertEquals(
+            parse_test_opts('a,b, c'),
+            [
+                { 'test': 'a' },
+                { 'test': 'b' },
+                { 'test': 'c' },
+            ]
+        )
+        self.assertEquals(
+            parse_test_opts('woot, bar[b], baz, qux[ z ],a'),
+            [
+                { 'test': 'woot' },
+                { 'test': 'bar', 'platforms': ['b'] },
+                { 'test': 'baz' },
+                { 'test': 'qux', 'platforms': ['z'] },
+                { 'test': 'a' }
+            ]
+        )
+
+        self.assertEquals(
+            parse_test_opts(''),
+            []
+        )
+
+if __name__ == '__main__':
+    mozunit.main()
+