Bug 1399800 - integrate pytest and add initial tests r=jmaher
authorIonut Goldan <igoldan@mozilla.com>
Tue, 24 Oct 2017 14:20:01 +0300
changeset 388529 fdb45c5ec56da319edab3165e7676a80ad2d9fbe
parent 388528 05a8206ba5840fa253540f97dc6e3f9a37f27a95
child 388530 76eee0a0c764176017a1c5e344d9de2a3deb2cc2
push id32753
push userarchaeopteryx@coole-files.de
push dateFri, 27 Oct 2017 09:43:22 +0000
treeherdermozilla-central@d9613617f268 [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewersjmaher
bugs1399800
milestone58.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1399800 - integrate pytest and add initial tests r=jmaher MozReview-Commit-ID: FNQwCEtM1MB
build/virtualenv_packages.txt
moz.build
testing/talos/talos/filter.py
testing/talos/talos/test.py
testing/talos/talos/unittests/__init__.py
testing/talos/talos/unittests/browser_output.ts.txt
testing/talos/talos/unittests/browser_output.tsvg.txt
testing/talos/talos/unittests/conftest.py
testing/talos/talos/unittests/profile.tgz
testing/talos/talos/unittests/ps-Acj.out
testing/talos/talos/unittests/python.ini
testing/talos/talos/unittests/test_cmanager.py
testing/talos/talos/unittests/test_cmanager_base.py
testing/talos/talos/unittests/test_cmanager_linux.py
testing/talos/talos/unittests/test_cmanager_mac.py
testing/talos/talos/unittests/test_cmanger_win32.py
testing/talos/talos/unittests/test_cmdline.py
testing/talos/talos/unittests/test_config.py
testing/talos/talos/unittests/test_ffsetup.py
testing/talos/talos/unittests/test_gecko_profile.py
testing/talos/talos/unittests/test_mainthreadio.py
testing/talos/talos/unittests/test_mitmproxy/__init__.py
testing/talos/talos/unittests/test_output.py
testing/talos/talos/unittests/test_pageloader/__init__.py
testing/talos/talos/unittests/test_profiler/__init__.py
testing/talos/talos/unittests/test_run_tests.py
testing/talos/talos/unittests/test_scripts/__init__.py
testing/talos/talos/unittests/test_startup_test/__init__.py
testing/talos/talos/unittests/test_talos_powers/__init__.py
testing/talos/talos/unittests/test_talos_process.py
testing/talos/talos/unittests/test_talosconfig_browser_config.json
testing/talos/talos/unittests/test_talosconfig_test_config.json
testing/talos/talos/unittests/test_test.py
testing/talos/talos/unittests/test_tests/__init__.py
testing/talos/talos/unittests/test_ttest.py
testing/talos/talos/unittests/test_whitelist.py
testing/talos/talos/unittests/test_xtalos/__init__.py
testing/talos/talos/unittests/xrestop_output.txt
testing/talos/tests/__init__.py
testing/talos/tests/browser_output.ts.txt
testing/talos/tests/browser_output.tsvg.txt
testing/talos/tests/profile.tgz
testing/talos/tests/ps-Acj.out
testing/talos/tests/test_browser_output.py
testing/talos/tests/test_filter.py
testing/talos/tests/test_heavy.py
testing/talos/tests/test_results.py
testing/talos/tests/test_talosconfig.py
testing/talos/tests/test_talosconfig_browser_config.json
testing/talos/tests/test_talosconfig_test_config.json
testing/talos/tests/test_urlsplit.py
testing/talos/tests/test_utils.py
testing/talos/tests/test_xrestop.py
testing/talos/tests/xrestop_output.txt
--- a/build/virtualenv_packages.txt
+++ b/build/virtualenv_packages.txt
@@ -37,16 +37,17 @@ mozilla.pth:layout/tools/reftest
 mozilla.pth:other-licenses/ply/
 mozilla.pth:taskcluster
 mozilla.pth:testing
 mozilla.pth:testing/firefox-ui/harness
 mozilla.pth:testing/marionette/client
 mozilla.pth:testing/marionette/harness
 mozilla.pth:testing/marionette/harness/marionette_harness/runner/mixins/browsermob-proxy-py
 mozilla.pth:testing/marionette/puppeteer/firefox
+mozilla.pth:testing/talos
 packages.txt:testing/mozbase/packages.txt
 mozilla.pth:tools
 mozilla.pth:testing/web-platform
 mozilla.pth:testing/web-platform/tests/tools/wptrunner
 mozilla.pth:testing/web-platform/tests/tools/wptserve
 mozilla.pth:testing/web-platform/tests/tools/six
 mozilla.pth:testing/xpcshell
 mozilla.pth:third_party/python/mock-1.0.0
--- a/moz.build
+++ b/moz.build
@@ -80,16 +80,17 @@ DIRS += [
 ]
 
 if CONFIG['MOZ_WIDGET_TOOLKIT'] or not CONFIG['MOZ_BUILD_APP']:
     # These python manifests are included here so they get picked up without an objdir
     PYTHON_UNITTEST_MANIFESTS += [
         'layout/tools/reftest/selftest/python.ini',
         'testing/marionette/harness/marionette_harness/tests/harness_unit/python.ini',
         'testing/mochitest/tests/python/python.ini',
+        'testing/talos/talos/unittests/python.ini'
     ]
 
     CONFIGURE_SUBST_FILES += [
         'tools/update-packaging/Makefile',
     ]
     CONFIGURE_DEFINE_FILES += [
         'mozilla-config.h',
     ]
--- a/testing/talos/talos/filter.py
+++ b/testing/talos/talos/filter.py
@@ -13,16 +13,18 @@ Each filter is a simple function, but it
   from talos import filter
   filters = filter.ignore_first.prepare(1) + filter.median.prepare()
 
   for filter in filters:
       data = filter(data)
   # data is filtered
 """
 
+_FILTERS = {}
+
 
 class Filter(object):
     def __init__(self, func, *args, **kwargs):
         """
         Takes a filter function, and save args and kwargs that
         should be used when the filter is used.
         """
         self.func = func
@@ -40,137 +42,193 @@ def define_filter(func):
     """
     decorator to attach the prepare method.
     """
     def prepare(*args, **kwargs):
         return (Filter(func, *args, **kwargs),)
     func.prepare = prepare
     return func
 
+
+def register_filter(func):
+    """
+    all filters defined in this module
+    should be registered
+    """
+    global _FILTERS
+
+    _FILTERS[func.__name__] = func
+    return func
+
+
+def filters(*args):
+    global _FILTERS
+
+    filters_ = [_FILTERS[filter] for filter in args]
+    return filters_
+
+
+def apply(data, filters):
+    for filter in filters:
+        data = filter(data)
+
+    return data
+
+
+def parse(string_):
+
+    def to_number(string_number):
+        try:
+            return int(string_number)
+        except ValueError:
+            return float(string_number)
+
+    tokens = string_.split(":")
+
+    func = tokens[0]
+    digits = []
+    if len(tokens) > 1:
+        digits.extend(tokens[1].split(","))
+        digits = [to_number(digit) for digit in digits]
+
+    return [func, digits]
+
+
 # filters that return a scalar
 
-
+@register_filter
 @define_filter
 def mean(series):
     """
     mean of data; needs at least one data point
     """
     return sum(series)/float(len(series))
 
 
+@register_filter
 @define_filter
 def median(series):
     """
     median of data; needs at least one data point
     """
     series = sorted(series)
     if len(series) % 2:
         # odd
         return series[len(series)/2]
     else:
         # even
         middle = len(series)/2  # the higher of the middle 2, actually
         return 0.5*(series[middle-1] + series[middle])
 
 
+@register_filter
 @define_filter
 def variance(series):
     """
     variance: http://en.wikipedia.org/wiki/Variance
     """
 
     _mean = mean(series)
     variance = sum([(i-_mean)**2 for i in series])/float(len(series))
     return variance
 
 
+@register_filter
 @define_filter
 def stddev(series):
     """
     standard deviation: http://en.wikipedia.org/wiki/Standard_deviation
     """
     return variance(series)**0.5
 
 
+@register_filter
 @define_filter
 def dromaeo(series):
     """
     dromaeo: https://wiki.mozilla.org/Dromaeo, pull the internal calculation
     out
       * This is for 'runs/s' based tests, not 'ms' tests.
       * chunksize: defined in dromaeo: tests/dromaeo/webrunner.js#l8
     """
     means = []
     chunksize = 5
     series = list(dromaeo_chunks(series, chunksize))
     for i in series:
         means.append(mean(i))
     return geometric_mean(means)
 
 
+@register_filter
 @define_filter
 def dromaeo_chunks(series, size):
     for i in range(0, len(series), size):
         yield series[i:i+size]
 
 
+@register_filter
 @define_filter
 def geometric_mean(series):
     """
     geometric_mean: http://en.wikipedia.org/wiki/Geometric_mean
     """
     total = 0
     for i in series:
         total += math.log(i+1)
     return math.exp(total / len(series)) - 1
 
 # filters that return a list
 
 
+@register_filter
 @define_filter
 def ignore_first(series, number=1):
     """
     ignore first datapoint
     """
     if len(series) <= number:
         # don't modify short series
         return series
     return series[number:]
 
 
+@register_filter
 @define_filter
 def ignore(series, function):
     """
     ignore the first value of a list given by function
     """
     if len(series) <= 1:
         # don't modify short series
         return series
     series = series[:]  # do not mutate the original series
     value = function(series)
     series.remove(value)
     return series
 
 
+@register_filter
 @define_filter
 def ignore_max(series):
     """
     ignore maximum data point
     """
     return ignore(series, max)
 
 
+@register_filter
 @define_filter
 def ignore_min(series):
     """
     ignore minimum data point
     """
     return ignore(series, min)
 
 
+@register_filter
 @define_filter
 def v8_subtest(series, name):
     """
        v8 benchmark score - modified for no sub benchmarks.
        * removed Crypto and kept Encrypt/Decrypt standalone
        * removed EarlyBoyer and kept Earley/Boyer standalone
 
        this is not 100% in parity but within .3%
@@ -185,11 +243,12 @@ def v8_subtest(series, name):
                  'RegExp': 910985.,
                  'Richards': 35302.,
                  'Splay': 81491.
                  }
 
     return reference[name] / geometric_mean(series)
 
 
+@register_filter
 @define_filter
 def responsiveness_Metric(val_list):
     return sum([float(x)*float(x) / 1000000.0 for x in val_list])
--- a/testing/talos/talos/test.py
+++ b/testing/talos/talos/test.py
@@ -24,16 +24,18 @@ def register_test():
 
 def test_dict():
     """Return the dict of the registered test classes"""
     return _TESTS
 
 
 class Test(object):
     """abstract base class for a Talos test case"""
+    __test__ = False  # not pytest
+
     cycles = None  # number of cycles
     keys = []
     desktop = True
     filters = filter.ignore_first.prepare(1) + filter.median.prepare()
     lower_is_better = True
     alert_threshold = 2.0
 
     @classmethod
rename from testing/talos/tests/__init__.py
rename to testing/talos/talos/unittests/__init__.py
rename from testing/talos/tests/browser_output.ts.txt
rename to testing/talos/talos/unittests/browser_output.ts.txt
rename from testing/talos/tests/browser_output.tsvg.txt
rename to testing/talos/talos/unittests/browser_output.tsvg.txt
new file mode 100644
--- /dev/null
+++ b/testing/talos/talos/unittests/conftest.py
@@ -0,0 +1,17 @@
+from __future__ import absolute_import
+
+import os
+
+here = os.path.realpath(__file__)
+__TESTS_DIR = os.path.join(os.path.dirname(os.path.dirname(here)), 'tests')
+
+
+def remove_develop_files(starting_dir=__TESTS_DIR):
+    for file_name in os.listdir(starting_dir):
+
+        file_path = os.path.join(starting_dir, file_name)
+
+        if file_name.endswith('.develop') and os.path.isfile(file_path):
+            os.remove(file_path)
+        elif os.path.isdir(file_path):
+            remove_develop_files(file_path)
rename from testing/talos/tests/profile.tgz
rename to testing/talos/talos/unittests/profile.tgz
rename from testing/talos/tests/ps-Acj.out
rename to testing/talos/talos/unittests/ps-Acj.out
new file mode 100644
--- /dev/null
+++ b/testing/talos/talos/unittests/python.ini
@@ -0,0 +1,6 @@
+[DEFAULT]
+subsuite = talos
+
+[test_config.py]
+[test_ffsetup.py]
+[test_test.py]
new file mode 100644
new file mode 100644
new file mode 100644
new file mode 100644
new file mode 100644
new file mode 100644
new file mode 100644
--- /dev/null
+++ b/testing/talos/talos/unittests/test_config.py
@@ -0,0 +1,1186 @@
+from __future__ import absolute_import
+
+import copy
+import os
+
+import mock
+
+import mozunit
+import pytest
+import conftest
+
+from talos.config import (
+    get_active_tests, get_test, get_config, get_browser_config,
+    get_configs, ConfigurationError, DEFAULTS)
+from talos.test import PageloaderTest
+
+ORIGINAL_DEFAULTS = copy.deepcopy(DEFAULTS)
+
+
+class mock_test(PageloaderTest):
+    keys = ['tpmanifest', 'tpcycles', 'tppagecycles', 'tprender', 'tpchrome',
+            'tpmozafterpaint', 'fnbpaint', 'tploadnocache', 'firstpaint', 'userready',
+            'testeventmap', 'base_vs_ref', 'mainthread', 'resolution', 'cycles',
+            'gecko_profile', 'gecko_profile_interval', 'gecko_profile_entries',
+            'tptimeout', 'win_counters', 'w7_counters', 'linux_counters', 'mac_counters',
+            'tpscrolltest', 'xperf_counters', 'timeout', 'shutdown', 'responsiveness',
+            'profile_path', 'xperf_providers', 'xperf_user_providers', 'xperf_stackwalk',
+            'format_pagename', 'filters', 'preferences', 'extensions', 'setup', 'cleanup',
+            'lower_is_better', 'alert_threshold', 'unit', 'webextensions', 'profile',
+            'tpmozafterpaint', 'url']
+
+    tpmozafterpaint = 'value'
+    firstpaint = 'value'
+    userready = 'value'
+    fnbpaint = 'value'
+
+
+class Test_get_active_tests(object):
+    def test_raises_exception_for_undefined_test(self):
+        with pytest.raises(ConfigurationError):
+            get_active_tests({'activeTests': 'undefined_test'})
+
+        with pytest.raises(ConfigurationError):
+            get_active_tests({'activeTests': '  undefined_test     '})
+
+        with pytest.raises(ConfigurationError):
+            get_active_tests({'activeTests': 'undef_test:undef_test2:undef_test3'})
+
+
+class Test_get_test(object):
+    global_overrides = {
+        'tpmozafterpaint': 'overriden',
+        'firstpaint': 'overriden',
+        'userready': 'overriden',
+        'fnbpaint': 'overriden'
+    }
+
+    config = {'webserver': 'test_webserver'}
+
+    def test_doesnt_override_specific_keys_unless_they_are_null(self):
+        test_instance = mock_test()
+        test_dict = get_test({}, self.global_overrides, [], test_instance)
+
+        assert test_dict['tpmozafterpaint'] == 'value'
+        assert test_dict['firstpaint'] == 'value'
+        assert test_dict['userready'] == 'value'
+        assert test_dict['fnbpaint'] == 'value'
+
+        # nulls still get overridden
+        test_instance = mock_test(
+            tpmozafterpaint=None, firstpaint=None, userready=None, fnbpaint=None)
+        test_dict = get_test({}, self.global_overrides, [], test_instance)
+
+        assert test_dict['tpmozafterpaint'] == 'overriden'
+        assert test_dict['firstpaint'] == 'overriden'
+        assert test_dict['userready'] == 'overriden'
+        assert test_dict['fnbpaint'] == 'overriden'
+
+    @mock.patch('talos.config.open', create=True)
+    def test_interpolate_keys(self, mock_open):
+        mock_open.return_value = mock.MagicMock(readlines=lambda: [])
+
+        test_instance = mock_test(url='${talos}/test_page.html',
+                                  tpmanifest='${talos}/file.manifest')
+
+        test_dict = get_test(self.config, self.global_overrides, [], test_instance)
+        assert test_dict['url'].startswith('http://test_webserver/')
+        assert '${talos}' not in test_dict['url']
+        assert '${talos}' not in test_dict['tpmanifest']
+
+    def test_build_tpmanifest(self, tmpdir):
+        manifest_file = tmpdir.join('file.manifest').ensure(file=True)
+        test_instance = mock_test(url='test_page.html',
+                                  tpmanifest=str(manifest_file))
+
+        test_dict = get_test(self.config, self.global_overrides, [], test_instance)
+        assert test_dict['tpmanifest'].endswith('.develop')
+
+    def test_add_counters(self):
+        test_instance = mock_test(
+            linux_counters=None,
+            mac_counters=[],
+            win_counters=['counter_a'],
+            w7_counters=['counter_a', 'counter_b'],
+            xperf_counters=['counter_a', 'counter_extra']
+        )
+
+        counters = ['counter_a', 'counter_b', 'counter_c']
+        test_dict = get_test(
+            self.config, self.global_overrides, counters, test_instance)
+
+        assert test_dict['linux_counters'] == counters
+        assert test_dict['mac_counters'] == counters
+        assert test_dict['win_counters'] == counters
+        assert test_dict['w7_counters'] == counters
+        assert set(test_dict['xperf_counters']) == set(counters + ['counter_extra'])
+
+
+class Test_get_browser_config(object):
+    required = ('preferences', 'extensions', 'browser_path', 'browser_wait',
+                'extra_args', 'buildid', 'env', 'init_url', 'webserver')
+    optional = ['bcontroller_config',
+                'branch_name',
+                'child_process',
+                'develop',
+                'e10s',
+                'process',
+                'framework',
+                'repository',
+                'sourcestamp',
+                'symbols_path',
+                'test_timeout',
+                'xperf_path',
+                'error_filename',
+                'no_upload_results',
+                'enable_stylo',
+                'disable_stylo',
+                'stylothreads',
+                'subtests']
+
+    def test_that_contains_title(self):
+        config_no_optionals = dict.fromkeys(self.required, '')
+        config_no_optionals.update(title='is_mandatory')
+
+        browser_config = get_browser_config(config_no_optionals)
+        assert browser_config['title'] == 'is_mandatory'
+
+    def test_raises_keyerror_for_missing_title(self):
+        config_missing_title = dict.fromkeys(self.required, '')
+
+        with pytest.raises(KeyError):
+            get_browser_config(config_missing_title)
+
+    def test_raises_keyerror_for_required_keys(self):
+        config_missing_required = dict.fromkeys(self.required, '')
+        config_missing_required.update(title='is_mandatory')
+        del config_missing_required['preferences']
+
+        with pytest.raises(KeyError):
+            get_browser_config(config_missing_required)
+
+    def test_doesnt_raise_on_missing_optionals(self):
+        config_missing_optionals = dict.fromkeys(self.required, '')
+        config_missing_optionals['title'] = 'is_mandatory'
+
+        try:
+            get_browser_config(config_missing_optionals)
+        except KeyError:
+            pytest.fail('Must not raise exception on missing optional')
+
+    def test_browser_keys_are_subset_from_config(self):
+        config_extensive = dict.fromkeys(self.required, '')
+        config_extensive.update(dict.fromkeys(self.optional, ''))
+        config_extensive['title'] = 'is_mandatory'
+        config_extensive['extra_custom_key'] = 'value'
+
+        browser_config = get_browser_config(config_extensive)
+        assert browser_config != config_extensive
+        assert set(browser_config.keys()).issubset(set(config_extensive.keys()))
+
+
+class Test_get_config(object):
+    @classmethod
+    def setup_class(cls):
+        cls.argv = '--suite other-e10s --mainthread -e /some/random/path'.split()
+        cls.argv_unprovided_tests = '-e /some/random/path'.split()
+        cls.argv_unknown_suite = '--suite random-unknown-suite -e /some/random/path'.split()
+        cls.argv_overrides_defaults = '''
+        --suite other-e10s
+        --executablePath /some/random/path
+        --cycles 20
+        --geckoProfile
+        --geckoProfileInterval 1000
+        --geckoProfileEntries 1000
+        --mainthread
+        --tpcycles 20
+        --mozAfterPaint
+        --firstPaint
+        --firstNonBlankPaint
+        --userReady
+        --tppagecycles 20
+        '''.split()
+
+        cls.argv_ts_paint = '--activeTests ts_paint -e /some/random/path'.split()
+        cls.argv_ts_paint_webext = '--activeTests ts_paint_webext -e /some/random/path'.split()
+        cls.argv_ts_paint_heavy = '--activeTests ts_paint_heavy -e /some/random/path'.split()
+        cls.argv_sessionrestore = '--activeTests sessionrestore -e /some/random/path'.split()
+        cls.argv_sessionrestore_no_auto_restore = \
+            '--activeTests sessionrestore_no_auto_restore -e /some/random/path'.split()
+        cls.argv_sessionrestore_many_windows = \
+            '--activeTests sessionrestore_many_windows -e /some/random/path'.split()
+        cls.argv_tresize = '--activeTests tresize -e /some/random/path'.split()
+        cls.argv_tpaint = '--activeTests tpaint -e /some/random/path'.split()
+        cls.argv_cpstartup = '--activeTests cpstartup -e /some/random/path'.split()
+        cls.argv_tabpaint = '--activeTests tabpaint -e /some/random/path'.split()
+        cls.argv_tps = '--activeTests tps -e /some/random/path'.split()
+        cls.argv_tart = '--activeTests tart -e /some/random/path'.split()
+        cls.argv_cart = '--activeTests cart -e /some/random/path'.split()
+        cls.argv_damp = '--activeTests damp -e /some/random/path'.split()
+        cls.argv_glterrain = '--activeTests glterrain -e /some/random/path'.split()
+        cls.argv_glvideo = '--activeTests glvideo -e /some/random/path'.split()
+        cls.argv_tp5n = '--activeTests tp5n -e /some/random/path'.split()
+        cls.argv_tp5o = '--activeTests tp5o -e /some/random/path'.split()
+        cls.argv_tp5o_webext = '--activeTests tp5o_webext -e /some/random/path'.split()
+        cls.argv_tp5o_scroll = '--activeTests tp5o_scroll -e /some/random/path'.split()
+        cls.argv_v8_7 = '--activeTests v8_7 -e /some/random/path'.split()
+        cls.argv_kraken = '--activeTests kraken -e /some/random/path'.split()
+        cls.argv_basic_compositor_video = \
+            '--activeTests basic_compositor_video -e /some/random/path'.split()
+        cls.argv_tcanvasmark = '--activeTests tcanvasmark -e /some/random/path'.split()
+        cls.argv_dromaeo_css = '--activeTests dromaeo_css -e /some/random/path'.split()
+        cls.argv_dromaeo_dom = '--activeTests dromaeo_dom -e /some/random/path'.split()
+        cls.argv_tsvgm = '--activeTests tsvgm -e /some/random/path'.split()
+        cls.argv_tsvgx = '--activeTests tsvgx -e /some/random/path'.split()
+        cls.argv_tsvg_static = '--activeTests tsvg_static -e /some/random/path'.split()
+        cls.argv_tsvgr_opacity = '--activeTests tsvgr_opacity -e /some/random/path'.split()
+        cls.argv_tscrollx = '--activeTests tscrollx -e /some/random/path'.split()
+        cls.argv_a11yr = '--activeTests a11yr -e /some/random/path'.split()
+        cls.argv_speedometer = '--activeTests speedometer -e /some/random/path'.split()
+        cls.argv_perf_reftest = '--activeTests perf_reftest -e /some/random/path'.split()
+        cls.argv_perf_reftest_singletons = \
+            '--activeTests perf_reftest_singletons -e /some/random/path'.split()
+        cls.argv_quantum_pageload_google = \
+            '--activeTests quantum_pageload_google -e /some/random/path'.split()
+        cls.argv_quantum_pageload_youtube = \
+            '--activeTests quantum_pageload_youtube -e /some/random/path'.split()
+        cls.argv_quantum_pageload_amazon = \
+            '--activeTests quantum_pageload_amazon -e /some/random/path'.split()
+        cls.argv_quantum_pageload_facebook = \
+            '--activeTests quantum_pageload_facebook -e /some/random/path'.split()
+        cls.argv_tp6_google = '--activeTests tp6_google -e /some/random/path'.split()
+        cls.argv_tp6_google_heavy = '--activeTests tp6_google_heavy -e /some/random/path'.split()
+        cls.argv_tp6_youtube = '--activeTests tp6_youtube -e /some/random/path'.split()
+        cls.argv_tp6_youtube_heavy = '--activeTests tp6_youtube_heavy -e /some/random/path'.split()
+        cls.argv_tp6_amazon = '--activeTests tp6_amazon -e /some/random/path'.split()
+        cls.argv_tp6_amazon_heavy = '--activeTests tp6_amazon_heavy -e /some/random/path'.split()
+        cls.argv_tp6_facebook = '--activeTests tp6_facebook -e /some/random/path'.split()
+        cls.argv_tp6_facebook_heavy = \
+            '--activeTests tp6_facebook_heavy -e /some/random/path'.split()
+
+    @classmethod
+    def teardown_class(cls):
+        conftest.remove_develop_files()
+
+    def test_correctly_overrides_test_values(self):
+        config = get_config(self.argv)
+        assert bool(config) is True
+
+        # no null values
+        null_keys = [key for key, val in config.iteritems() if val is None]
+        assert len(null_keys) == 0
+
+        # expected keys are there
+        assert config['browser_path'] == '/some/random/path'
+        assert config['suite'] == 'other-e10s'
+        assert config['mainthread'] is True
+
+        # default values overriden
+        config = get_config(self.argv_overrides_defaults)
+        assert config['basetest'] == ORIGINAL_DEFAULTS['basetest']
+
+    def test_config_has_tests(self):
+        config = get_config(self.argv)
+        assert len(config['tests']) > 0
+
+    def test_global_variable_isnt_modified(self):
+        get_config(self.argv)
+        assert ORIGINAL_DEFAULTS == DEFAULTS
+
+    def test_raises_except_if_unprovided_tests_on_cli(self):
+        with pytest.raises(ConfigurationError):
+            get_config(self.argv_unprovided_tests)
+
+        with pytest.raises(ConfigurationError):
+            get_config(self.argv_unknown_suite)
+
+    def test_ts_paint_has_expected_attributes(self):
+        config = get_config(self.argv_ts_paint)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'ts_paint'
+        assert test_config['cycles'] == 20
+        assert test_config['timeout'] == 150
+        assert test_config['gecko_profile_startup'] is True
+        assert test_config['gecko_profile_entries'] == 10000000
+        assert test_config['url'] != 'startup_test/tspaint_test.html'  # interpolation was done
+        assert test_config['shutdown'] is False
+        assert test_config['xperf_counters'] == []
+        # TODO: these don't work; is this a bug?
+        # assert test_config['win7_counters'] == []
+        assert test_config['filters'] is not None
+        assert test_config['tpmozafterpaint'] is True
+        # assert test_config['mainthread'] is False
+        # assert test_config['responsiveness'] is False
+        # assert test_config['unit'] == 'ms'
+
+    def test_ts_paint_webext_has_expected_attributes(self):
+        config = get_config(self.argv_ts_paint_webext)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'ts_paint_webext'
+        assert test_config['cycles'] == 20
+        assert test_config['timeout'] == 150
+        assert test_config['gecko_profile_startup'] is True
+        assert test_config['gecko_profile_entries'] == 10000000
+        assert test_config['url'] != 'startup_test/tspaint_test.html'  # interpolation was done
+        assert test_config['shutdown'] is False
+        assert test_config['xperf_counters'] == []
+        # TODO: these don't work; is this a bug?
+        # assert test_config['win7_counters'] == []
+        assert test_config['filters'] is not None
+        assert test_config['tpmozafterpaint'] is True
+        # assert test_config['mainthread'] is False
+        # assert test_config['responsiveness'] is False
+        # assert test_config['unit'] == 'ms'
+        # TODO: this isn't overriden
+        # assert test_config['webextensions'] != '${talos}/webextensions/dummy/dummy-signed.xpi'
+        assert test_config['preferences'] == {'xpinstall.signatures.required': False}
+
+    def test_ts_paint_heavy_has_expected_attributes(self):
+        config = get_config(self.argv_ts_paint_heavy)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'ts_paint_heavy'
+        assert test_config['cycles'] == 20
+        assert test_config['timeout'] == 150
+        assert test_config['gecko_profile_startup'] is True
+        assert test_config['gecko_profile_entries'] == 10000000
+        assert test_config['url'] != 'startup_test/tspaint_test.html'  # interpolation was done
+        assert test_config['shutdown'] is False
+        assert test_config['xperf_counters'] == []
+        # TODO: this doesn't work; is this a bug?
+        # assert test_config['win7_counters'] == []
+        assert test_config['filters'] is not None
+        assert test_config['tpmozafterpaint'] is True
+        # assert test_config['mainthread'] is False
+        # assert test_config['responsiveness'] is False
+        # assert test_config['unit'] == 'ms'
+        assert test_config['profile'] == 'simple'
+
+    def test_sessionrestore_has_expected_attributes(self):
+        config = get_config(self.argv_sessionrestore)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'sessionrestore'
+        assert test_config['cycles'] == 10
+        assert test_config['timeout'] == 900
+        assert test_config['gecko_profile_startup'] is True
+        assert test_config['gecko_profile_entries'] == 10000000
+        assert test_config['shutdown'] is False
+        assert test_config['reinstall'] == [
+            'sessionstore.jsonlz4', 'sessionstore.js', 'sessionCheckpoints.json']
+        assert test_config['url'] == 'about:home'
+        assert test_config['preferences'] == {'browser.startup.page': 3}
+        # assert test_config['unit'] == 'ms'
+
+    def test_sessionrestore_no_auto_restore_has_expected_attributes(self):
+        config = get_config(self.argv_sessionrestore_no_auto_restore)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'sessionrestore_no_auto_restore'
+        assert test_config['cycles'] == 10
+        assert test_config['timeout'] == 900
+        assert test_config['gecko_profile_startup'] is True
+        assert test_config['gecko_profile_entries'] == 10000000
+        assert test_config['shutdown'] is False
+        assert test_config['reinstall'] == [
+            'sessionstore.jsonlz4', 'sessionstore.js', 'sessionCheckpoints.json']
+        assert test_config['url'] == 'about:home'
+        assert test_config['preferences'] == {'browser.startup.page': 1}
+        # assert test_config['unit'] == 'ms'
+
+    def test_sessionrestore_many_windows_has_expected_attributes(self):
+        config = get_config(self.argv_sessionrestore_many_windows)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'sessionrestore_many_windows'
+        assert test_config['cycles'] == 10
+        assert test_config['timeout'] == 900
+        assert test_config['gecko_profile_startup'] is True
+        assert test_config['gecko_profile_entries'] == 10000000
+        assert test_config['shutdown'] is False
+        assert test_config['reinstall'] == [
+            'sessionstore.jsonlz4', 'sessionstore.js', 'sessionCheckpoints.json']
+        assert test_config['url'] == 'about:home'
+        assert test_config['preferences'] == {'browser.startup.page': 3}
+        # assert test_config['unit'] == 'ms'
+
+    def test_tresize_has_expected_attributes(self):
+        config = get_config(self.argv_tresize)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tresize'
+        assert test_config['cycles'] == 20
+        assert test_config['url'] != 'startup_test/tresize/addon/content/tresize-test.html'
+        assert test_config['timeout'] == 150
+        assert test_config['gecko_profile_interval'] == 2
+        assert test_config['gecko_profile_entries'] == 1000000
+        assert test_config['tpmozafterpaint'] is True
+        assert test_config['filters'] is not None
+        # assert test_config['unit'] == 'ms'
+
+    def test_tpaint_has_expected_attributes(self):
+        config = get_config(self.argv_tpaint)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tpaint'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 20
+        assert test_config['timeout'] == 300
+        assert test_config['gecko_profile_interval'] == 1
+        assert test_config['gecko_profile_entries'] == 2000000
+        assert test_config['tpmozafterpaint'] is True
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+        assert test_config['preferences'] == {
+            'security.data_uri.block_toplevel_data_uri_navigations': False}
+
+    def test_cpstartup_has_expected_attributes(self):
+        config = get_config(self.argv_cpstartup)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'cpstartup'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tpmanifest'] != '${talos}/tests/cpstartup/cpstartup.manifest'
+        assert test_config['tppagecycles'] == 20
+        assert test_config['gecko_profile_entries'] == 1000000
+        assert test_config['tploadnocache'] is True
+        assert test_config['unit'] == 'ms'
+        assert test_config['preferences'] == {
+            'browser.link.open_newwindow': 3,
+            'browser.link.open_newwindow.restriction': 2,
+        }
+
+    def test_tabpaint_has_expected_attributes(self):
+        config = get_config(self.argv_tabpaint)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tabpaint'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tpmanifest'] != '${talos}/tests/tabpaint/tabpaint.manifest'
+        assert test_config['tppagecycles'] == 20
+        assert test_config['gecko_profile_entries'] == 1000000
+        assert test_config['tploadnocache'] is True
+        assert test_config['unit'] == 'ms'
+        assert test_config['preferences'] == {
+            'browser.link.open_newwindow': 3,
+            'browser.link.open_newwindow.restriction': 2,
+        }
+
+    def test_tps_has_expected_attributes(self):
+        config = get_config(self.argv_tps)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tps'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tpmanifest'] != '${talos}/tests/tabswitch/tps.manifest'
+        assert test_config['tppagecycles'] == 5
+        assert test_config['gecko_profile_entries'] == 5000000
+        assert test_config['tploadnocache'] is True
+        assert test_config['preferences'] == {
+            'addon.test.tabswitch.urlfile': os.path.join('${talos}',
+                                                         'tests',
+                                                         'tp5o.html'),
+            'addon.test.tabswitch.webserver': '${webserver}',
+            'addon.test.tabswitch.maxurls': -1,
+        }
+        assert test_config['unit'] == 'ms'
+
+    def test_tart_has_expected_attributes(self):
+        config = get_config(self.argv_tart)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tart'
+        assert test_config['tpmanifest'] != '${talos}/tests/tart/tart.manifest'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 25
+        assert test_config['tploadnocache'] is True
+        assert test_config['tpmozafterpaint'] is False
+        assert test_config['gecko_profile_interval'] == 10
+        assert test_config['gecko_profile_entries'] == 1000000
+        assert 'win_counters' not in test_config
+        assert 'w7_counters' not in test_config
+        assert 'linux_counters' not in test_config
+        assert 'mac_counters' not in test_config
+        assert test_config['preferences'] == {
+            'layout.frame_rate': 0,
+            'docshell.event_starvation_delay_hint': 1,
+            'dom.send_after_paint_to_content': False
+        }
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+
+    def test_cart_has_expected_attributes(self):
+        config = get_config(self.argv_cart)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'cart'
+        assert test_config['tpmanifest'] != '${talos}/tests/tart/cart.manifest'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 25
+        assert test_config['tploadnocache'] is True
+        assert test_config['tpmozafterpaint'] is False
+        assert test_config['gecko_profile_interval'] == 1
+        assert test_config['gecko_profile_entries'] == 10000000
+        assert 'win_counters' not in test_config
+        assert 'w7_counters' not in test_config
+        assert 'linux_counters' not in test_config
+        assert 'mac_counters' not in test_config
+        assert test_config['preferences'] == {
+            'layout.frame_rate': 0,
+            'docshell.event_starvation_delay_hint': 1,
+            'dom.send_after_paint_to_content': False
+        }
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+
+    def test_damp_has_expected_attributes(self):
+        config = get_config(self.argv_damp)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'damp'
+        assert test_config['tpmanifest'] != '${talos}/tests/devtools/damp.manifest'
+        assert test_config['cycles'] == 5
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 5
+        assert test_config['tploadnocache'] is True
+        assert test_config['tpmozafterpaint'] is False
+        assert test_config['gecko_profile_interval'] == 10
+        assert test_config['gecko_profile_entries'] == 1000000
+        assert 'win_counters' not in test_config
+        assert 'w7_counters' not in test_config
+        assert 'linux_counters' not in test_config
+        assert 'mac_counters' not in test_config
+        assert test_config['filters'] is not None
+        assert test_config['preferences'] == {
+            'devtools.memory.enabled': True,
+            'addon.test.damp.webserver': '${webserver}'
+        }
+        assert test_config['unit'] == 'ms'
+
+    def test_glterrain_has_expected_attributes(self):
+        config = get_config(self.argv_glterrain)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'glterrain'
+        assert test_config['tpmanifest'] != '${talos}/tests/webgl/glterrain.manifest'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 25
+        assert test_config['tploadnocache'] is True
+        assert test_config['tpmozafterpaint'] is False
+        assert test_config['tpchrome'] is False
+        assert test_config['gecko_profile_interval'] == 10
+        assert test_config['gecko_profile_entries'] == 2000000
+        assert 'win_counters' not in test_config
+        assert 'w7_counters' not in test_config
+        assert 'linux_counters' not in test_config
+        assert 'mac_counters' not in test_config
+        assert test_config['preferences'] == {
+            'layout.frame_rate': 0,
+            'docshell.event_starvation_delay_hint': 1,
+            'dom.send_after_paint_to_content': False
+        }
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'frame interval'
+
+    def test_glvideo_has_expected_attributes(self):
+        config = get_config(self.argv_glvideo)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'glvideo'
+        assert test_config['tpmanifest'] != '${talos}/tests/webgl/glvideo.manifest'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 5
+        assert test_config['tploadnocache'] is True
+        assert test_config['tpmozafterpaint'] is False
+        assert test_config['tpchrome'] is False
+        assert test_config['gecko_profile_interval'] == 2
+        assert test_config['gecko_profile_entries'] == 2000000
+        assert 'win_counters' not in test_config
+        assert 'w7_counters' not in test_config
+        assert 'linux_counters' not in test_config
+        assert 'mac_counters' not in test_config
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+
+    def test_tp5n_has_expected_attributes(self):
+        config = get_config(self.argv_tp5n)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tp5n'
+        assert test_config['resolution'] == 20
+        assert test_config['shutdown'] is False
+        assert test_config['tpmanifest'] != '${talos}/tests/tp5n/tp5n.manifest'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 1
+        assert test_config['cycles'] == 1
+        assert test_config['tpmozafterpaint'] is True
+        assert test_config['tptimeout'] == 5000
+        assert test_config['mainthread'] is True
+        assert test_config['w7_counters'] == []
+        assert test_config['win_counters'] == []
+        assert test_config['linux_counters'] == []
+        assert test_config['mac_counters'] == []
+        assert test_config['xperf_counters'] == [
+            'main_startup_fileio', 'main_startup_netio',
+            'main_normal_fileio', 'main_normal_netio',
+            'nonmain_startup_fileio', 'nonmain_normal_fileio',
+            'nonmain_normal_netio', 'mainthread_readcount',
+            'mainthread_readbytes', 'mainthread_writecount',
+            'mainthread_writebytes'
+        ]
+        assert test_config['xperf_providers'] == [
+            'PROC_THREAD', 'LOADER', 'HARD_FAULTS', 'FILENAME',
+            'FILE_IO', 'FILE_IO_INIT'
+        ]
+        assert test_config['xperf_user_providers'] == [
+            'Mozilla Generic Provider',
+            'Microsoft-Windows-TCPIP'
+        ]
+        assert test_config['xperf_stackwalk'] == [
+            'FileCreate', 'FileRead', 'FileWrite', 'FileFlush',
+            'FileClose'
+        ]
+        assert test_config['filters'] is not None
+        assert test_config['timeout'] == 1800
+        assert test_config['preferences'] == {
+            'extensions.enabledScopes': '',
+            'talos.logfile': 'browser_output.txt'
+        }
+        assert test_config['unit'] == 'ms'
+
+    def test_tp5o_has_expected_attributes(self):
+        config = get_config(self.argv_tp5o)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tp5o'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 25
+        assert test_config['cycles'] == 1
+        assert test_config['tpmozafterpaint'] is True
+        assert test_config['tptimeout'] == 5000
+        assert test_config['mainthread'] is False
+        assert test_config['tpmanifest'] != '${talos}/tests/tp5n/tp5o.manifest'
+        assert test_config['win_counters'] == ['% Processor Time']
+        assert test_config['w7_counters'] == ['% Processor Time']
+        assert test_config['linux_counters'] == ['XRes']
+        assert test_config['mac_counters'] == []
+        assert test_config['responsiveness'] is True
+        assert test_config['gecko_profile_interval'] == 2
+        assert test_config['gecko_profile_entries'] == 4000000
+        assert test_config['filters'] is not None
+        assert test_config['timeout'] == 1800
+        assert test_config['unit'] == 'ms'
+
+    def test_tp5o_webext_has_expected_attributes(self):
+        config = get_config(self.argv_tp5o_webext)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tp5o_webext'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 25
+        assert test_config['cycles'] == 1
+        assert test_config['tpmozafterpaint'] is True
+        assert test_config['tptimeout'] == 5000
+        assert test_config['mainthread'] is False
+        assert test_config['tpmanifest'] != '${talos}/tests/tp5n/tp5o.manifest'
+        assert test_config['win_counters'] == ['% Processor Time']
+        assert test_config['w7_counters'] == ['% Processor Time']
+        assert test_config['linux_counters'] == ['XRes']
+        assert test_config['mac_counters'] == []
+        assert test_config['responsiveness'] is True
+        assert test_config['gecko_profile_interval'] == 2
+        assert test_config['gecko_profile_entries'] == 4000000
+        assert test_config['filters'] is not None
+        assert test_config['timeout'] == 1800
+        assert test_config['unit'] == 'ms'
+        assert test_config['webextensions'] == '${talos}/webextensions/dummy/dummy-signed.xpi'
+        assert test_config['preferences'] == {'xpinstall.signatures.required': False}
+
+    def test_tp5o_scroll_has_expected_attributes(self):
+        config = get_config(self.argv_tp5o_scroll)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tp5o_scroll'
+        assert test_config['tpmanifest'] != '${talos}/tests/tp5n/tp5o.manifest'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 12
+        assert test_config['gecko_profile_interval'] == 2
+        assert test_config['gecko_profile_entries'] == 2000000
+        assert test_config['tpscrolltest'] is True
+        assert test_config['tpmozafterpaint'] is False
+        assert test_config['preferences'] == {
+            'layout.frame_rate': 0,
+            'docshell.event_starvation_delay_hint': 1,
+            'dom.send_after_paint_to_content': False,
+            'layout.css.scroll-behavior.spring-constant': "'10'",
+            'toolkit.framesRecording.bufferSize': 10000
+        }
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == '1/FPS'
+
+    def test_v8_7_has_expected_attributes(self):
+        config = get_config(self.argv_v8_7)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'v8_7'
+        assert test_config['tpmanifest'] != '${talos}/tests/v8_7/v8.manifest'
+        assert test_config['gecko_profile_interval'] == 1
+        assert test_config['gecko_profile_entries'] == 1000000
+        assert test_config['tpcycles'] == 1
+        assert test_config['resolution'] == 20
+        assert test_config['tpmozafterpaint'] is False
+        assert test_config['preferences'] == {'dom.send_after_paint_to_content': False}
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'score'
+        assert test_config['lower_is_better'] is False
+
+    def test_kraken_has_expected_attributes(self):
+        config = get_config(self.argv_kraken)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'kraken'
+        assert test_config['tpmanifest'] != '${talos}/tests/kraken/kraken.manifest'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 1
+        assert test_config['gecko_profile_interval'] == 1
+        assert test_config['gecko_profile_entries'] == 5000000
+        assert test_config['tpmozafterpaint'] is False
+        assert test_config['tpchrome'] is False
+        assert test_config['preferences'] == {'dom.send_after_paint_to_content': False}
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'score'
+
+    def test_basic_compositor_video_has_expected_attributes(self):
+        config = get_config(self.argv_basic_compositor_video)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'basic_compositor_video'
+        assert test_config['tpmanifest'] != '${talos}/tests/video/video.manifest'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 12
+        assert test_config['tpchrome'] is False
+        assert test_config['timeout'] == 10000
+        assert test_config['gecko_profile_interval'] == 1
+        assert test_config['gecko_profile_entries'] == 2000000
+        assert test_config['preferences'] == {
+            'full-screen-api.allow-trusted-requests-only': False,
+            'layers.acceleration.force-enabled': False,
+            'layers.acceleration.disabled': True,
+            'layout.frame_rate': 0,
+            'docshell.event_starvation_delay_hint': 1,
+            'full-screen-api.warning.timeout': 500,
+            'media.ruin-av-sync.enabled': True
+        }
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms/frame'
+        assert test_config['lower_is_better'] is True
+
+    def test_tcanvasmark_has_expected_attributes(self):
+        config = get_config(self.argv_tcanvasmark)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tcanvasmark'
+        assert test_config['tpmanifest'] != '${talos}/tests/canvasmark/canvasmark.manifest'
+        assert 'win_counters' not in test_config
+        assert 'w7_counters' not in test_config
+        assert 'linux_counters' not in test_config
+        assert 'mac_counters' not in test_config
+        assert test_config['tpcycles'] == 5
+        assert test_config['tppagecycles'] == 1
+        assert test_config['timeout'] == 900
+        assert test_config['gecko_profile_interval'] == 10
+        assert test_config['gecko_profile_entries'] == 2500000
+        assert test_config['tpmozafterpaint'] is False
+        assert test_config['preferences'] == {'dom.send_after_paint_to_content': False}
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'score'
+        assert test_config['lower_is_better'] is False
+
+    def test_dromaeo_css_has_expected_attributes(self):
+        config = get_config(self.argv_dromaeo_css)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'dromaeo_css'
+        assert test_config['tpcycles'] == 1
+        assert test_config['filters'] is not None
+        assert test_config['lower_is_better'] is False
+        assert test_config['alert_threshold'] == 5.0
+        assert test_config['tpchrome'] is False
+        assert test_config['gecko_profile_interval'] == 2
+        assert test_config['gecko_profile_entries'] == 10000000
+        assert test_config['tpmanifest'] != '${talos}/tests/dromaeo/css.manifest'
+        assert test_config['unit'] == 'score'
+
+    def test_dromaeo_dom_has_expected_attributes(self):
+        config = get_config(self.argv_dromaeo_dom)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'dromaeo_dom'
+        assert test_config['tpcycles'] == 1
+        assert test_config['filters'] is not None
+        assert test_config['lower_is_better'] is False
+        assert test_config['alert_threshold'] == 5.0
+        assert test_config['tpchrome'] is False
+        assert test_config['gecko_profile_interval'] == 2
+        assert test_config['gecko_profile_entries'] == 10000000
+        assert test_config['tpmanifest'] != '${talos}/tests/dromaeo/dom.manifest'
+        assert test_config['unit'] == 'score'
+
+    def test_tsvgm_has_expected_attributes(self):
+        config = get_config(self.argv_tsvgm)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tsvgm'
+        assert test_config['tpmanifest'] != '${talos}/tests/svgx/svgm.manifest'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 7
+        assert test_config['tpmozafterpaint'] is False
+        assert test_config['tpchrome'] is False
+        assert test_config['gecko_profile_interval'] == 10
+        assert test_config['gecko_profile_entries'] == 1000000
+        assert test_config['preferences'] == {
+            'layout.frame_rate': 0,
+            'docshell.event_starvation_delay_hint': 1,
+            'dom.send_after_paint_to_content': False
+        }
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+
+    def test_tsvgx_has_expected_attributes(self):
+        config = get_config(self.argv_tsvgx)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tsvgx'
+        assert test_config['tpmanifest'] != '${talos}/tests/svgx/svgx.manifest'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 25
+        assert test_config['tpmozafterpaint'] is False
+        assert test_config['tpchrome'] is False
+        assert test_config['gecko_profile_interval'] == 10
+        assert test_config['gecko_profile_entries'] == 1000000
+        assert test_config['preferences'] == {
+            'layout.frame_rate': 0,
+            'docshell.event_starvation_delay_hint': 1,
+            'dom.send_after_paint_to_content': False
+        }
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+
+    def test_tsvg_static_has_expected_attributes(self):
+        config = get_config(self.argv_tsvg_static)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tsvg_static'
+        assert test_config['tpmanifest'] != '${talos}/tests/svg_static/svg_static.manifest'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 25
+        assert test_config['tpmozafterpaint'] is True
+        assert test_config['tpchrome'] is False
+        assert test_config['gecko_profile_interval'] == 1
+        assert test_config['gecko_profile_entries'] == 10000000
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+
+    def test_tsvgr_opacity_has_expected_attributes(self):
+        config = get_config(self.argv_tsvgr_opacity)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tsvgr_opacity'
+        assert test_config['tpmanifest'] != '${talos}/tests/svg_opacity/svg_opacity.manifest'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 25
+        assert test_config['tpmozafterpaint'] is True
+        assert test_config['tpchrome'] is False
+        assert test_config['gecko_profile_interval'] == 1
+        assert test_config['gecko_profile_entries'] == 10000000
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+
+    def test_tscrollx_has_expected_attributes(self):
+        config = get_config(self.argv_tscrollx)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tscrollx'
+        assert test_config['tpmanifest'] != '${talos}/tests/scroll/scroll.manifest'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 25
+        assert test_config['tpmozafterpaint'] is False
+        assert test_config['tpchrome'] is False
+        assert test_config['gecko_profile_interval'] == 1
+        assert test_config['gecko_profile_entries'] == 1000000
+        assert test_config['preferences'] == {
+            'layout.frame_rate': 0,
+            'docshell.event_starvation_delay_hint': 1,
+            'dom.send_after_paint_to_content': False,
+            'layout.css.scroll-behavior.spring-constant': "'10'",
+            'toolkit.framesRecording.bufferSize': 10000
+        }
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+
+    def test_a11yr_has_expected_attributes(self):
+        config = get_config(self.argv_a11yr)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'a11yr'
+        assert test_config['tpmanifest'] != '${talos}/tests/a11y/a11y.manifest'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 25
+        assert test_config['tpmozafterpaint'] is True
+        assert test_config['tpchrome'] is False
+        assert test_config['preferences'] == {'dom.send_after_paint_to_content': False}
+        assert test_config['unit'] == 'ms'
+        assert test_config['alert_threshold'] == 5.0
+
+    def test_speedometer_has_expected_attributes(self):
+        config = get_config(self.argv_speedometer)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'speedometer'
+        assert test_config['tpmanifest'] != '${talos}/tests/speedometer/speedometer.manifest'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 5
+        assert test_config['tpmozafterpaint'] is False
+        assert test_config['tpchrome'] is False
+        assert test_config['format_pagename'] is False
+        assert test_config['lower_is_better'] is False
+        assert test_config['unit'] == 'score'
+
+    def test_perf_reftest_has_expected_attributes(self):
+        config = get_config(self.argv_perf_reftest)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'perf_reftest'
+        assert test_config['base_vs_ref'] is True
+        assert test_config['tpmanifest'] != '${talos}/tests/perf-reftest/perf_reftest.manifest'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 10
+        assert test_config['tptimeout'] == 30000
+        assert test_config['gecko_profile_interval'] == 1
+        assert test_config['gecko_profile_entries'] == 2000000
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+        assert test_config['lower_is_better'] is True
+        assert test_config['alert_threshold'] == 5.0
+
+    def test_perf_reftest_singletons_has_expected_attributes(self):
+        config = get_config(self.argv_perf_reftest_singletons)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'perf_reftest_singletons'
+        assert test_config['tpmanifest'] != \
+            '${talos}/tests/perf-reftest-singletons/perf_reftest_singletons.manifest'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 15
+        assert test_config['tptimeout'] == 30000
+        assert test_config['gecko_profile_interval'] == 1
+        assert test_config['gecko_profile_entries'] == 2000000
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+        assert test_config['lower_is_better'] is True
+        assert test_config['alert_threshold'] == 5.0
+
+    def test_quantum_pageload_google_has_expected_attributes(self):
+        config = get_config(self.argv_quantum_pageload_google)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'quantum_pageload_google'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 25
+        assert test_config['gecko_profile_interval'] == 1
+        assert test_config['gecko_profile_entries'] == 2000000
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+        assert test_config['lower_is_better'] is True
+        assert test_config['fnbpaint'] is True
+        assert test_config['tpmanifest'] != \
+            '${talos}/tests/quantum_pageload/quantum_pageload_google.manifest'
+
+    def test_quantum_pageload_youtube_has_expected_attributes(self):
+        config = get_config(self.argv_quantum_pageload_youtube)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'quantum_pageload_youtube'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 25
+        assert test_config['gecko_profile_interval'] == 1
+        assert test_config['gecko_profile_entries'] == 2000000
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+        assert test_config['lower_is_better'] is True
+        assert test_config['fnbpaint'] is True
+        assert test_config['tpmanifest'] != \
+            '${talos}/tests/quantum_pageload/quantum_pageload_youtube.manifest'
+
+    def test_quantum_pageload_amazon_has_expected_attributes(self):
+        config = get_config(self.argv_quantum_pageload_amazon)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'quantum_pageload_amazon'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 25
+        assert test_config['gecko_profile_interval'] == 1
+        assert test_config['gecko_profile_entries'] == 2000000
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+        assert test_config['lower_is_better'] is True
+        assert test_config['fnbpaint'] is True
+        assert test_config['tpmanifest'] != \
+            '${talos}/tests/quantum_pageload/quantum_pageload_amazon.manifest'
+
+    def test_quantum_pageload_facebook_has_expected_attributes(self):
+        config = get_config(self.argv_quantum_pageload_facebook)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'quantum_pageload_facebook'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 25
+        assert test_config['gecko_profile_interval'] == 1
+        assert test_config['gecko_profile_entries'] == 2000000
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+        assert test_config['lower_is_better'] is True
+        assert test_config['fnbpaint'] is True
+        assert test_config['tpmanifest'] != \
+            '${talos}/tests/quantum_pageload/quantum_pageload_facebook.manifest'
+
+    def test_tp6_google_has_expected_attributes(self):
+        config = get_config(self.argv_tp6_google)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tp6_google'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 25
+        assert test_config['gecko_profile_interval'] == 1
+        assert test_config['gecko_profile_entries'] == 2000000
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+        assert test_config['lower_is_better'] is True
+        assert test_config['fnbpaint'] is True
+        assert test_config['tpmanifest'] != \
+            '${talos}/tests/quantum_pageload/quantum_pageload_google.manifest'
+
+    def test_tp6_google_heavy_has_expected_attributes(self):
+        config = get_config(self.argv_tp6_google_heavy)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tp6_google_heavy'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 25
+        assert test_config['gecko_profile_interval'] == 1
+        assert test_config['gecko_profile_entries'] == 2000000
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+        assert test_config['lower_is_better'] is True
+        assert test_config['fnbpaint'] is True
+        assert test_config['profile'] == 'simple'
+
+    def test_tp6_youtube_has_expected_attributes(self):
+        config = get_config(self.argv_tp6_youtube)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tp6_youtube'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 25
+        assert test_config['gecko_profile_interval'] == 1
+        assert test_config['gecko_profile_entries'] == 2000000
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+        assert test_config['lower_is_better'] is True
+        assert test_config['fnbpaint'] is True
+        assert test_config['tpmanifest'] != \
+            '${talos}/tests/quantum_pageload/quantum_pageload_youtube.manifest'
+
+    def test_tp6_youtube_heavy_has_expected_attributes(self):
+        config = get_config(self.argv_tp6_youtube_heavy)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tp6_youtube_heavy'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 25
+        assert test_config['gecko_profile_interval'] == 1
+        assert test_config['gecko_profile_entries'] == 2000000
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+        assert test_config['lower_is_better'] is True
+        assert test_config['fnbpaint'] is True
+        assert test_config['profile'] == 'simple'
+
+    def test_tp6_amazon_has_expected_attributes(self):
+        config = get_config(self.argv_tp6_amazon)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tp6_amazon'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 25
+        assert test_config['gecko_profile_interval'] == 1
+        assert test_config['gecko_profile_entries'] == 2000000
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+        assert test_config['lower_is_better'] is True
+        assert test_config['fnbpaint'] is True
+        assert test_config['tpmanifest'] != \
+            '${talos}/tests/quantum_pageload/quantum_pageload_amazon.manifest'
+
+    def test_tp6_amazon_heavy_has_expected_attributes(self):
+        config = get_config(self.argv_tp6_amazon_heavy)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tp6_amazon_heavy'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 25
+        assert test_config['gecko_profile_interval'] == 1
+        assert test_config['gecko_profile_entries'] == 2000000
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+        assert test_config['lower_is_better'] is True
+        assert test_config['fnbpaint'] is True
+        assert test_config['profile'] == 'simple'
+
+    def test_tp6_facebook_has_expected_attributes(self):
+        config = get_config(self.argv_tp6_facebook)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tp6_facebook'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 25
+        assert test_config['gecko_profile_interval'] == 1
+        assert test_config['gecko_profile_entries'] == 2000000
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+        assert test_config['lower_is_better'] is True
+        assert test_config['fnbpaint'] is True
+        assert test_config['tpmanifest'] != \
+            '${talos}/tests/quantum_pageload/quantum_pageload_facebook.manifest'
+
+    def test_tp6_facebook_heavy_has_expected_attributes(self):
+        config = get_config(self.argv_tp6_facebook_heavy)
+        test_config = config['tests'][0]
+
+        assert test_config['name'] == 'tp6_facebook_heavy'
+        assert test_config['tpcycles'] == 1
+        assert test_config['tppagecycles'] == 25
+        assert test_config['gecko_profile_interval'] == 1
+        assert test_config['gecko_profile_entries'] == 2000000
+        assert test_config['filters'] is not None
+        assert test_config['unit'] == 'ms'
+        assert test_config['lower_is_better'] is True
+        assert test_config['fnbpaint'] is True
+        assert test_config['profile'] == 'simple'
+
+
+@mock.patch('talos.config.get_browser_config')
+@mock.patch('talos.config.get_config')
+def test_get_configs(get_config_mock, get_browser_config_mock):
+    # unpacks in right order
+    get_config_mock.return_value = 'first'
+    get_browser_config_mock.return_value = 'second'
+
+    first, second = get_configs()
+    assert (first, second) == ('first', 'second')
+
+
+if __name__ == '__main__':
+    mozunit.main()
new file mode 100644
--- /dev/null
+++ b/testing/talos/talos/unittests/test_ffsetup.py
@@ -0,0 +1,92 @@
+from __future__ import absolute_import
+
+import os
+
+import mock
+import mozunit
+
+from talos.ffsetup import FFSetup
+
+
+class TestFFSetup(object):
+
+    def setup_method(self, method):
+        self.ffsetup = FFSetup(
+            # browser_config
+            {
+                "env": {},
+                "symbols_path": "",
+                "preferences": {},
+                "webserver": "",
+                "extensions": []
+            },
+            # test_config
+            {
+                "preferences": {},
+                "extensions": [],
+                "profile": None
+            }
+        )
+
+        # setup proxy logger
+
+    def test_clean(self):
+        # tmp dir removed
+        assert self.ffsetup._tmp_dir is not None
+        assert os.path.exists(self.ffsetup._tmp_dir) is True
+
+        self.ffsetup.clean()
+
+        assert self.ffsetup._tmp_dir is not None
+        assert os.path.exists(self.ffsetup._tmp_dir) is False
+
+        # gecko profile also cleaned
+        gecko_profile = mock.Mock()
+        self.ffsetup.gecko_profile = gecko_profile
+
+        self.ffsetup.clean()
+
+        assert gecko_profile.clean.called is True
+
+#     def test_as_context_manager(self):
+#         self.ffsetup._init_env = mock.Mock()
+#         self.ffsetup._init_profile = mock.Mock()
+#         self.ffsetup._run_profile = mock.Mock()
+#         self.ffsetup._init_gecko_profile = mock.Mock()
+#
+#         with self.ffsetup as setup:
+#             # env initiated
+#             self.assertIsNotNone(setup.env)
+#             # profile initiated
+#             self.assertTrue(setup._init_profile.called)
+#             # gecko profile initiated
+#
+#         # except raised
+#         pass
+#
+#     def test_environment_init(self):
+#         # self.env not empty
+#         # browser_config env vars in self.env
+#         # multiple calls return same self.env
+#         pass
+#
+#     def test_profile_init(self):
+#         # addons get installed
+#         # webextensions get installed
+#         # preferences contain interpolated values
+#         # profile path is added
+#         pass
+#
+#     def test_run_profile(self):
+#         # exception raised
+#         # browser process launched
+#         pass
+#
+#     def test_gecko_profile_init(self):
+#         # complains on not provided upload_dir
+#         # self.gecko_profile not None
+#         pass
+
+
+if __name__ == '__main__':
+    mozunit.main()
new file mode 100644
new file mode 100644
new file mode 100644
new file mode 100644
new file mode 100644
new file mode 100644
new file mode 100644
new file mode 100644
new file mode 100644
new file mode 100644
new file mode 100644
rename from testing/talos/tests/test_talosconfig_browser_config.json
rename to testing/talos/talos/unittests/test_talosconfig_browser_config.json
rename from testing/talos/tests/test_talosconfig_test_config.json
rename to testing/talos/talos/unittests/test_talosconfig_test_config.json
new file mode 100644
--- /dev/null
+++ b/testing/talos/talos/unittests/test_test.py
@@ -0,0 +1,205 @@
+from __future__ import absolute_import
+
+import mozunit
+import pytest
+
+from talos.test import Test, TsBase, ts_paint
+from talos.test import register_test
+from talos.test import test_dict
+
+
+class BasicTestA(Test):
+    pass
+
+
+class BasicTestB(Test):
+    pass
+
+
+class BasicTestC(Test):
+    """basic description"""
+    keys = [
+        'nonnull_attrib',
+        'null_attrib'
+    ]
+
+    nonnull_attrib = 'value'
+    null_attrib = None
+
+
+class NotATest(object):
+    pass
+
+
+class Test_register_test(object):
+
+    def test_same_instance_returned(self):
+        decorator = register_test()
+        NewBasicTest = decorator(BasicTestA)
+
+        assert BasicTestA is NewBasicTest
+
+    def test_class_registered(self):
+        _TESTS = test_dict()
+        decorator = register_test()
+
+        # class registered
+        decorator(BasicTestB)
+        assert 'BasicTestB' in _TESTS
+        assert BasicTestB in _TESTS.values()
+
+        # cannot register same class
+        with pytest.raises(AssertionError):
+            decorator(BasicTestB)
+
+        # cannot register other class type
+        with pytest.raises(AssertionError):
+            decorator(NotATest)
+
+
+class TestTest(object):
+
+    def test_same_class_name(self):
+        assert BasicTestA.name() == 'BasicTestA'
+
+    def test_class_doc(self):
+        assert BasicTestA.description() is not None
+        assert BasicTestC.description() == 'basic description'
+
+    def test_init(self):
+        basic_test = BasicTestA(new_attrib_a='value_a', new_attrib_b='value_b')
+        assert basic_test.new_attrib_a == 'value_a'
+        assert basic_test.new_attrib_b == 'value_b'
+
+    def test_update(self):
+        basic_test = BasicTestA()
+        basic_test.update(new_attrib_a='value_a', new_attrib_b='value_b')
+
+        assert basic_test.new_attrib_a == 'value_a'
+        assert basic_test.new_attrib_b == 'value_b'
+
+        basic_test.update(new_attrib_c='value_c')
+        assert basic_test.new_attrib_c == 'value_c'
+
+    def test_items(self):
+        basic_test = BasicTestC()
+
+        # returns iterable
+        try:
+            iter(basic_test.items())
+        except TypeError:
+            pytest.fail('Test.items() did not return iterator')
+
+        tuple_result = basic_test.items()[0]
+        assert len(tuple_result) == 2
+
+        # returns not nones
+        assert ('nonnull_attrib', 'value') in basic_test.items()
+        assert ('null_attrib', None) not in basic_test.items()
+
+        # not overridden Test instance
+        test_instance = Test()
+        assert test_instance.items() == [('name', 'Test')]
+
+        # overridden Test instance
+        test_instance = Test(unregistered_attr='value')
+        assert ('unregistered_attr', 'value') not in test_instance.items()
+
+        test_instance = Test()
+        test_instance.update(keys=['cycles', 'desktop', 'lower_is_better'])
+        assert dict(test_instance.items()) == {
+            'name': 'Test', 'desktop': True, 'lower_is_better': True}
+
+        test_instance = Test()
+        test_instance.update(new_attrib='some')
+        assert ('new_attrib', 'some') not in test_instance.items()
+
+        test_instance = Test()
+        test_instance.update(keys=['new_attrib'], new_attrib='value')
+        assert dict(test_instance.items()) == {'name': 'Test', 'new_attrib': 'value'}
+
+        test_instance = Test(cycles=20, desktop=False)
+        assert test_instance.cycles == 20
+        assert test_instance.desktop is False
+
+        test_instance = Test()
+        test_instance.update(cycles=20, desktop=False)
+        assert test_instance.cycles == 20
+        assert test_instance.desktop is False
+
+
+class TestTsBase(object):
+    ts_base_registered_keys = {
+        'url',
+        'url_timestamp',
+        'timeout',
+        'cycles',
+        'shutdown',
+        'profile_path',
+        'gecko_profile',
+        'gecko_profile_interval',
+        'gecko_profile_entries',
+        'gecko_profile_startup',
+        'preferences',
+        'xperf_counters',
+        'xperf_providers',
+        'xperf_user_providers',
+        'xperf_stackwalk',
+        'tpmozafterpaint',
+        'fnbpaint',
+        'profile',
+        'firstpaint',
+        'userready',
+        'testeventmap',
+        'base_vs_ref',
+        'extensions',
+        'filters',
+        'setup',
+        'cleanup',
+        'webextensions',
+        'reinstall',
+    }
+
+    def setup_method(self):
+        self.test_instance = TsBase()
+
+    def test_no_unknown_keys_are_somehow_added_alongside_registered_ones(self):
+        assert set(self.test_instance.keys) == self.ts_base_registered_keys
+
+        self.test_instance.update(attribute_one='value', attribute_two='value')
+        assert set(self.test_instance.keys) == self.ts_base_registered_keys
+
+    def test_nonnull_keys_show_up(self):
+        assert dict(self.test_instance.items()) == {
+            'name': 'TsBase',
+            'filters': self.test_instance.filters
+        }
+
+        self.test_instance.update(timeout=500)
+        assert dict(self.test_instance.items()) == {
+            'name': 'TsBase',
+            'filters': self.test_instance.filters,
+            'timeout': 500
+        }
+
+
+class Test_ts_paint(object):
+    def test_test_nonnull_keys_show_up(self):
+        test_instance = ts_paint()
+        keys = {key for key, _ in test_instance.items()}
+        assert keys == {
+            'name',
+            'cycles',
+            'timeout',
+            'gecko_profile_startup',
+            'gecko_profile_entries',
+            'url',
+            'shutdown',
+            'xperf_counters',
+            'filters',
+            'tpmozafterpaint'
+        }
+
+
+if __name__ == '__main__':
+    mozunit.main()
new file mode 100644
new file mode 100644
new file mode 100644
new file mode 100644
rename from testing/talos/tests/xrestop_output.txt
rename to testing/talos/talos/unittests/xrestop_output.txt
deleted file mode 100644
--- a/testing/talos/tests/test_browser_output.py
+++ /dev/null
@@ -1,192 +0,0 @@
-#!/usr/bin/env python
-
-"""
-test talos browser output parsing
-"""
-from __future__ import absolute_import
-
-import os
-import unittest
-
-from talos.results import BrowserLogResults
-from talos.results import PageloaderResults
-from talos.utils import TalosError
-
-here = os.path.dirname(os.path.abspath(__file__))
-
-
-class TestBrowserOutput(unittest.TestCase):
-
-    def test_ts_format(self):
-
-        # output file
-        browser_ts = os.path.join(here, 'browser_output.ts.txt')
-
-        # parse the results
-        browser_log = BrowserLogResults(browser_ts)
-
-        # ensure the results meet what we expect
-        self.assertEqual(browser_log.format, 'tsformat')
-        self.assertEqual(browser_log.browser_results.strip(), '392')
-        self.assertEqual(browser_log.startTime, 1333663595953)
-        self.assertEqual(browser_log.endTime, 1333663596551)
-
-    def test_tsvg_format(self):
-
-        # output file
-        browser_tsvg = os.path.join(here, 'browser_output.tsvg.txt')
-
-        # parse the results
-        browser_log = BrowserLogResults(browser_tsvg)
-
-        # ensure the results meet what we expect
-        self.assertEqual(browser_log.format, 'tpformat')
-        self.assertEqual(browser_log.startTime, 1333666702130)
-        self.assertEqual(browser_log.endTime, 1333666702743)
-
-        # we won't validate the exact string because it is long
-        raw_report = browser_log.browser_results.strip()
-        raw_report.startswith('_x_x_mozilla_page_load')
-        raw_report.endswith('|11;hixie-007.xml;1629;1651;1648;1652;1649')
-
-        # but we will ensure that it is parseable
-        pageloader_results = PageloaderResults(raw_report)
-        self.assertEqual(len(pageloader_results.results), 12)
-        indices = [i['index'] for i in pageloader_results.results]
-        self.assertEqual(indices, range(12))
-
-        # test hixie-001.xml just as a spot-check
-        hixie_001 = pageloader_results.results[5]
-        expected_values = [45643, 14976, 17807, 14971, 17235]
-        self.assertEqual(hixie_001['runs'], expected_values)
-        self.assertEqual(hixie_001['page'], 'hixie-001.xml')
-
-    def test_garbage(self):
-        """
-        send in garbage input and ensure the output is the
-        inability to find the report
-        """
-
-        garbage = "hjksdfhkhasdfjkhsdfkhdfjklasd"
-        self.compare_error_message(garbage, "Could not find report")
-
-    def test_missing_end_report(self):
-        """what if you're not done with a report?"""
-        garbage = "hjksdfhkhasdfjkhsdfkhdfjklasd"
-
-        input = self.start_report()
-        input += garbage
-        self.compare_error_message(input, "Could not find end token: '__end_report'")
-
-    def test_double_end_report(self):
-        """double end report tokens"""
-
-        garbage = "hjksdfhkhasdfjkhsdfkhdfjklasd"
-        input = self.start_report() + garbage + self.end_report() + self.end_report()
-        self.compare_error_message(input, "Unmatched number of tokens")
-
-    def test_end_report_before_start_report(self):
-        """the end report token occurs before the start report token"""
-
-        garbage = "hjksdfhkhasdfjkhsdfkhdfjklasd"
-        input = self.end_report() + garbage + self.start_report()
-        self.compare_error_message(input,
-                                   "End token '%s' occurs before start token" %
-                                   self.end_report())
-
-    def test_missing_timestamps(self):
-        """what if the timestamps are missing?"""
-
-        # make a bogus report but missing the timestamps
-        garbage = "hjksdfhkhasdfjkhsdfkhdfjklasd"
-        input = self.start_report() + garbage + self.end_report()
-
-        # it will fail
-        self.compare_error_message(input, "Could not find startTime in browser output")
-
-    def test_wrong_order(self):
-        """what happens if you mix up the token order?"""
-
-        # i've secretly put the AfterTerminationTimestamp before
-        # the BeforeLaunchTimestamp
-        # Let's see if the parser notices
-        bad_report = """__start_report392__end_report
-
-Failed to load native module at path '/home/jhammel/firefox/components/libmozgnome.so':
-(80004005) libnotify.so.1: cannot open shared object file: No such file or directory
-Could not read chrome manifest
-'file:///home/jhammel/firefox/extensions/%7B972ce4c6-7e08-4474-a285-3208198ce6fd%7D/chrome.manifest'.
-[JavaScript Warning: "Use of enablePrivilege is deprecated.
-Please use code that runs with the system principal (e.g. an extension) instead.
-" {file: "http://localhost:15707/startup_test/startup_test.html?begin=1333663595557" line: 0}]
-__startTimestamp1333663595953__endTimestamp
-__startAfterTerminationTimestamp1333663596551__endAfterTerminationTimestamp
-__startBeforeLaunchTimestamp1333663595557__endBeforeLaunchTimestamp
-"""
-
-        self.compare_error_message(bad_report, "] found before " +
-                                   "('__startBeforeLaunchTimestamp', " +
-                                   "'__endBeforeLaunchTimestamp') " +
-                                   "[character position:")
-
-    def test_multiple_reports(self):
-        """you're only allowed to have one report in a file"""
-
-        # this one works fine
-        good_report = """__start_report392__end_report
-
-Failed to load native module at path '/home/jhammel/firefox/components/libmozgnome.so':
-(80004005) libnotify.so.1: cannot open shared object file: No such file or directory
-Could not read chrome manifest
-'file:///home/jhammel/firefox/extensions/%7B972ce4c6-7e08-4474-a285-3208198ce6fd%7D/chrome.manifest'.
-[JavaScript Warning: "Use of enablePrivilege is deprecated.
-Please use code that runs with the system principal (e.g. an extension) instead.
-" {file: "http://localhost:15707/startup_test/startup_test.html?begin=1333663595557" line: 0}]
-__startTimestamp1333663595953__endTimestamp
-__startBeforeLaunchTimestamp1333663595557__endBeforeLaunchTimestamp
-__startAfterTerminationTimestamp1333663596551__endAfterTerminationTimestamp
-"""
-
-        # but there's no hope for this one
-        bad_report = good_report + good_report  # interesting math
-
-        self.compare_error_message(bad_report, "Multiple matches for %s,%s" %
-                                   (self.start_report(), self.end_report()))
-
-    def start_report(self):
-        """return a start report token"""
-        return BrowserLogResults.report_tokens[0][1][0]  # start token
-
-    def end_report(self):
-        """return a start report token"""
-        return BrowserLogResults.report_tokens[0][1][-1]  # end token
-
-    def compare_error_message(self, browser_log, substr):
-        """
-        ensures that exceptions give correct error messages
-        - browser_log : a browser log file
-        - substr : substring of the error message
-        """
-
-        try:
-            BrowserLogResults(results_raw=browser_log)
-        except TalosError as e:
-            if substr not in str(e):
-                import pdb
-                pdb.set_trace()
-            self.assertTrue(substr in str(e))
-
-
-class TestTalosError(unittest.TestCase):
-    """
-    test TalosError class
-    """
-    def test_browser_log_results(self):
-        # an example that should fail
-        # passing invalid value for argument result_raw
-        with self.assertRaises(TalosError):
-            BrowserLogResults(results_raw="__FAIL<bad test>__FAIL")
-
-
-if __name__ == '__main__':
-    unittest.main()
deleted file mode 100755
--- a/testing/talos/tests/test_filter.py
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/usr/bin/env python
-
-"""
-test talos' filter module:
-
-http://hg.mozilla.org/build/talos/file/tip/talos/filter.py
-"""
-from __future__ import absolute_import
-
-import unittest
-
-import talos.filter
-
-
-class TestFilter(unittest.TestCase):
-
-    data = range(30)  # test data
-
-    def test_ignore(self):
-        """test the ignore filter"""
-        # a bit of a stub sanity test for a single filter
-
-        filtered = talos.filter.ignore_first(self.data)
-        self.assertEquals(filtered, self.data[1:])
-
-        filtered = talos.filter.ignore_first(self.data, 10)
-        self.assertEquals(filtered, self.data[10:])
-
-        # short series won't be filtered
-        filtered = talos.filter.ignore_first(self.data, 50)
-        self.assertEquals(filtered, self.data)
-
-    def test_getting_filters(self):
-        """test getting a list of filters from a string"""
-
-        filter_names = ['ignore_max', 'ignore_max', 'max']
-
-        # get the filter functions
-        filters = talos.filter.filters(*filter_names)
-        self.assertEquals(len(filter_names), len(filters))
-        for filter in filters:
-            self.assertTrue(self, hasattr(filter, '__call__'))
-
-        # apply them on the data
-        filtered = talos.filter.apply(self.data, filters)
-        self.assertEquals(filtered, 27)
-
-    def test_parse(self):
-        """test the filter name parse function"""
-
-        # an example with no arguments
-        parsed = talos.filter.parse('mean')
-        self.assertEquals(parsed, ['mean', []])
-
-        # an example with one integer argument
-        parsed = talos.filter.parse('ignore_first:10')
-        self.assertEquals(parsed, ['ignore_first', [10]])
-        self.assertEquals(type(parsed[1][0]), int)
-        self.assertNotEqual(type(parsed[1][0]), float)
-
-        # an example with several arguments
-
-        # temporarily add foo
-        # value is lambda function to mimic filter_dict key:value pair
-        talos.filter.scalar_filters['foo'] = lambda *args: args
-        parsed = talos.filter.parse('foo:10.1,2,5.0,6.')
-        self.assertEquals(parsed, ['foo', [10.1, 2, 5.0, 6.0]])
-        for index in (2, 3):
-            self.assertEquals(type(parsed[1][index]), float)
-            self.assertNotEqual(type(parsed[1][index]), int)
-
-        # an example that should fail
-        self.assertRaises(ValueError, talos.filter.parse, 'foo:bar')
-        self.assertRaises(ValueError, talos.filter.parse, 'foo:1,')
-
-        # delete foo again
-        del talos.filter.scalar_filters['foo']
-
-
-if __name__ == '__main__':
-    unittest.main()
deleted file mode 100644
--- a/testing/talos/tests/test_heavy.py
+++ /dev/null
@@ -1,163 +0,0 @@
-#!/usr/bin/env python
-
-"""
-test talos' heavy module:
-
-http://hg.mozilla.org/build/talos/file/tip/talos/heavy.py
-"""
-from __future__ import absolute_import
-import unittest
-import tempfile
-import shutil
-import datetime
-import contextlib
-import os
-import time
-
-import talos.heavy
-
-
-archive = os.path.join(os.path.dirname(__file__), 'profile.tgz')
-archive_size = os.stat(archive).st_size
-
-
-@contextlib.contextmanager
-def mock_requests(**kw):
-    class Session:
-        def mount(self, *args, **kw):
-            pass
-
-    kw['Session'] = Session
-    old = {}
-    for meth, func in kw.items():
-        curr = getattr(talos.heavy.requests, meth)
-        old[meth] = curr
-        setattr(talos.heavy.requests, meth, func)
-        setattr(Session, meth, func)
-    try:
-        yield
-    finally:
-        for meth, func in old.items():
-            setattr(talos.heavy.requests, meth, func)
-
-
-class _Response(object):
-    def __init__(self, code, headers=None, file=None):
-        if headers is None:
-            headers = {}
-        self.headers = headers
-        self.status_code = code
-        self.file = file
-
-    def raise_for_status(self):
-        pass
-
-    def iter_content(self, chunk_size):
-        with open(self.file, 'rb') as f:
-            yield f.read(chunk_size)
-
-
-class Logger:
-    def __init__(self):
-        self.data = []
-
-    def info(self, msg):
-        self.data.append(msg)
-
-
-class TestFilter(unittest.TestCase):
-
-    def setUp(self):
-        self.temp = tempfile.mkdtemp()
-        self.logs = talos.heavy.LOG.logger = Logger()
-
-    def tearDown(self):
-        shutil.rmtree(self.temp)
-
-    def test_profile_age(self):
-        """test profile_age function"""
-        days = talos.heavy.profile_age(self.temp)
-        self.assertEqual(days, 0)
-
-        _8_days = datetime.datetime.now() + datetime.timedelta(days=8)
-        days = talos.heavy.profile_age(self.temp, _8_days)
-        self.assertEqual(days, 8)
-
-    def test_directory_age(self):
-        """make sure it detects changes in files in subdirs"""
-        with open(os.path.join(self.temp, 'file'), 'w') as f:
-            f.write('xxx')
-
-        current_age = talos.heavy._recursive_mtime(self.temp)
-        time.sleep(1.1)
-
-        with open(os.path.join(self.temp, 'file'), 'w') as f:
-            f.write('----')
-
-        self.assertTrue(current_age < talos.heavy._recursive_mtime(self.temp))
-
-    def test_follow_redirect(self):
-        """test follow_redirect function"""
-        _8_days = datetime.datetime.now() + datetime.timedelta(days=8)
-        _8_days = _8_days.strftime('%a, %d %b %Y %H:%M:%S UTC')
-
-        resps = [_Response(303, {'Location': 'blah'}),
-                 _Response(303, {'Location': 'bli'}),
-                 _Response(200, {'Last-Modified': _8_days})]
-
-        class Counter:
-            c = 0
-
-        def _head(url, curr=Counter()):
-            curr.c += 1
-            return resps[curr.c]
-
-        with mock_requests(head=_head):
-            loc, lm = talos.heavy.follow_redirects('https://example.com')
-            days = talos.heavy.profile_age(self.temp, lm)
-            self.assertEqual(days, 8)
-
-    def _test_download(self, age):
-
-        def _days(num):
-            d = datetime.datetime.now() + datetime.timedelta(days=num)
-            return d.strftime('%a, %d %b %Y %H:%M:%S UTC')
-
-        resps = [_Response(303, {'Location': 'blah'}),
-                 _Response(303, {'Location': 'bli'}),
-                 _Response(200, {'Last-Modified': _days(age)})]
-
-        class Counter:
-            c = 0
-
-        def _head(url, curr=Counter()):
-            curr.c += 1
-            return resps[curr.c]
-
-        def _get(url, *args, **kw):
-            return _Response(200, {'Last-Modified': _days(age),
-                                   'content-length': str(archive_size)},
-                             file=archive)
-
-        with mock_requests(head=_head, get=_get):
-            target = talos.heavy.download_profile('simple',
-                                                  profiles_dir=self.temp)
-            profile = os.path.join(self.temp, 'simple')
-            self.assertTrue(os.path.exists(profile))
-            return target
-
-    def test_download_profile(self):
-        """test downloading heavy profile"""
-        # a 12 days old profile gets updated
-        self._test_download(12)
-
-        # a 8 days two
-        self._test_download(8)
-
-        # a 2 days sticks
-        self._test_download(2)
-        self.assertTrue("fresh enough" in self.logs.data[-2])
-
-
-if __name__ == '__main__':
-    unittest.main()
deleted file mode 100755
--- a/testing/talos/tests/test_results.py
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/usr/bin/env python
-
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-"""
-test talos results parsing
-
-http://hg.mozilla.org/build/talos/file/tip/talos/results.py
-"""
-from __future__ import absolute_import
-
-import unittest
-
-import talos.filter
-import talos.results
-
-# example page load test results string
-results_string = """_x_x_mozilla_page_load
-_x_x_mozilla_page_load_details
-|i|pagename|runs|
-|0;gearflowers.svg;74;65;68;66;62
-|1;composite-scale.svg;43;44;35;41;41
-|2;composite-scale-opacity.svg;19;16;19;19;21
-|3;composite-scale-rotate.svg;18;19;20;20;19
-|4;composite-scale-rotate-opacity.svg;18;17;17;17;19
-|5;hixie-001.xml;71836;15057;15063;57436;15061
-|6;hixie-002.xml;53940;15057;38246;55323;31818
-|7;hixie-003.xml;5027;5026;13540;31503;5031
-|8;hixie-004.xml;5050;5054;5053;5054;5055
-|9;hixie-005.xml;4568;4569;4562;4545;4567
-|10;hixie-006.xml;5090;5165;5054;5015;5077
-|11;hixie-007.xml;1628;1623;1623;1617;1622
-"""
-
-
-class TestPageloaderResults(unittest.TestCase):
-
-    def test_parsing(self):
-        """test our ability to parse results data"""
-        results = talos.results.PageloaderResults(results_string)
-
-        # ensure you got all of them
-        self.assertEqual(len(results.results), 12)
-
-        # test the indices
-        indices = [i['index'] for i in results.results]
-        self.assertEqual(indices, range(12))
-
-        # test some pages
-        pages = [i['page'] for i in results.results]
-        comparison = ['hixie-00%d.xml' % i for i in range(1, 8)]
-        self.assertEqual(pages[-len(comparison):], comparison)
-
-        # test a few values
-        last = [1628., 1623., 1623, 1617., 1622.]
-        self.assertEqual(results.results[-1]['runs'], last)
-        first = [74., 65., 68., 66., 62.]
-        self.assertEqual(results.results[0]['runs'], first)
-
-    def test_filter(self):
-        """test PageloaderResults.filter function"""
-
-        # parse the data
-        results = talos.results.PageloaderResults(results_string)
-
-        # apply some filters
-        filters = [[talos.filter.ignore_first, [2]], [talos.filter.median]]
-        filtered = results.filter(*filters)
-        self.assertEqual(filtered[0][0], 66.)
-        self.assertEqual(filtered[-1][0], 1622.)
-
-        # apply some different filters
-        filters = [[talos.filter.ignore_max, []], [max, []]]
-        filtered = results.filter(*filters)
-        self.assertEqual(filtered[0][0], 68.)
-        self.assertEqual(filtered[-1][0], 1623.)
-
-
-if __name__ == '__main__':
-    unittest.main()
deleted file mode 100644
--- a/testing/talos/tests/test_talosconfig.py
+++ /dev/null
@@ -1,158 +0,0 @@
-from __future__ import absolute_import, print_function
-
-import json
-import unittest
-
-from talos import talosconfig
-from talos.configuration import YAML
-
-# globals
-ffox_path = 'test/path/to/firefox'
-command_args = [ffox_path,
-                '-profile',
-                'pathtoprofile',
-                '-tp',
-                'pathtotpmanifest',
-                '-tpchrome',
-                '-tpmozafterpaint',
-                '-tpnoisy',
-                '-rss',
-                '-tpcycles',
-                '1',
-                '-tppagecycles',
-                '1']
-with open("test_talosconfig_browser_config.json") as json_browser_config:
-    browser_config = json.load(json_browser_config)
-with open("test_talosconfig_test_config.json") as json_test_config:
-    test_config = json.load(json_test_config)
-
-
-class TestWriteConfig(unittest.TestCase):
-    def test_writeConfigFile(self):
-        obj = dict(some=123, thing='456', other=789)
-
-        self.assertEquals(
-            json.loads(talosconfig.writeConfigFile(obj, ('some', 'thing'))),
-            dict(some=123, thing='456')
-        )
-
-        # test without keys
-        self.assertEquals(
-            json.loads(talosconfig.writeConfigFile(obj, None)),
-            obj
-        )
-
-
-class TalosConfigUnitTest(unittest.TestCase):
-    """
-    A class inheriting from unittest.TestCase to test the generateTalosConfig function.
-    """
-
-    def validate(self, var1, var2):
-        # Function to check whether the output generated is correct or not.
-        # If the output generated is not correct then specify the expected output to be generated.
-        if var1 == var2:
-            return 1
-        else:
-            print("input '%s' != expected '%s'" % (var1, var2))
-
-    def test_talosconfig(self):
-        # This function stimulates a call to generateTalosConfig in talosconfig.py .
-        # It is then tested whether the output generated is correct or not.
-        # ensure that the output generated in yaml file is as expected or not.
-        yaml = YAML()
-        content = yaml.read(browser_config['bcontroller_config'])
-        self.validate(content['command'],
-                      "test/path/to/firefox " +
-                      "-profile " +
-                      "pathtoprofile " +
-                      "-tp " +
-                      "pathtotpmanifest " +
-                      "-tpchrome " +
-                      "-tpmozafterpaint " +
-                      "-tpnoisy " +
-                      "-rss " +
-                      "-tpcycles " +
-                      "1 " +
-                      "-tppagecycles " +
-                      "1")
-        self.validate(content['child_process'], "plugin-container")
-        self.validate(content['process'], "firefox.exe")
-        self.validate(content['browser_wait'], 5)
-        self.validate(content['test_timeout'], 1200)
-        self.validate(content['browser_log'], "browser_output.txt")
-        self.validate(content['browser_path'], "test/path/to/firefox")
-        self.validate(content['error_filename'], "pathtoerrorfile")
-        self.validate(content['xperf_path'],
-                      "C:/Program Files/Microsoft Windows Performance Toolkit/xperf.exe")
-        self.validate(content['buildid'], 20131205075310)
-        self.validate(content['sourcestamp'], "39faf812aaec")
-        self.validate(content['repository'], "http://hg.mozilla.org/releases/mozilla-release")
-        self.validate(content['title'], "qm-pxp01")
-        self.validate(content['testname'], "tp5n")
-        self.validate(content['xperf_providers'], ['PROC_THREAD',
-                                                   'LOADER',
-                                                   'HARD_FAULTS',
-                                                   'FILENAME',
-                                                   'FILE_IO',
-                                                   'FILE_IO_INIT'])
-        self.validate(content['xperf_user_providers'],
-                      ['Mozilla Generic Provider', 'Microsoft-Windows-TCPIP'])
-        self.validate(content['xperf_stackwalk'],
-                      ['FileCreate', 'FileRead', 'FileWrite', 'FileFlush', 'FileClose'])
-        self.validate(content['processID'], "None")
-        self.validate(content['approot'], "test/path/to")
-
-    def test_errors(self):
-        # Tests if errors are correctly raised.
-
-        # Testing that error is correctly raised or not if xperf_path is missing
-        browser_config_copy = browser_config.copy()
-        test_config_copy = test_config.copy()
-        del browser_config_copy['xperf_path']
-        talosconfig.generateTalosConfig(command_args, browser_config_copy, test_config_copy)
-        yaml = YAML()
-        content = yaml.read(browser_config['bcontroller_config'])
-
-        with self.assertRaises(KeyError):
-            self.validate(content['xperf_path'],
-                          "C:/Program Files/Microsoft Windows Performance Toolkit/xperf.exe")
-
-        # Test to see if keyerror is raised or not for calling testname when xperf_path is missing
-        with self.assertRaises(KeyError):
-            self.validate(content['testname'], "tp5n")
-
-        # Testing that error is correctly raised or not if xperf_providers is missing
-        browser_config_copy = browser_config.copy()
-        test_config_copy = test_config.copy()
-        del test_config_copy['xperf_providers']
-        talosconfig.generateTalosConfig(command_args, browser_config_copy, test_config_copy)
-        yaml = YAML()
-        content = yaml.read(browser_config['bcontroller_config'])
-
-        # Checking keyerror when calling xperf_providers
-        with self.assertRaises(KeyError):
-            self.validate(content['xperf_providers'], ['PROC_THREAD', 'LOADER', 'HARD_FAULTS',
-                                                       'FILENAME', 'FILE_IO', 'FILE_IO_INIT'])
-
-        # Checking keyerror when calling xperf_user_providers when xperf_providers is missing
-        with self.assertRaises(KeyError):
-            self.validate(content['xperf_user_providers'],
-                          ['Mozilla Generic Provider', 'Microsoft-Windows-TCPIP'])
-
-        # Checking keyerror when calling xperf_stackwalk when xperf_providers is missing
-        with self.assertRaises(KeyError):
-            self.validate(content['xperf_stackwalk'],
-                          ['FileCreate', 'FileRead', 'FileWrite', 'FileFlush', 'FileClose'])
-
-        # Checking keyerror when calling processID when xperf_providers is missing
-        with self.assertRaises(KeyError):
-            self.validate(content['processID'], "None")
-
-        # Checking keyerror when calling approot when xperf_providers is missing
-        with self.assertRaises(KeyError):
-            self.validate(content['approot'], "test/path/to")
-
-
-if __name__ == '__main__':
-    unittest.main()
deleted file mode 100755
--- a/testing/talos/tests/test_urlsplit.py
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/env python
-
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-"""
-test URL parsing; see
-https://bugzilla.mozilla.org/show_bug.cgi?id=793875
-"""
-from __future__ import absolute_import
-
-import unittest
-
-import talos.utils
-
-
-class TestURLParsing(unittest.TestCase):
-
-    def test_http_url(self):
-        """test parsing an HTTP URL"""
-
-        url = 'https://www.mozilla.org/en-US/about/'
-        parsed = talos.utils.urlsplit(url)
-        self.assertEqual(parsed,
-                         ['https', 'www.mozilla.org', '/en-US/about/', '', ''])
-
-    def test_file_url(self):
-        """test parsing file:// URLs"""
-
-        # unix-like file path
-        url = 'file:///foo/bar'
-        parsed = talos.utils.urlsplit(url)
-        self.assertEqual(parsed,
-                         ['file', '', '/foo/bar', '', ''])
-
-        # windows-like file path
-        url = r'file://c:\foo\bar'
-        parsed = talos.utils.urlsplit(url)
-        self.assertEqual(parsed,
-                         ['file', '', r'c:\foo\bar', '', ''])
-
-    def test_implicit_file_url(self):
-        """
-        test parsing URLs with no scheme, which by default are assumed
-        to be file:// URLs
-        """
-
-        path = '/foo/bar'
-        parsed = talos.utils.urlsplit(path)
-        self.assertEqual(parsed,
-                         ['file', '', '/foo/bar', '', ''])
-
-
-if __name__ == '__main__':
-    unittest.main()
deleted file mode 100644
--- a/testing/talos/tests/test_utils.py
+++ /dev/null
@@ -1,43 +0,0 @@
-from __future__ import absolute_import
-
-import os
-import unittest
-
-from talos import utils
-
-
-class TestTimer(unittest.TestCase):
-    def test_timer(self):
-        timer = utils.Timer()
-        timer._start_time -= 3  # remove three seconds for the test
-        self.assertEquals(timer.elapsed(), '00:00:03')
-
-
-class TestRestoreEnv(unittest.TestCase):
-    def test_basic(self):
-        env_var = 'THIS_IS_A_ENV_VAR_NOT_USED'
-        self.assertNotIn(env_var, os.environ)
-        with utils.restore_environment_vars():
-            os.environ[env_var] = '1'
-        self.assertNotIn(env_var, os.environ)
-
-
-class TestInterpolate(unittest.TestCase):
-    def test_interpolate_talos_is_always_defines(self):
-        self.assertEquals(utils.interpolate('${talos}'), utils.here)
-
-    def test_interpolate_custom_placeholders(self):
-        self.assertEquals(utils.interpolate('${talos} ${foo} abc', foo='bar', unused=1),
-                          utils.here + ' bar abc')
-
-
-class TestParsePref(unittest.TestCase):
-    def test_parse_string(self):
-        self.assertEquals(utils.parse_pref('abc'), 'abc')
-
-    def test_parse_int(self):
-        self.assertEquals(utils.parse_pref('12'), 12)
-
-    def test_parse_bool(self):
-        self.assertEquals(utils.parse_pref('true'), True)
-        self.assertEquals(utils.parse_pref('false'), False)
deleted file mode 100755
--- a/testing/talos/tests/test_xrestop.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python
-
-"""
-Tests for talos.xrestop
-"""
-from __future__ import absolute_import
-
-import os
-import subprocess
-import unittest
-
-from talos.cmanager_linux import xrestop
-
-here = os.path.dirname(os.path.abspath(__file__))
-xrestop_output = os.path.join(here, 'xrestop_output.txt')
-
-
-class TestXrestop(unittest.TestCase):
-
-    def test_parsing(self):
-        """test parsing xrestop output from xrestop_output.txt"""
-
-        class MockPopen(object):
-            """
-            stub class for subprocess.Popen
-            We mock this to return a local static copy of xrestop output
-            This has the unfortunate nature of depending on implementation
-            details.
-            """
-            def __init__(self, *args, **kwargs):
-                self.returncode = 0
-
-            def communicate(self):
-                stdout = open(xrestop_output).read()
-                return stdout, ''
-
-        # monkey-patch subprocess.Popen
-        Popen = subprocess.Popen
-        subprocess.Popen = MockPopen
-
-        # get the output
-        output = xrestop()
-
-        # ensure that the parsed output is equal to what is in
-        # xrestop_output.txt
-        self.assertEqual(len(output), 7)  # seven windows with PIDs
-
-        # the first window is Thunderbird
-        pid = 2035  # thundrbird's pid
-        self.assertTrue(pid in output)
-        thunderbird = output[pid]
-        self.assertEqual(thunderbird['index'], 0)
-        self.assertEqual(thunderbird['total bytes'], '~4728761')
-
-        # PID=1668 is a Terminal
-        pid = 1668
-        self.assertTrue(pid in output)
-        terminal = output[pid]
-        self.assertEqual(terminal['pixmap bytes'], '1943716')
-
-        # cleanup: set subprocess.Popen back
-        subprocess.Popen = Popen
-
-
-if __name__ == '__main__':
-    unittest.main()