Bug 1454466 - Move raptor in-tree; r=ahal
author Rob Wood <rwood@mozilla.com>
Mon, 16 Apr 2018 17:15:38 -0400
changeset 470351 c75acc48f536809b2c1ac58a857742cd1b107eb8
parent 470350 97a6ec0981d06780fcf8b1c8583d8850aaf2ce96
child 470352 1299c9a06811537495e7402d9e9cd4de1fe0eb57
push id 1728
push user jlund@mozilla.com
push date Mon, 18 Jun 2018 21:12:27 +0000
treeherder mozilla-release@c296fde26f5f
reviewers ahal
bugs 1454466
milestone 61.0a1
Bug 1454466 - Move raptor in-tree; r=ahal MozReview-Commit-ID: AkqbROfvmbu
.hgignore
build/virtualenv_packages.txt
taskcluster/ci/source-test/python.yml
testing/raptor/.eslintrc.js
testing/raptor/MANIFEST.in
testing/raptor/raptor/__init__.py
testing/raptor/raptor/cmdline.py
testing/raptor/raptor/control_server.py
testing/raptor/raptor/gen_test_config.py
testing/raptor/raptor/manifest.py
testing/raptor/raptor/outputhandler.py
testing/raptor/raptor/playback/__init__.py
testing/raptor/raptor/playback/base.py
testing/raptor/raptor/playback/mitmproxy.py
testing/raptor/raptor/preferences/firefox.json
testing/raptor/raptor/raptor.ini
testing/raptor/raptor/raptor.py
testing/raptor/raptor/tests/raptor-chrome-tp7.ini
testing/raptor/raptor/tests/raptor-firefox-tp7.ini
testing/raptor/raptor/tests/raptor-speedometer.ini
testing/raptor/requirements.txt
testing/raptor/setup.py
testing/raptor/test/__init__.py
testing/raptor/test/conftest.py
testing/raptor/test/files/fake_binary.exe
testing/raptor/test/python.ini
testing/raptor/test/test_cmdline.py
testing/raptor/test/test_control_server.py
testing/raptor/test/test_playback.py
testing/raptor/test/test_raptor.py
testing/raptor/webext/raptor/benchmark-relay.js
testing/raptor/webext/raptor/manifest.json
testing/raptor/webext/raptor/measure.js
testing/raptor/webext/raptor/runner.js
toolkit/toolkit.mozbuild
tools/lint/flake8.yml
tools/lint/py2.yml
tools/lint/py3.yml
--- a/.hgignore
+++ b/.hgignore
@@ -172,8 +172,14 @@ tps_result\.json
 
 subinclude:servo/.hgignore
 
 # Ignore Infer output
 ^infer-out/
 
 # https://bz.mercurial-scm.org/show_bug.cgi?id=5322
 ^comm/
+
+# Ignore various raptor performance framework files
+^testing/raptor/.raptor-venv
+^testing/raptor/raptor-venv
+^testing/raptor/raptor/tests/.*.json
+^testing/raptor/webext/raptor/auto_gen_test_config.js
--- a/build/virtualenv_packages.txt
+++ b/build/virtualenv_packages.txt
@@ -41,16 +41,17 @@ mozilla.pth:layout/tools/reftest
 mozilla.pth:other-licenses/ply/
 mozilla.pth:taskcluster
 mozilla.pth:testing
 mozilla.pth:testing/firefox-ui/harness
 mozilla.pth:testing/marionette/client
 mozilla.pth:testing/marionette/harness
 mozilla.pth:testing/marionette/harness/marionette_harness/runner/mixins/browsermob-proxy-py
 mozilla.pth:testing/marionette/puppeteer/firefox
+mozilla.pth:testing/raptor
 mozilla.pth:testing/talos
 packages.txt:testing/mozbase/packages.txt
 mozilla.pth:tools
 mozilla.pth:testing/web-platform
 mozilla.pth:testing/web-platform/tests/tools/wptrunner
 mozilla.pth:testing/web-platform/tests/tools/wptserve
 mozilla.pth:testing/web-platform/tests/tools/six
 mozilla.pth:testing/xpcshell
--- a/taskcluster/ci/source-test/python.yml
+++ b/taskcluster/ci/source-test/python.yml
@@ -124,16 +124,26 @@ mozversioncontrol:
     treeherder:
         symbol: py(vcs)
     run:
         mach: python-test --subsuite mozversioncontrol
     when:
         files-changed:
             - 'python/mozversioncontrol/**'
 
+raptor:
+    description: testing/raptor unit tests
+    treeherder:
+        symbol: py(rap)
+    run:
+        mach: python-test --subsuite raptor
+    when:
+        files-changed:
+            - 'testing/raptor/**'
+
 reftest-harness:
     description: layout/tools/reftest unittests
     platform:
         - linux64/opt
         - linux64/debug
         - linux64-asan/opt
     require-build: true
     always-target: false
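
The task added above gives the raptor unit tests their own py(rap) Treeherder symbol and runs them whenever files under testing/raptor change; the same subsuite can be run locally with:

  ./mach python-test --subsuite raptor
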
new file mode 100644
--- /dev/null
+++ b/testing/raptor/.eslintrc.js
@@ -0,0 +1,21 @@
+"use strict";
+
+module.exports = {
+
+  globals: {
+    "browser": [],
+    "chrome": [],
+    "getTestConfig": true,
+    "startMark": [],
+    "endMark": [],
+    "name": "",
+  },
+
+  "plugins": [
+    "mozilla"
+  ],
+
+  "rules": {
+    "mozilla/avoid-Date-timing": "error"
+  }
+};
new file mode 100644
--- /dev/null
+++ b/testing/raptor/MANIFEST.in
@@ -0,0 +1,3 @@
+include raptor/preferences/*.json
+include raptor/tests/*.ini
+include requirements.txt
new file mode 100644
new file mode 100644
--- /dev/null
+++ b/testing/raptor/raptor/cmdline.py
@@ -0,0 +1,39 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import, print_function
+
+import argparse
+import os
+
+from mozlog.commandline import add_logging_group
+
+
+def create_parser(mach_interface=False):
+    parser = argparse.ArgumentParser()
+    add_arg = parser.add_argument
+
+    add_arg('-t', '--test', default=None, dest="test",
+            help="name of raptor test to run")
+    add_arg('--app', default='firefox', dest='app',
+            help="name of the application we are testing (default: firefox)",
+            choices=['firefox', 'chrome'])
+    add_arg('-b', '--binary', required=True,
+            help="path to the browser executable that we are testing")
+
+    add_logging_group(parser)
+    return parser
+
+
+def verify_options(parser, args):
+    ctx = vars(args)
+
+    if not os.path.isfile(args.binary):
+        parser.error("{binary} does not exist!".format(**ctx))
+
+
+def parse_args(argv=None):
+    parser = create_parser()
+    args = parser.parse_args(argv)
+    verify_options(parser, args)
+    return args
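
For illustration, a minimal sketch of driving the parser directly; the binary path is hypothetical and must point at an existing file, otherwise verify_options() aborts via parser.error():

  from raptor.cmdline import parse_args

  args = parse_args(['--binary', '/path/to/firefox', '-t', 'raptor-firefox-tp7'])
  # args.app defaults to 'firefox'; pass --app chrome to test chrome instead
  print(args.app, args.test, args.binary)
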
new file mode 100644
--- /dev/null
+++ b/testing/raptor/raptor/control_server.py
@@ -0,0 +1,94 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# simple local server on port 8000, to demonstrate
+# receiving hero element timing results from a web extension
+from __future__ import absolute_import
+
+import BaseHTTPServer
+import json
+import os
+import threading
+
+from mozlog import get_proxy_logger
+
+LOG = get_proxy_logger(component='control_server')
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+
+class MyHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+
+    def do_GET(self):
+        # get handler, received request for test settings from web ext runner
+        self.send_response(200)
+        validFiles = ['raptor-firefox-tp7.json',
+                      'raptor-chrome-tp7.json',
+                      'raptor-speedometer.json']
+        head, tail = os.path.split(self.path)
+        if tail in validFiles:
+            LOG.info('reading test settings from ' + tail)
+            try:
+                with open(tail) as json_settings:
+                    self.send_header('Access-Control-Allow-Origin', '*')
+                    self.send_header('Content-type', 'application/json')
+                    self.end_headers()
+                    self.wfile.write(json.dumps(json.load(json_settings)))
+                    self.wfile.close()
+                    LOG.info('sent test settings to web ext runner')
+            except Exception as ex:
+                LOG.info('control server exception')
+                LOG.info(ex)
+        else:
+            LOG.info('received request for unknown file: ' + self.path)
+
+    def do_POST(self):
+        # post handler, received something from webext
+        self.send_response(200)
+        self.send_header('Access-Control-Allow-Origin', '*')
+        self.send_header('Content-type', 'text/html')
+        self.end_headers()
+        content_len = int(self.headers.getheader('content-length'))
+        post_body = self.rfile.read(content_len)
+        # could have received a status update or test results
+        data = json.loads(post_body)
+        LOG.info("received " + data['type'] + ": " + str(data['data']))
+
+    def do_OPTIONS(self):
+        self.send_response(200, "ok")
+        self.send_header('Access-Control-Allow-Origin', '*')
+        self.send_header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
+        self.send_header("Access-Control-Allow-Headers", "X-Requested-With")
+        self.send_header("Access-Control-Allow-Headers", "Content-Type")
+        self.end_headers()
+
+
+class RaptorControlServer():
+    """Container class for Raptor Control Server"""
+
+    def __init__(self):
+        self.raptor_venv = os.path.join(os.getcwd(), 'raptor-venv')
+        self.server = None
+        self._server_thread = None
+
+    def start(self):
+        config_dir = os.path.join(here, 'tests')
+        os.chdir(config_dir)
+        server_address = ('', 8000)
+
+        server_class = BaseHTTPServer.HTTPServer
+        handler_class = MyHandler
+
+        httpd = server_class(server_address, handler_class)
+
+        self._server_thread = threading.Thread(target=httpd.serve_forever)
+        self._server_thread.setDaemon(True)  # don't hang on exit
+        self._server_thread.start()
+        LOG.info("raptor control server running on port 8000...")
+        self.server = httpd
+
+    def stop(self):
+        LOG.info("shutting down control server")
+        self.server.shutdown()
+        self._server_thread.join()
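
For illustration, a hedged sketch of the client side of this protocol, i.e. what the web extension posts while a test is running (it assumes a control server started via RaptorControlServer().start() is already listening on port 8000):

  import json
  import urllib2

  payload = json.dumps({'type': 'webext_status', 'data': 'begin pagecycle 1'})
  req = urllib2.Request('http://127.0.0.1:8000/', data=payload,
                        headers={'Content-Type': 'application/json'})
  urllib2.urlopen(req)
  # do_POST() above logs: received webext_status: begin pagecycle 1
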
new file mode 100644
--- /dev/null
+++ b/testing/raptor/raptor/gen_test_config.py
@@ -0,0 +1,32 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
+
+import os
+
+from mozlog import get_proxy_logger
+
+
+here = os.path.abspath(os.path.dirname(__file__))
+webext_dir = os.path.join(os.path.dirname(here), 'webext', 'raptor')
+LOG = get_proxy_logger(component="gen_test_url")
+
+
+def gen_test_config(browser, test):
+    LOG.info("writing test settings url background js, so webext can get it")
+
+    data = """// this file is auto-generated by raptor, do not edit directly
+function getTestConfig() {
+    return {"browser": "%s", "test_settings_url": "http://localhost:8000/%s.json"};
+}
+
+""" % (browser, test)
+
+    webext_background_script = os.path.join(webext_dir, "auto_gen_test_config.js")
+
+    with open(webext_background_script, "w") as fh:
+        fh.write(data)
+
+    LOG.info("finished writing test config into webext")
new file mode 100644
--- /dev/null
+++ b/testing/raptor/raptor/manifest.py
@@ -0,0 +1,87 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
+
+import json
+import os
+
+from manifestparser import TestManifest
+from mozlog import get_proxy_logger
+
+here = os.path.abspath(os.path.dirname(__file__))
+raptor_ini = os.path.join(here, 'raptor.ini')
+tests_dir = os.path.join(here, 'tests')
+LOG = get_proxy_logger(component="manifest")
+
+
+def filter_app(tests, values):
+    for test in tests:
+        if values["app"] in test['apps']:
+            yield test
+
+
+def get_browser_test_list(browser_app):
+    LOG.info(raptor_ini)
+    test_manifest = TestManifest([raptor_ini], strict=False)
+    info = {"app": browser_app}
+    return test_manifest.active_tests(exists=False,
+                                      disabled=False,
+                                      filters=[filter_app],
+                                      **info)
+
+
+def write_test_settings_json(test_details):
+    # write test settings json file with test details that the control
+    # server will provide for the web ext
+    test_settings = {
+        "raptor-options": {
+            "type": test_details['type'],
+            "test_url": test_details['test_url'],
+            "page_cycles": int(test_details['page_cycles'])
+        }
+    }
+
+    if test_details['type'] == "pageload":
+        test_settings['raptor-options']['measure'] = {}
+        if "fnbpaint" in test_details['measure']:
+            test_settings['raptor-options']['measure']['fnbpaint'] = True
+        if "fcp" in test_details['measure']:
+            test_settings['raptor-options']['measure']['fcp'] = True
+        if "hero" in test_details['measure']:
+            test_settings['raptor-options']['measure']['hero'] = test_details['hero'].split()
+    if test_details.get("page_timeout", None) is not None:
+        test_settings['raptor-options']['page_timeout'] = int(test_details['page_timeout'])
+
+    settings_file = os.path.join(tests_dir, test_details['name'] + '.json')
+    try:
+        with open(settings_file, 'w') as out_file:
+            json.dump(test_settings, out_file, indent=4, ensure_ascii=False)
+    except IOError:
+        LOG.info("abort: exception writing test settings json!")
+
+
+def get_raptor_test_list(args):
+    # get a list of available raptor tests, for the browser we're testing on
+    available_tests = get_browser_test_list(args.app)
+    tests_to_run = []
+
+    # if test name not provided on command line, run all available raptor tests for this browser;
+    # if test name provided on command line, make sure it exists, and then only include that one
+    if args.test is not None:
+        for next_test in available_tests:
+            if next_test['name'] == args.test:
+                tests_to_run = [next_test]
+                break
+        if len(tests_to_run) == 0:
+            LOG.critical("abort: specified test doesn't exist!")
+    else:
+        tests_to_run = available_tests
+
+    # write out .json test setting files for the control server to read and send to web ext
+    if len(tests_to_run) != 0:
+        for test in tests_to_run:
+            write_test_settings_json(test)
+
+    return tests_to_run
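
For illustration, a hedged sketch of resolving the test list and the settings JSON written for the control server; a default logger is set up first, and the Namespace stands in for parsed command-line arguments:

  from argparse import Namespace

  from mozlog.structuredlog import StructuredLogger, set_default_logger
  from raptor.manifest import get_raptor_test_list

  set_default_logger(StructuredLogger('manifest-demo'))
  args = Namespace(app='firefox', test='raptor-firefox-tp7')
  tests = get_raptor_test_list(args)
  # writes raptor/tests/raptor-firefox-tp7.json, roughly:
  # {"raptor-options": {"type": "pageload",
  #                     "test_url": "http://localhost:8081/heroes",
  #                     "page_cycles": 25,
  #                     "measure": {"fnbpaint": true,
  #                                 "hero": ["mugshot", "title", "anime"]}}}
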
new file mode 100644
--- /dev/null
+++ b/testing/raptor/raptor/outputhandler.py
@@ -0,0 +1,51 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# originally from talos_process.py
+from __future__ import absolute_import
+
+import json
+import time
+from threading import Thread
+
+from mozlog import get_proxy_logger
+
+
+LOG = get_proxy_logger(component='raptor_process')
+
+
+class OutputHandler(object):
+    def __init__(self):
+        self.proc = None
+        self.kill_thread = Thread(target=self.wait_for_quit)
+        self.kill_thread.daemon = True
+
+    def __call__(self, line):
+        if not line.strip():
+            return
+        # positional 'errors' arg: str.decode() rejects keyword arguments on Python 2
+        line = line.decode('utf-8', 'replace')
+
+        try:
+            data = json.loads(line)
+        except ValueError:
+            if line.find('__raptor_shutdownBrowser') != -1:
+                self.kill_thread.start()
+            self.process_output(line)
+            return
+
+        if isinstance(data, dict) and 'action' in data:
+            LOG.log_raw(data)
+        else:
+            self.process_output(json.dumps(data))
+
+    def process_output(self, line):
+        LOG.process_output(self.proc.pid, line)
+
+    def wait_for_quit(self, timeout=5):
+        """Wait timeout seconds for the process to exit. If it hasn't
+        exited by then, kill it.
+        """
+        time.sleep(timeout)
+        if self.proc.poll() is None:
+            self.proc.kill()
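
For illustration, a self-contained sketch of the shutdown handshake (POSIX-only stand-in process; a default logger is set as in the unit tests): a non-JSON output line containing the marker emitted by the web extension starts the kill thread, which gives the browser five seconds to exit before killing it.

  import subprocess

  from mozlog.structuredlog import StructuredLogger, set_default_logger
  from raptor.outputhandler import OutputHandler

  set_default_logger(StructuredLogger('outputhandler-demo'))

  handler = OutputHandler()
  handler.proc = subprocess.Popen(['sleep', '60'])  # stand-in for the browser process
  handler('__raptor_shutdownBrowser')               # marker dumped by runner.js on completion
  handler.kill_thread.join()                        # returns once the stand-in has been killed
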
new file mode 100644
--- /dev/null
+++ b/testing/raptor/raptor/playback/__init__.py
@@ -0,0 +1,23 @@
+from __future__ import absolute_import
+
+from mozlog import get_proxy_logger
+from .mitmproxy import Mitmproxy
+
+LOG = get_proxy_logger(component='mitmproxy')
+
+playback_cls = {
+    'mitmproxy': Mitmproxy,
+}
+
+
+def get_playback(config):
+    tool_name = config.get('playback_tool', None)
+    if tool_name is None:
+        LOG.critical("playback_tool name not found in config")
+        return
+    if playback_cls.get(tool_name, None) is None:
+        LOG.critical("specified playback tool is unsupported: %s" % tool_name)
+        return None
+
+    cls = playback_cls.get(tool_name)
+    return cls(config)
new file mode 100644
--- /dev/null
+++ b/testing/raptor/raptor/playback/base.py
@@ -0,0 +1,31 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# abstract class for all playback tools
+from __future__ import absolute_import
+
+from abc import ABCMeta, abstractmethod
+
+
+class Playback(object):
+    __metaclass__ = ABCMeta
+
+    def __init__(self, config):
+        self.config = config
+
+    @abstractmethod
+    def download(self):
+        pass
+
+    @abstractmethod
+    def setup(self):
+        pass
+
+    @abstractmethod
+    def start(self):
+        pass
+
+    @abstractmethod
+    def stop(self):
+        pass
new file mode 100644
--- /dev/null
+++ b/testing/raptor/raptor/playback/mitmproxy.py
@@ -0,0 +1,40 @@
+'''This helps load mitmproxy's cert and change proxy settings for Firefox.'''
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
+
+import os
+
+from mozlog import get_proxy_logger
+
+from .base import Playback
+
+here = os.path.dirname(os.path.realpath(__file__))
+tooltool_cache = os.path.join(here, 'tooltoolcache')
+
+LOG = get_proxy_logger(component='mitmproxy')
+
+
+class Mitmproxy(Playback):
+
+    def __init__(self, config):
+        self.config = config
+        self.download()
+        self.setup()
+
+    def download(self):
+        LOG.info("todo: download mitmproxy release binary")
+        return
+
+    def setup(self):
+        LOG.info("todo: setup mitmproxy")
+        return
+
+    def start(self):
+        LOG.info("todo: start mitmproxy playback")
+        return
+
+    def stop(self):
+        LOG.info("todo: stop mitmproxy playback")
+        return
new file mode 100644
--- /dev/null
+++ b/testing/raptor/raptor/preferences/firefox.json
@@ -0,0 +1,109 @@
+{
+    "app.normandy.api_url": "https://127.0.0.1/selfsupport-dummy/",
+    "app.update.enabled": false,
+    "browser.EULA.override": true,
+    "browser.aboutHomeSnippets.updateUrl": "https://127.0.0.1/about-dummy/",
+    "browser.addon-watch.interval": -1,
+    "browser.bookmarks.max_backups": 0,
+    "browser.cache.disk.smart_size.enabled": false,
+    "browser.cache.disk.smart_size.first_run": false,
+    "browser.chrome.dynamictoolbar": false,
+    "browser.contentHandlers.types.0.uri": "http://127.0.0.1/rss?url=%s",
+    "browser.contentHandlers.types.1.uri": "http://127.0.0.1/rss?url=%s",
+    "browser.contentHandlers.types.2.uri": "http://127.0.0.1/rss?url=%s",
+    "browser.contentHandlers.types.3.uri": "http://127.0.0.1/rss?url=%s",
+    "browser.contentHandlers.types.4.uri": "http://127.0.0.1/rss?url=%s",
+    "browser.contentHandlers.types.5.uri": "http://127.0.0.1/rss?url=%s",
+    "browser.dom.window.dump.enabled": true,
+    "browser.link.open_newwindow": 2,
+    "browser.newtabpage.activity-stream.default.sites": "",
+    "browser.newtabpage.activity-stream.feeds.section.topstories": false,
+    "browser.newtabpage.activity-stream.feeds.snippets": false,
+    "browser.newtabpage.activity-stream.telemetry": false,
+    "browser.newtabpage.activity-stream.tippyTop.service.endpoint": "",
+    "browser.ping-centre.production.endpoint": "https://127.0.0.1/pingcentre/dummy/",
+    "browser.ping-centre.staging.endpoint": "https://127.0.0.1/pingcentre/dummy/",
+    "browser.reader.detectedFirstArticle": true,
+    "browser.safebrowsing.blockedURIs.enabled": false,
+    "browser.safebrowsing.downloads.enabled": false,
+    "browser.safebrowsing.downloads.remote.url": "http://127.0.0.1/safebrowsing-dummy/downloads",
+    "browser.safebrowsing.malware.enabled": false,
+    "browser.safebrowsing.passwords.enabled": false,
+    "browser.safebrowsing.phishing.enabled": false,
+    "browser.safebrowsing.provider.google.gethashURL": "http://127.0.0.1/safebrowsing-dummy/gethash",
+    "browser.safebrowsing.provider.google.updateURL": "http://127.0.0.1/safebrowsing-dummy/update",
+    "browser.safebrowsing.provider.google4.gethashURL": "http://127.0.0.1/safebrowsing4-dummy/gethash",
+    "browser.safebrowsing.provider.google4.updateURL": "http://127.0.0.1/safebrowsing4-dummy/update",
+    "browser.safebrowsing.provider.mozilla.gethashURL": "http://127.0.0.1/safebrowsing-dummy/gethash",
+    "browser.safebrowsing.provider.mozilla.updateURL": "http://127.0.0.1/safebrowsing-dummy/update",
+    "browser.search.countryCode": "US",
+    "browser.search.geoSpecificDefaults": false,
+    "browser.search.geoip.url": "",
+    "browser.search.isUS": true,
+    "browser.shell.checkDefaultBrowser": false,
+    "browser.snippets.enabled": false,
+    "browser.snippets.syncPromo.enabled": false,
+    "browser.tabs.remote.autostart": false,
+    "browser.urlbar.userMadeSearchSuggestionsChoice": true,
+    "browser.warnOnQuit": false,
+    "browser.webapps.checkForUpdates": 0,
+    "datareporting.healthreport.documentServerURI": "http://127.0.0.1/healthreport/",
+    "datareporting.policy.dataSubmissionPolicyBypassNotification": true,
+    "devtools.chrome.enabled": false,
+    "devtools.debugger.remote-enabled": false,
+    "devtools.theme": "light",
+    "devtools.timeline.enabled": false,
+    "dom.allow_scripts_to_close_windows": true,
+    "dom.disable_open_during_load": false,
+    "dom.disable_window_flip": true,
+    "dom.disable_window_move_resize": true,
+    "dom.max_chrome_script_run_time": 0,
+    "dom.max_script_run_time": 0,
+    "dom.performance.time_to_non_blank_paint.enabled": true,
+    "dom.send_after_paint_to_content": true,
+    "experiments.manifest.uri": "https://127.0.0.1/experiments-dummy/manifest",
+    "extensions.autoDisableScopes": 10,
+    "extensions.blocklist.enabled": false,
+    "extensions.blocklist.url": "http://127.0.0.1/extensions-dummy/blocklistURL",
+    "extensions.checkCompatibility": false,
+    "extensions.enabledScopes": 5,
+    "extensions.getAddons.get.url": "http://127.0.0.1/extensions-dummy/repositoryGetURL",
+    "extensions.getAddons.getWithPerformance.url": "http://127.0.0.1/extensions-dummy/repositoryGetWithPerformanceURL",
+    "extensions.getAddons.search.browseURL": "http://127.0.0.1/extensions-dummy/repositoryBrowseURL",
+    "extensions.hotfix.url": "http://127.0.0.1/extensions-dummy/hotfixURL",
+    "extensions.legacy.enabled": true,
+    "extensions.systemAddon.update.url": "http://127.0.0.1/dummy-system-addons.xml",
+    "extensions.update.background.url": "http://127.0.0.1/extensions-dummy/updateBackgroundURL",
+    "extensions.update.enabled": false,
+    "extensions.update.notifyUser": false,
+    "extensions.update.url": "http://127.0.0.1/extensions-dummy/updateURL",
+    "extensions.webservice.discoverURL": "http://127.0.0.1/extensions-dummy/discoveryURL",
+    "general.useragent.updates.enabled": false,
+    "hangmonitor.timeout": 0,
+    "identity.fxaccounts.auth.uri": "https://127.0.0.1/fxa-dummy/",
+    "identity.fxaccounts.migrateToDevEdition": false,
+    "lightweightThemes.selectedThemeID": "",
+    "media.capturestream_hints.enabled": true,
+    "media.gmp-manager.updateEnabled": false,
+    "media.gmp-manager.url": "http://127.0.0.1/gmpmanager-dummy/update.xml",
+    "media.libavcodec.allow-obsolete": true,
+    "media.navigator.enabled": true,
+    "media.navigator.permission.disabled": true,
+    "media.peerconnection.enabled": true,
+    "network.http.speculative-parallel-limit": 0,
+    "network.proxy.http": "localhost",
+    "network.proxy.http_port": 80,
+    "network.proxy.type": 1,
+    "places.database.lastMaintenance": 2147483647,
+    "plugin.state.flash": 0,
+    "plugins.flashBlock.enabled": false,
+    "privacy.trackingprotection.annotate_channels": false,
+    "privacy.trackingprotection.enabled": false,
+    "privacy.trackingprotection.introURL": "http://127.0.0.1/trackingprotection/tour",
+    "privacy.trackingprotection.pbmode.enabled": false,
+    "security.enable_java": false,
+    "security.fileuri.strict_origin_policy": false,
+    "security.turn_off_all_security_so_that_viruses_can_take_over_this_computer": true,
+    "toolkit.telemetry.server": "https://127.0.0.1/telemetry-dummy/",
+    "xpinstall.signatures.required": false
+}
new file mode 100644
--- /dev/null
+++ b/testing/raptor/raptor/raptor.ini
@@ -0,0 +1,4 @@
+# raptor tests
+[include:tests/raptor-firefox-tp7.ini]
+[include:tests/raptor-chrome-tp7.ini]
+[include:tests/raptor-speedometer.ini]
new file mode 100644
--- /dev/null
+++ b/testing/raptor/raptor/raptor.py
@@ -0,0 +1,150 @@
+#!/usr/bin/env python
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import
+
+import json
+import os
+import sys
+import time
+
+import mozinfo
+
+from mozlog import commandline, get_default_logger
+from mozprofile import create_profile
+from mozrunner import runners
+
+from raptor.cmdline import parse_args
+from raptor.control_server import RaptorControlServer
+from raptor.gen_test_config import gen_test_config
+from raptor.outputhandler import OutputHandler
+from raptor.playback import get_playback
+from raptor.manifest import get_raptor_test_list
+
+here = os.path.abspath(os.path.dirname(__file__))
+webext_dir = os.path.join(os.path.dirname(here), 'webext')
+
+
+class Raptor(object):
+    """Container class for Raptor"""
+
+    def __init__(self, app, binary):
+        self.config = {}
+        self.config['app'] = app
+        self.config['binary'] = binary
+        self.config['platform'] = mozinfo.os
+
+        self.raptor_venv = os.path.join(os.getcwd(), 'raptor-venv')
+        self.log = get_default_logger(component='raptor')
+        self.control_server = None
+        self.playback = None
+
+        # Create the profile
+        pref_file = os.path.join(here, 'preferences', '{}.json'.format(self.config['app']))
+        prefs = {}
+        if os.path.isfile(pref_file):
+            with open(pref_file, 'r') as fh:
+                prefs = json.load(fh)
+
+        try:
+            self.profile = create_profile(self.config['app'], preferences=prefs)
+        except NotImplementedError:
+            self.profile = None
+
+        # Create the runner
+        self.output_handler = OutputHandler()
+        process_args = {
+            'processOutputLine': [self.output_handler],
+        }
+        runner_cls = runners[app]
+        self.runner = runner_cls(
+            binary, profile=self.profile, process_args=process_args)
+
+    def start_control_server(self):
+        self.control_server = RaptorControlServer()
+        self.control_server.start()
+
+    def run_test(self, test, timeout=None):
+        self.log.info("starting raptor test: %s" % test['name'])
+        gen_test_config(self.config['app'], test['name'])
+
+        self.profile.addons.install(os.path.join(webext_dir, 'raptor'))
+
+        # some tests require tools to playback the test pages
+        if test.get('playback', None) is not None:
+            self.config['playback_tool'] = test.get('playback')
+            self.log.info("test uses playback tool: %s " % self.config['playback_tool'])
+            self.playback = get_playback(self.config)
+            self.playback.start()
+
+        self.runner.start()
+
+        first_time = int(time.time()) * 1000
+        proc = self.runner.process_handler
+        self.output_handler.proc = proc
+
+        try:
+            self.runner.wait(timeout)
+        finally:
+            try:
+                self.runner.check_for_crashes()
+            except NotImplementedError:  # not implemented for Chrome
+                pass
+
+        if self.playback is not None:
+            self.playback.stop()
+
+        if self.runner.is_running():
+            self.log("Application timed out after {} seconds".format(timeout))
+            self.runner.stop()
+
+        proc.output.append(
+            "__startBeforeLaunchTimestamp%d__endBeforeLaunchTimestamp"
+            % first_time)
+        proc.output.append(
+            "__startAfterTerminationTimestamp%d__endAfterTerminationTimestamp"
+            % (int(time.time()) * 1000))
+
+    def process_results(self):
+        self.log.info('todo: process results and dump in PERFHERDER_JSON blob')
+        self.log.info('- or - do we want the control server to do that?')
+
+    def clean_up(self):
+        self.control_server.stop()
+        self.runner.stop()
+        self.log.info("raptor finished")
+
+
+def main(args=sys.argv[1:]):
+    args = parse_args(args)
+    commandline.setup_logging('raptor', args, {'tbpl': sys.stdout})
+    LOG = get_default_logger(component='raptor-main')
+
+    # if a test name specified on command line, and it exists, just run that one
+    # otherwise run all available raptor tests that are found for this browser
+    raptor_test_list = get_raptor_test_list(args)
+
+    # ensure we have at least one valid test to run
+    if len(raptor_test_list) == 0:
+        LOG.critical("abort: no tests found")
+        sys.exit(1)
+
+    LOG.info("raptor tests scheduled to run:")
+    for next_test in raptor_test_list:
+        LOG.info(next_test['name'])
+
+    raptor = Raptor(args.app, args.binary)
+
+    raptor.start_control_server()
+
+    for next_test in raptor_test_list:
+        raptor.run_test(next_test)
+
+    raptor.process_results()
+    raptor.clean_up()
+
+
+if __name__ == "__main__":
+    main()
new file mode 100644
--- /dev/null
+++ b/testing/raptor/raptor/tests/raptor-chrome-tp7.ini
@@ -0,0 +1,22 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# raptor tp7 chrome
+
+[DEFAULT]
+apps = chrome
+type =  pageload
+playback = mitmproxy
+release_bin_mac = mitmproxy-2.0.2-osx.tar.gz
+page_cycles = 25
+
+[raptor-chrome-tp7]
+test_url = http://localhost:8081/heroes
+measure =
+  fcp
+  hero
+hero =
+  mugshot
+  title
+  anime
new file mode 100644
--- /dev/null
+++ b/testing/raptor/raptor/tests/raptor-firefox-tp7.ini
@@ -0,0 +1,22 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# raptor tp7 firefox
+
+[DEFAULT]
+apps = firefox
+type =  pageload
+playback = mitmproxy
+release_bin_mac = mitmproxy-2.0.2-osx.tar.gz
+page_cycles = 25
+
+[raptor-firefox-tp7]
+test_url = http://localhost:8081/heroes
+measure =
+  fnbpaint
+  hero
+hero =
+  mugshot
+  title
+  anime
new file mode 100644
--- /dev/null
+++ b/testing/raptor/raptor/tests/raptor-speedometer.ini
@@ -0,0 +1,14 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# raptor speedometer
+
+[raptor-speedometer]
+apps =
+  firefox
+  chrome
+type = benchmark
+test_url = http://localhost:8081/Speedometer/index.html?raptor
+page_cycles = 1
+page_timeout = 120000
new file mode 100644
--- /dev/null
+++ b/testing/raptor/requirements.txt
@@ -0,0 +1,3 @@
+mozrunner ~= 7.0
+mozprofile ~= 1.1
+manifestparser >= 1.1
new file mode 100644
--- /dev/null
+++ b/testing/raptor/setup.py
@@ -0,0 +1,29 @@
+from __future__ import absolute_import
+
+import os
+
+from setuptools import setup
+
+here = os.path.dirname(os.path.abspath(__file__))
+description = 'Browser performance test framework prototype'
+version = "0.0"
+
+with open(os.path.join(here, "requirements.txt")) as f:
+    dependencies = f.read().splitlines()
+
+setup(name='raptor',
+      version=version,
+      description=description,
+      url='https://github.com/rwood-moz/raptor',
+      author='Mozilla',
+      author_email='tools@lists.mozilla.org',
+      license='MPL 2.0',
+      packages=['raptor'],
+      zip_safe=False,
+      install_requires=dependencies,
+      include_package_data=True,
+      entry_points="""
+      # -*- Entry points: -*-
+      [console_scripts]
+      raptor = raptor.raptor:main
+      """)
new file mode 100644
new file mode 100644
--- /dev/null
+++ b/testing/raptor/test/conftest.py
@@ -0,0 +1,57 @@
+from __future__ import absolute_import
+
+import json
+import os
+
+import pytest
+
+from raptor.raptor import Raptor
+
+here = os.path.abspath(os.path.dirname(__file__))
+
+
+@pytest.fixture(scope='function')
+def options(request):
+    opts = {
+        'app': 'firefox',
+        'binary': 'path/to/dummy/browser',
+    }
+
+    if hasattr(request.module, 'OPTIONS'):
+        opts.update(request.module.OPTIONS)
+    return opts
+
+
+@pytest.fixture(scope='function')
+def raptor(options):
+    return Raptor(**options)
+
+
+@pytest.fixture(scope='session')
+def get_prefs():
+    def _inner(browser):
+        import raptor
+        prefs_dir = os.path.join(os.path.dirname(raptor.__file__), 'preferences')
+        with open(os.path.join(prefs_dir, '{}.json'.format(browser)), 'r') as fh:
+            return json.load(fh)
+
+    return _inner
+
+
+@pytest.fixture(scope='session')
+def filedir():
+    return os.path.join(here, 'files')
+
+
+@pytest.fixture
+def get_binary():
+    from moztest.selftest import fixtures
+
+    def inner(app):
+        if app != 'firefox':
+            pytest.xfail(reason="{} support not implemented".format(app))
+
+        binary = fixtures.binary()
+        if not binary:
+            pytest.skip("could not find a {} binary".format(app))
+        return binary
+
+    return inner
new file mode 100755
new file mode 100644
--- /dev/null
+++ b/testing/raptor/test/python.ini
@@ -0,0 +1,7 @@
+[DEFAULT]
+subsuite = raptor
+
+[test_cmdline.py]
+[test_control_server.py]
+[test_playback.py]
+[test_raptor.py]
new file mode 100644
--- /dev/null
+++ b/testing/raptor/test/test_cmdline.py
@@ -0,0 +1,24 @@
+from __future__ import absolute_import, unicode_literals
+
+import os
+import pytest
+
+import mozunit
+
+from argparse import ArgumentParser, Namespace
+from raptor.cmdline import verify_options
+
+
+def test_verify_options(filedir):
+    args = Namespace(binary='invalid/path')
+    parser = ArgumentParser()
+
+    with pytest.raises(SystemExit):
+        verify_options(parser, args)
+
+    args.binary = os.path.join(filedir, 'fake_binary.exe')
+    verify_options(parser, args)  # assert no exception
+
+
+if __name__ == '__main__':
+    mozunit.main()
new file mode 100644
--- /dev/null
+++ b/testing/raptor/test/test_control_server.py
@@ -0,0 +1,26 @@
+from __future__ import absolute_import, unicode_literals
+
+import mozunit
+
+from BaseHTTPServer import HTTPServer
+from mozlog.structuredlog import set_default_logger, StructuredLogger
+from raptor.control_server import RaptorControlServer
+
+set_default_logger(StructuredLogger('test_control_server'))
+
+
+def test_start_and_stop():
+    control = RaptorControlServer()
+
+    assert control.server is None
+    control.start()
+    assert isinstance(control.server, HTTPServer)
+    assert control.server.fileno()
+    assert control._server_thread.is_alive()
+
+    control.stop()
+    assert not control._server_thread.is_alive()
+
+
+if __name__ == '__main__':
+    mozunit.main()
new file mode 100644
--- /dev/null
+++ b/testing/raptor/test/test_playback.py
@@ -0,0 +1,33 @@
+from __future__ import absolute_import, unicode_literals
+
+import mozunit
+
+from mozlog.structuredlog import set_default_logger, StructuredLogger
+
+set_default_logger(StructuredLogger('test_playback'))
+
+from raptor.playback import get_playback, Mitmproxy
+
+
+config = {}
+
+
+def test_get_playback():
+    config['playback_tool'] = 'mitmproxy'
+    playback = get_playback(config)
+    assert isinstance(playback, Mitmproxy)
+
+
+def test_get_unsupported_playback():
+    config['playback_tool'] = 'unsupported'
+    playback = get_playback(config)
+    assert playback is None
+
+
+def test_get_playback_missing_tool_name():
+    playback = get_playback(config)
+    assert playback is None
+
+
+if __name__ == '__main__':
+    mozunit.main()
new file mode 100644
--- /dev/null
+++ b/testing/raptor/test/test_raptor.py
@@ -0,0 +1,84 @@
+from __future__ import absolute_import, unicode_literals
+
+import os
+import threading
+import time
+
+import mozunit
+import pytest
+
+from mozprofile import BaseProfile
+from mozrunner.errors import RunnerNotStartedError
+
+from raptor.control_server import RaptorControlServer
+from raptor.raptor import Raptor
+
+
+@pytest.mark.parametrize('app', ['firefox', 'chrome'])
+def test_create_profile(options, app, get_prefs):
+    options['app'] = app
+    raptor = Raptor(**options)
+
+    assert isinstance(raptor.profile, BaseProfile)
+    if app != 'firefox':
+        return
+
+    # This pref is set in mozprofile
+    firefox_pref = 'user_pref("app.update.enabled", false);'
+    # This pref is set in raptor
+    raptor_pref = 'user_pref("security.enable_java", false);'
+
+    prefs_file = os.path.join(raptor.profile.profile, 'user.js')
+    with open(prefs_file, 'r') as fh:
+        prefs = fh.read()
+        assert firefox_pref in prefs
+        assert raptor_pref in prefs
+
+
+def test_start_and_stop_server(raptor):
+    assert raptor.control_server is None
+
+    raptor.start_control_server()
+    assert isinstance(raptor.control_server, RaptorControlServer)
+
+    assert raptor.control_server._server_thread.is_alive()
+    raptor.clean_up()
+    assert not raptor.control_server._server_thread.is_alive()
+
+
+@pytest.mark.parametrize('app', [
+    'firefox',
+    pytest.mark.xfail('chrome'),
+])
+def test_start_browser(get_binary, app):
+    binary = get_binary(app)
+    assert binary
+
+    raptor = Raptor(app, binary)
+    raptor.start_control_server()
+
+    test = {}
+    test['name'] = 'raptor-{}-tp7'.format(app)
+
+    thread = threading.Thread(target=raptor.run_test, args=(test,))
+    thread.start()
+
+    timeout = time.time() + 5  # seconds
+    while time.time() < timeout:
+        try:
+            is_running = raptor.runner.is_running()
+            assert is_running
+            break
+        except RunnerNotStartedError:
+            time.sleep(0.1)
+    else:
+        assert False  # browser didn't start
+
+    raptor.clean_up()
+    thread.join(5)
+    assert not raptor.runner.is_running()
+    assert raptor.runner.returncode is not None
+
+
+if __name__ == '__main__':
+    mozunit.main()
new file mode 100644
--- /dev/null
+++ b/testing/raptor/webext/raptor/benchmark-relay.js
@@ -0,0 +1,23 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+// receives result from benchmark and relays onto our background runner
+
+function receiveMessage(event) {
+  console.log("received message!");
+  console.log(event.origin);
+  if (event.origin == "http://localhost:8081") {
+    sendResult("speedometer", event.data);
+  }
+}
+
+function sendResult(_type, _value) {
+  // send result back to background runner script
+  console.log("sending result back to runner: " + _type + " " + _value);
+  chrome.runtime.sendMessage({"type": _type, "value": _value}, function(response) {
+    console.log(response.text);
+  });
+}
+
+window.addEventListener("message", receiveMessage);
new file mode 100644
--- /dev/null
+++ b/testing/raptor/webext/raptor/manifest.json
@@ -0,0 +1,30 @@
+{
+  "applications": {
+    "gecko": {
+      "id": "raptor@mozilla.org"
+    }
+  },
+  "manifest_version": 2,
+  "name": "Raptor",
+  "version": "0.1",
+  "description": "Performance measurement framework prototype",
+  "background": {
+    "scripts": ["auto_gen_test_config.js", "runner.js"]
+  },
+  "content_scripts": [
+    {
+      "matches": ["http://*/tp6/tp6-*.html", "http://*/heroes/*"],
+      "js": ["measure.js"]
+    },
+    {
+      "matches": ["http://*/Speedometer/index.html*"],
+      "js": ["benchmark-relay.js"]
+    }
+  ],
+  "permissions": [
+    "http://127.0.0.1:8000/",
+    "tabs",
+    "storage",
+    "alarms"
+  ]
+}
new file mode 100644
--- /dev/null
+++ b/testing/raptor/webext/raptor/measure.js
@@ -0,0 +1,166 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+// content script for use with tp7 pageload tests
+var perfData = window.performance;
+var gRetryCounter = 0;
+
+// measure hero element; must exist inside test page;
+// default only; this is set via control server settings json
+var getHero = false;
+var heroesToCapture = [];
+
+// measure firefox time-to-first-non-blank-paint
+// note: this browser pref must be enabled:
+// dom.performance.time_to_non_blank_paint.enabled = True
+// default only; this is set via control server settings json
+var getFNBPaint = false;
+
+// measure google's first-contentful-paint
+// default only; this is set via control server settings json
+var getFCP = false;
+
+// performance.timing measurement used as 'starttime'
+var startMeasure = "fetchStart";
+
+function contentHandler() {
+  // retrieve test settings from local ext storage
+  if (typeof(browser) !== "undefined") {
+    // firefox, returns promise
+    browser.storage.local.get("settings").then(function(item) {
+      setup(item.settings);
+    });
+  } else {
+    // chrome, no promise so use callback
+    chrome.storage.local.get("settings", function(item) {
+      setup(item.settings);
+    });
+  }
+}
+
+function setup(settings) {
+  getFNBPaint = settings.measure.fnbpaint;
+  getFCP = settings.measure.fcp;
+  if (settings.measure.hero.length !== 0) {
+    getHero = true;
+    heroesToCapture = settings.measure.hero;
+  }
+  if (getHero) {
+    console.log("hero elements to measure: " + heroesToCapture);
+    measureHero();
+  }
+  if (getFNBPaint) {
+    console.log("will be measuring fnbpaint");
+    measureFNBPaint();
+  }
+  if (getFCP) {
+    console.log("will be measuring first-contentful-paint");
+    measureFirstContentfulPaint();
+  }
+}
+
+function measureHero() {
+  var obs = null;
+
+  var heroElementsFound = window.document.querySelectorAll("[elementtiming]");
+  console.log("found " + heroElementsFound.length + " hero elements in the page");
+
+  if (heroElementsFound) {
+    function callbackHero(entries, observer) {
+      entries.forEach(entry => {
+        var heroFound = entry.target.getAttribute("elementtiming");
+        // mark the time now as when hero element received
+        perfData.mark(heroFound);
+        console.log("found hero:" + heroFound);
+        var resultType = "hero:" + heroFound;
+        // calculate result: time from performance.timing.fetchStart until the hero element was received
+        perfData.measure(resultType, startMeasure, heroFound);
+        var perfResult = perfData.getEntriesByName(resultType);
+        var _result = perfResult[0].duration;
+        sendResult(resultType, _result);
+        perfData.clearMarks();
+        perfData.clearMeasures();
+        obs.disconnect();
+      });
+    }
+    // we want the element 100% visible on the viewport
+    var options = {root: null, rootMargin: "0px", threshold: [1]};
+    try {
+      obs = new window.IntersectionObserver(callbackHero, options);
+      heroElementsFound.forEach(function(el) {
+        // if hero element is one we want to measure, add it to the observer
+        if (heroesToCapture.indexOf(el.getAttribute("elementtiming")) > -1)
+          obs.observe(el);
+      });
+    } catch (err) {
+      console.log(err);
+    }
+  } else {
+      console.log("couldn't find hero element");
+  }
+
+}
+
+function measureFNBPaint() {
+  var x = window.performance.timing.timeToNonBlankPaint;
+
+  if (typeof(x) == "undefined") {
+    console.log("ERROR: timeToNonBlankPaint is undefined; ensure the pref is enabled");
+    return;
+  }
+  if (x > 0) {
+    console.log("got fnbpaint");
+    gRetryCounter = 0;
+    var startTime = perfData.timing.fetchStart;
+    sendResult("fnbpaint", x - startTime);
+  } else {
+    gRetryCounter += 1;
+    if (gRetryCounter <= 10) {
+      console.log("\nfnbpaint is not yet available (0), retry number " + gRetryCounter + "...\n");
+      window.setTimeout(measureFNBPaint, 100);
+    } else {
+      console.log("\nunable to get a value for fnbpaint after " + gRetryCounter + " retries\n");
+    }
+  }
+}
+
+function measureFirstContentfulPaint() {
+  // see https://developer.mozilla.org/en-US/docs/Web/API/PerformancePaintTiming
+  var resultType = "fcp";
+  var result = 0;
+
+  let performanceEntries = perfData.getEntriesByType("paint");
+
+  if (performanceEntries.length >= 2) {
+    if (performanceEntries[1].startTime != undefined)
+      result = performanceEntries[1].startTime;
+  }
+
+  if (result > 0) {
+    console.log("got time to first-contentful-paint");
+    sendResult(resultType, result);
+    perfData.clearMarks();
+    perfData.clearMeasures();
+  } else {
+    gRetryCounter += 1;
+    if (gRetryCounter <= 10) {
+      console.log("\ntime to first-contentful-paint is not yet available (0), retry number " + gRetryCounter + "...\n");
+      window.setTimeout(measureFirstContentfulPaint, 100);
+    } else {
+      console.log("\nunable to get a value for time-to-fcp after " + gRetryCounter + " retries\n");
+    }
+  }
+}
+
+function sendResult(_type, _value) {
+  // send result back to background runner script
+  console.log("sending result back to runner: " + _type + " " + _value);
+  chrome.runtime.sendMessage({"type": _type, "value": _value}, function(response) {
+    console.log(response.text);
+  });
+}
+
+window.onload = contentHandler();
new file mode 100644
--- /dev/null
+++ b/testing/raptor/webext/raptor/runner.js
@@ -0,0 +1,345 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+// this extension requires a 'control server' to be running on port 8000
+// (see raptor prototype framework). It will provide the test options, as
+// well as receive test results
+
+// note: the prototype currently assumes the test page(s) are already
+// available locally, e.g. in the 'talos-pagesets' dir, the 'heroes' dir
+// (tarek's github repo), or the 'webkit/PerformanceTests' dir (for
+// benchmarks). First run:
+// 'python -m SimpleHTTPServer 8081'
+// to serve out the pages we want to prototype with, and update the
+// manifest content_scripts 'matches' accordingly
+
+var browserName;
+var ext;
+var settingsURL = null;
+var testType;
+var pageCycles = 0;
+var pageCycle = 0;
+var pageCycleDelay = 1000;
+var testURL;
+var testTabID = 0;
+var results = {"page": "", "measurements": {}};
+var getHero = false;
+var getFNBPaint = false;
+var getFCP = false;
+var isHeroPending = false;
+var pendingHeroes = [];
+var settings = {};
+var isFNBPaintPending = false;
+var isFCPPending = false;
+var isBenchmarkPending = false;
+var pageTimeout = 5000; // default pageload timeout
+
+function getTestSettings() {
+  console.log("getting test settings from control server");
+  return new Promise(resolve => {
+
+    fetch(settingsURL).then(function(response) {
+      response.text().then(function(text) {
+        console.log(text);
+        settings = JSON.parse(text)["raptor-options"];
+
+        // parse the test settings
+        testType = settings.type;
+        pageCycles = settings.page_cycles;
+        testURL = settings.test_url;
+        results.page = testURL;
+        results.type = testType;
+
+        if (settings.page_timeout !== undefined) {
+          pageTimeout = settings.page_timeout;
+        }
+        console.log("using page timeout (ms): " + pageTimeout);
+
+        if (testType == "pageload") {
+          getFNBPaint = settings.measure.fnbpaint;
+          getFCP = settings.measure.fcp;
+          if (settings.measure.hero.length !== 0) {
+            getHero = true;
+          }
+        }
+
+        // write options to storage that our content script needs to know
+        if (browserName === "firefox") {
+          ext.storage.local.clear().then(function() {
+            ext.storage.local.set({settings}).then(function() {
+              console.log("wrote settings to ext local storage");
+              resolve();
+            });
+          });
+        } else {
+          ext.storage.local.clear(function() {
+            ext.storage.local.set({settings}, function() {
+              console.log("wrote settings to ext local storage");
+              resolve();
+            });
+          });
+        }
+      });
+    });
+  });
+}
+
+function getBrowserInfo() {
+  return new Promise(resolve => {
+    if (browserName === "firefox") {
+      ext = browser;
+      var gettingInfo = browser.runtime.getBrowserInfo();
+      gettingInfo.then(function(bi) {
+        results.browser = bi.name + " " + bi.version + " " + bi.buildID;
+        console.log("testing on " + results.browser);
+        resolve();
+      });
+    } else {
+      ext = chrome;
+      var browserInfo = window.navigator.userAgent.split(" ");
+      for (let x in browserInfo) {
+        if (browserInfo[x].indexOf("Chrome") > -1) {
+          results.browser = browserInfo[x];
+          break;
+        }
+      }
+      console.log("testing on " + results.browser);
+      resolve();
+    }
+  });
+}
+
+function testTabCreated(tab) {
+  testTabID = tab.id;
+  console.log("opened new empty tab " + testTabID);
+  nextCycle();
+}
+
+async function testTabUpdated(tab) {
+  console.log("tab " + tab.id + " reloaded");
+  // wait for pageload test result from content
+  await waitForResult();
+  // move on to next cycle (or test complete)
+  nextCycle();
+}
+
+function waitForResult() {
+  console.log("awaiting results...");
+  return new Promise(resolve => {
+    function checkForResult() {
+      if (testType == "pageload") {
+        if (!isHeroPending && !isFNBPaintPending && !isFCPPending) {
+          cancelTimeoutAlarm("raptor-page-timeout");
+          resolve();
+        } else {
+          setTimeout(checkForResult, 5);
+        }
+      } else if (testType == "benchmark") {
+        if (!isBenchmarkPending) {
+          cancelTimeoutAlarm("raptor-page-timeout");
+          resolve();
+        } else {
+          setTimeout(checkForResult, 5);
+        }
+      }
+    }
+    checkForResult();
+  });
+}
+
+function nextCycle() {
+  pageCycle++;
+  if (pageCycle == 1) {
+    var text = "running " + pageCycles + " pagecycles of " + testURL;
+    postToControlServer("status", text);
+  }
+  if (pageCycle <= pageCycles) {
+    setTimeout(function() {
+      var text = "begin pagecycle " + pageCycle;
+      console.log("\n" + text);
+      postToControlServer("status", text);
+
+      // set page timeout alarm
+      setTimeoutAlarm("raptor-page-timeout", pageTimeout);
+
+      if (testType == "pageload") {
+        if (getHero) {
+          isHeroPending = true;
+          pendingHeroes = Array.from(settings.measure.hero);
+        }
+        if (getFNBPaint)
+          isFNBPaintPending = true;
+        if (getFCP)
+          isFCPPending = true;
+      } else if (testType == "benchmark") {
+        isBenchmarkPending = true;
+      }
+      // reload the test page
+      ext.tabs.update(testTabID, {url: testURL}, testTabUpdated);
+    }, pageCycleDelay);
+  } else {
+    verifyResults();
+  }
+}
+
+function timeoutAlarmListener(alarm) {
+  var text = alarm.name;
+  console.error(text);
+  postToControlServer("status", text);
+  // call clean-up to shutdown gracefully
+  cleanUp();
+}
+
+function setTimeoutAlarm(timeoutName, timeoutMS) {
+  var timeout_when = window.performance.now() + timeoutMS;
+  ext.alarms.create(timeoutName, { when: timeout_when });
+  console.log("set " + timeoutName);
+}
+
+function cancelTimeoutAlarm(timeoutName) {
+  if (browserName === "firefox") {
+    var clearAlarm = ext.alarms.clear(timeoutName);
+    clearAlarm.then(function(onCleared) {
+      if (onCleared) {
+        console.log("cancelled " + timeoutName);
+      } else {
+        console.error("failed to clear " + timeoutName);
+      }
+    });
+  } else {
+    chrome.alarms.clear(timeoutName, function(wasCleared) {
+      if (wasCleared) {
+        console.log("cancelled " + timeoutName);
+      } else {
+        console.error("failed to clear " + timeoutName);
+      }
+    });
+  }
+}
+
+function resultListener(request, sender, sendResponse) {
+  console.log("received message from " + sender.tab.url);
+  if (request.type && request.value) {
+    console.log("result: " + request.type + " " + request.value);
+    sendResponse({text: "confirmed " + request.type});
+
+    if (!(request.type in results.measurements))
+      results.measurements[request.type] = [];
+
+    if (testType == "pageload") {
+      // a single tp7 pageload measurement was received
+      if (request.type.indexOf("hero") > -1) {
+        results.measurements[request.type].push(request.value);
+        var _found = request.type.split("hero:")[1];
+        var index = pendingHeroes.indexOf(_found);
+        if (index > -1) {
+          pendingHeroes.splice(index, 1);
+          if (pendingHeroes.length == 0) {
+            console.log("measured all expected hero elements");
+            isHeroPending = false;
+          }
+        }
+      } else if (request.type == "fnbpaint") {
+        results.measurements.fnbpaint.push(request.value);
+        isFNBPaintPending = false;
+      } else if (request.type == "fcp") {
+        results.measurements.fcp.push(request.value);
+        isFCPPending = false;
+      }
+    } else if (testType == "benchmark") {
+      // benchmark results received (all results for that complete benchmark run)
+      console.log("received results from benchmark");
+      results.measurements[request.type].push(request.value);
+      isBenchmarkPending = false;
+    }
+  } else {
+    console.log("unknown message received from content: " + request);
+  }
+}
+
+function verifyResults() {
+  console.log("\nVerifying results:");
+  console.log(results);
+  for (var x in results.measurements) {
+    let count = results.measurements[x].length;
+    if (count == pageCycles) {
+      console.log("have " + count + " results for " + x + ", as expected");
+    } else {
+      console.log("ERROR: expected " + pageCycles + " results for "
+                  + x + " but only have " + count);
+    }
+  }
+  postToControlServer("results", results);
+}
+
+function postToControlServer(msgType, msgData) {
+  // requires 'control server' running at port 8000 to receive results
+  var url = "http://127.0.0.1:8000/";
+  var client = new XMLHttpRequest();
+  client.onreadystatechange = function() {
+    if (client.readyState == XMLHttpRequest.DONE && client.status == 200) {
+      console.log("post success");
+    }
+  };
+
+  client.open("POST", url, true);
+
+  client.setRequestHeader("Content-Type", "application/json");
+  if (client.readyState == 1) {
+    console.log("posting to control server");
+    var data = { "type": "webext_" + msgType, "data": msgData};
+    client.send(JSON.stringify(data));
+  }
+  if (msgType == "results") {
+    // we're finished, move to cleanup
+    cleanUp();
+  }
+}
+
+function cleanUp() {
+  // close tab
+  ext.tabs.remove(testTabID);
+  console.log("closed tab " + testTabID);
+  if (testType == "pageload") {
+    // remove listeners
+    ext.runtime.onMessage.removeListener(resultListener);
+    ext.tabs.onCreated.removeListener(testTabCreated);
+    ext.alarms.onAlarm.removeListener(timeoutAlarmListener);
+    console.log("pageloader test finished");
+  } else if (testType == "benchmark") {
+    console.log("benchmark complete");
+  }
+  window.onload = null;
+  // done, dump to console to tell framework to shutdown browser; currently
+  // this only works with Firefox as google chrome doesn't support dump()
+  if (browserName === "firefox")
+    window.dump("\n__raptor_shutdownBrowser\n");
+
+}
+
+function runner() {
+  let config = getTestConfig();
+  settingsURL = config.test_settings_url;
+  browserName = config.browser;
+  getBrowserInfo().then(function() {
+    getTestSettings().then(function() {
+      if (testType == "benchmark") {
+        // webkit benchmark type of test
+        console.log("benchmark test start");
+      } else if (testType == "pageload") {
+        // standard 'tp7' pageload test
+        console.log("pageloader test start");
+      }
+      // results listener
+      ext.runtime.onMessage.addListener(resultListener);
+      // tab creation listener
+      ext.tabs.onCreated.addListener(testTabCreated);
+      // timeout alarm listener
+      ext.alarms.onAlarm.addListener(timeoutAlarmListener);
+      // create new empty tab, which starts the test
+      ext.tabs.create({url: "about:blank"});
+    });
+  });
+}
+
+window.onload = runner();
--- a/toolkit/toolkit.mozbuild
+++ b/toolkit/toolkit.mozbuild
@@ -5,16 +5,17 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 include('/js/app.mozbuild')
 
 PYTHON_UNITTEST_MANIFESTS += [
     '/layout/tools/reftest/selftest/python.ini',
     '/testing/marionette/harness/marionette_harness/tests/harness_unit/python.ini',
     '/testing/mochitest/tests/python/python.ini',
+    '/testing/raptor/test/python.ini',
     '/testing/talos/talos/unittests/python.ini'
 ]
 
 CONFIGURE_SUBST_FILES += [
     '/tools/update-packaging/Makefile',
 ]
 
 DIRS += [
--- a/tools/lint/flake8.yml
+++ b/tools/lint/flake8.yml
@@ -24,16 +24,17 @@ flake8:
         - testing/marionette/puppeteer
         - testing/mochitest
         - testing/mozbase
         - testing/mozharness/configs
         - testing/mozharness/mozfile
         - testing/mozharness/mozharness/mozilla/mar.py
         - testing/mozharness/mozinfo
         - testing/mozharness/scripts
+        - testing/raptor
         - testing/remotecppunittests.py
         - testing/runcppunittests.py
         - testing/talos/
         - testing/xpcshell
         - toolkit/components/telemetry
         - toolkit/crashreporter/tools/upload_symbols.py
         - tools/
     # Excludes should be added to topsrcdir/.flake8 due to a bug in flake8 where
--- a/tools/lint/py2.yml
+++ b/tools/lint/py2.yml
@@ -34,16 +34,17 @@ py2:
         - servo
         - taskcluster/docker/funsize-update-generator
         - testing/awsy
         - testing/firefox-ui
         - testing/geckodriver
         - testing/gtest
         - testing/mochitest
         - testing/mozharness
+        - testing/raptor
         - testing/remotecppunittests.py
         - testing/runcppunittests.py
         - testing/runtimes
         - testing/tools
         - testing/tps
         - testing/web-platform
         - third_party
         - toolkit
--- a/tools/lint/py3.yml
+++ b/tools/lint/py3.yml
@@ -27,16 +27,17 @@ py3:
         - security/manager/ssl
         - security/nss
         - servo
         - testing/awsy
         - testing/firefox-ui/harness/firefox_ui_harness/runners/update.py
         - testing/gtest
         - testing/mochitest
         - testing/mozharness
+        - testing/raptor
         - testing/tools/iceserver
         - testing/tps
         - testing/web-platform
         - third_party
         - toolkit
         - tools/jprof
         - tools/profiler
         - tools/rb