Bug 744902 - Add perf testing to Marionette, r=jgriffin, DONTBUILD because NPOTB
author Malini Das <mdas@mozilla.com>
date Fri, 08 Jun 2012 18:33:54 -0400
changeset 98930 301213b90a983582b68e41a7e4203ad5d1d7c6fa
parent 98929 ec473a671b9bbe2d7f05e56e9d1036b9b3453062
child 98931 7377fb1cbdb866522010a8128f3587ac9a4a38be
push id 1729
push user lsblakk@mozilla.com
push date Mon, 16 Jul 2012 20:02:43 +0000
treeherder mozilla-aurora@f4e75e148951
reviewers jgriffin
bugs 744902
milestone 16.0a1
Bug 744902 - Add perf testing to Marionette, r=jgriffin, DONTBUILD because NPOTB
testing/marionette/client/marionette/marionette.py
testing/marionette/client/marionette/marionette_test.py
testing/marionette/client/marionette/runtests.py
testing/marionette/client/marionette/tests/unit/test_perf.py
testing/marionette/client/marionette/tests/unit/unit-tests.ini
testing/marionette/client/marionette/venv_test.sh
testing/marionette/jar.mn
testing/marionette/marionette-actors.js
testing/marionette/marionette-listener.js
testing/marionette/marionette-perf.js
testing/marionette/marionette-simpletest.js
--- a/testing/marionette/client/marionette/marionette.py
+++ b/testing/marionette/client/marionette/marionette.py
@@ -376,12 +376,18 @@ class Marionette(object):
         return elements
 
     def log(self, msg, level=None):
         return self._send_message('log', 'ok', value=msg, level=level)
 
     def get_logs(self):
         return self._send_message('getLogs', 'value')
 
+    def add_perf_data(self, suite, name, value):
+        return self._send_message('addPerfData', 'ok', suite=suite, name=name, value=value)
+
+    def get_perf_data(self):
+        return self._send_message('getPerfData', 'value')
+
     def import_script(self, file):
         f = open(file, "r")
         js = f.read()
         return self._send_message('importScript', 'ok', script=js)
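
For context, here is a minimal sketch of driving the two new client calls end to end. The host/port and the sample metric are illustrative assumptions, not part of this patch:

    # Sketch only: exercises the new perf-data client API (values illustrative).
    from marionette import Marionette

    m = Marionette(host='localhost', port=2828)  # assumed default server address
    m.start_session()
    m.add_perf_data('startup', 'cold_load_ms', 1234)  # queue one datapoint
    data = m.get_perf_data()
    # data is nested: {suite: {test: [datapoints]}}
    assert data['startup']['cold_load_ms'] == [1234]
    m.delete_session()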
--- a/testing/marionette/client/marionette/marionette_test.py
+++ b/testing/marionette/client/marionette/marionette_test.py
@@ -85,16 +85,17 @@ whitelist_prefs.forEach(function (pref) 
 });
         """, [url, whitelist_prefs])
         emulator.set_context("content")
 
     def setUp(self):
         if self.marionette.session is None:
             self.marionette.start_session()
         self.loglines = None
+        self.perfdata = None
 
     def tearDown(self):
         if self.marionette.session is not None:
             self.marionette.delete_session()
 
 
 class MarionetteTestCase(CommonTestCase):
 
@@ -169,16 +170,17 @@ class MarionetteJSTestCase(CommonTestCas
             launch_app = launch_app.group(3)
             frame = self.launch_gaia_app(launch_app)
             args.append({'__marionetteArgs': {'appframe': frame}})
 
         try:
             results = self.marionette.execute_js_script(js, args)
 
             self.loglines = self.marionette.get_logs()
+            self.perfdata = self.marionette.get_perf_data()
+            print "in marionette_test"
+            print self.perfdata
 
             if launch_app:
                 self.kill_gaia_app(launch_app)
 
             self.assertTrue(not 'timeout' in self.jsFile,
                             'expected timeout not triggered')
 
             if 'fail' in self.jsFile:
@@ -188,18 +190,19 @@ class MarionetteJSTestCase(CommonTestCas
                 fails = []
                 for failure in results['failures']:
                     diag = "" if failure.get('diag') is None else "| %s " % failure['diag']
                     name = "got false, expected true" if failure.get('name') is None else failure['name']
                     fails.append('TEST-UNEXPECTED-FAIL %s| %s' % (diag, name))
                 self.assertEqual(0, results['failed'],
                                  '%d tests failed:\n%s' % (results['failed'], '\n'.join(fails)))
 
-            self.assertTrue(results['passed'] + results['failed'] > 0,
-                            'no tests run')
+            if not self.perfdata:
+                self.assertTrue(results['passed'] + results['failed'] > 0,
+                                'no tests run')
             if self.marionette.session is not None:
                 self.marionette.delete_session()
 
         except ScriptTimeoutException:
             if 'timeout' in self.jsFile:
                 # expected exception
                 pass
             else:
--- a/testing/marionette/client/marionette/runtests.py
+++ b/testing/marionette/client/marionette/runtests.py
@@ -8,37 +8,39 @@ import inspect
 import logging
 from optparse import OptionParser
 import os
 import types
 import unittest
 import socket
 import sys
 import time
+import platform
+import datazilla
 
 try:
     from manifestparser import TestManifest
     from mozhttpd import iface, MozHttpd
 except ImportError:
     print "manifestparser or mozhttpd not found!  Please install mozbase:\n"
     print "\tgit clone git clone git://github.com/mozilla/mozbase.git"
     print "\tpython setup_development.py\n"
     import sys
     sys.exit(1)
 
 
 from marionette import Marionette
 from marionette_test import MarionetteJSTestCase
 
-
 class MarionetteTestResult(unittest._TextTestResult):
 
     def __init__(self, *args):
         super(MarionetteTestResult, self).__init__(*args)
         self.passed = 0
+        self.perfdata = None
 
     def addSuccess(self, test):
         super(MarionetteTestResult, self).addSuccess(test)
         self.passed += 1
 
     def getInfo(self, test):
         if hasattr(test, 'jsFile'):
             return os.path.basename(test.jsFile)
@@ -60,16 +62,23 @@ class MarionetteTestResult(unittest._Tex
     def printLogs(self, test):
         for testcase in test._tests:
             if hasattr(testcase, 'loglines') and testcase.loglines:
                 print 'START LOG:'
                 for line in testcase.loglines:
                     print ' '.join(line)
                 print 'END LOG:'
 
+    def getPerfData(self, test):
+        for testcase in test._tests:
+            if hasattr(testcase, 'perfdata') and testcase.perfdata:
+                if not self.perfdata:
+                    self.perfdata = datazilla.dzResult(testcase.perfdata)
+                else:
+                    self.perfdata.join_results(testcase.perfdata)
 
 class MarionetteTextTestRunner(unittest.TextTestRunner):
 
     resultclass = MarionetteTestResult
 
     def _makeResult(self):
         return self.resultclass(self.stream, self.descriptions, self.verbosity)
 
@@ -87,16 +96,17 @@ class MarionetteTextTestRunner(unittest.
         finally:
             stopTestRun = getattr(result, 'stopTestRun', None)
             if stopTestRun is not None:
                 stopTestRun()
         stopTime = time.time()
         timeTaken = stopTime - startTime
         result.printErrors()
         result.printLogs(test)
+        result.getPerfData(test)
         if hasattr(result, 'separator2'):
             self.stream.writeln(result.separator2)
         run = result.testsRun
         self.stream.writeln("Ran %d test%s in %.3fs" %
                             (run, run != 1 and "s" or "", timeTaken))
         self.stream.writeln()
 
         expectedFails = unexpectedSuccesses = skipped = 0
@@ -150,16 +160,17 @@ class MarionetteTestRunner(object):
         self.es_server = es_server
         self.rest_server = rest_server
         self.logger = logger
         self.noWindow = noWindow
         self.httpd = None
         self.baseurl = None
         self.marionette = None
         self.logcat_dir = logcat_dir
+        self.perfrequest = None
 
         self.reset_test_stats()
 
         if self.logger is None:
             self.logger = logging.getLogger('Marionette')
             self.logger.setLevel(logging.INFO)
             self.logger.addHandler(logging.StreamHandler())
 
@@ -167,16 +178,17 @@ class MarionetteTestRunner(object):
             if not os.access(self.logcat_dir, os.F_OK):
                 os.mkdir(self.logcat_dir)
 
     def reset_test_stats(self):
         self.passed = 0
         self.failed = 0
         self.todo = 0
         self.failures = []
+        self.perfrequest = None
 
     def start_httpd(self):
         host = iface.get_lan_ip()
         s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         s.bind(("",0))
         port = s.getsockname()[1]
         s.close()
         self.baseurl = 'http://%s:%d/' % (host, port)
@@ -251,25 +263,33 @@ class MarionetteTestRunner(object):
         for f in self.failures:
             testgroup.add_test_failure(test=f[0], text=f[1], status=f[2])
 
         testgroup.submit()
 
     def run_tests(self, tests, testtype=None):
         self.reset_test_stats()
         starttime = datetime.utcnow()
-        for test in tests:
-            self.run_test(test, testtype)
+        # run the suite once, plus options.repeat additional passes
+        for _ in range(options.repeat + 1):
+            for test in tests:
+                self.run_test(test, testtype)
         self.logger.info('\nSUMMARY\n-------')
         self.logger.info('passed: %d' % self.passed)
         self.logger.info('failed: %d' % self.failed)
         self.logger.info('todo: %d' % self.todo)
         elapsedtime = datetime.utcnow() - starttime
         if self.autolog:
             self.post_to_autolog(elapsedtime)
+        if self.perfrequest and options.perf:
+            try:
+                self.perfrequest.submit()
+            except Exception, e:
+                self.logger.error("Could not submit to datazilla")
+                self.logger.error(str(e))
         if self.marionette.emulator:
             self.marionette.emulator.close()
             self.marionette.emulator = None
         self.marionette = None
 
     def run_test(self, test, testtype):
         if not self.httpd:
             print "starting httpd"
@@ -303,16 +323,32 @@ class MarionetteTestRunner(object):
                         testargs.update({ atype[1:]: 'true' })
                     elif atype.startswith('-'):
                         testargs.update({ atype[1:]: 'false' })
                     else:
                         testargs.update({ atype: 'true' })
 
             manifest = TestManifest()
             manifest.read(filepath)
+            if options.perf:
+                if options.perfserv is None:
+                    options.perfserv = manifest.get("perfserv")[0]
+                machine_name = socket.gethostname()
+                try:
+                    # prefer a machine_name defined in the manifest
+                    machine_name = manifest.get("machine_name")[0]
+                except (KeyError, IndexError):
+                    self.logger.info("Using machine_name: %s" % machine_name)
+                os_name = platform.system()
+                os_version = platform.release()
+                self.perfrequest = datazilla.dzRequest(server=options.perfserv, machine_name=machine_name, os=os_name, os_version=os_version,
+                                         platform=manifest.get("platform")[0], build_name=manifest.get("build_name")[0], 
+                                         version=manifest.get("version")[0], revision=self.revision,
+                                         branch=manifest.get("branch")[0], id=os.getenv('BUILD_ID'), test_date=int(time.time()))
+
             manifest_tests = manifest.get(**testargs)
 
             for i in manifest_tests:
                 self.run_test(i["path"], testtype)
             return
 
         self.logger.info('TEST-START %s' % os.path.basename(test))
 
@@ -328,16 +364,18 @@ class MarionetteTestRunner(object):
                         suite.addTest(obj(self.marionette, methodName=testname))
 
         elif file_ext == '.js':
             suite.addTest(MarionetteJSTestCase(self.marionette, jsFile=filepath))
 
         if suite.countTestCases():
             results = MarionetteTextTestRunner(verbosity=3).run(suite)
             self.failed += len(results.failures) + len(results.errors)
+            if results.perfdata and self.perfrequest:
+                self.perfrequest.add_dzresult(results.perfdata)
             if hasattr(results, 'skipped'):
                 self.todo += len(results.skipped) + len(results.expectedFailures)
             self.passed += results.passed
             for failure in results.failures + results.errors:
                 self.failures.append((results.getInfo(failure[0]), failure[1], 'TEST-UNEXPECTED-FAIL'))
             if hasattr(results, 'unexpectedSuccess'):
                 self.failed += len(results.unexpectedSuccesses)
                 for failure in results.unexpectedSuccesses:
@@ -353,17 +391,17 @@ class MarionetteTestRunner(object):
 if __name__ == "__main__":
     parser = OptionParser(usage='%prog [options] test_file_or_dir <test_file_or_dir> ...')
     parser.add_option("--autolog",
                       action = "store_true", dest = "autolog",
                       default = False,
                       help = "send test results to autolog")
     parser.add_option("--revision",
                       action = "store", dest = "revision",
-                      help = "git revision for autolog submissions")
+                      help = "git revision for autolog/perfdata submissions")
     parser.add_option("--testgroup",
                       action = "store", dest = "testgroup",
                       help = "testgroup names for autolog submissions")
     parser.add_option("--emulator",
                       action = "store", dest = "emulator",
                       default = None, choices = ["x86", "arm"],
                       help = "Launch a B2G emulator on which to run tests. "
                       "You need to specify which architecture to emulate.")
@@ -393,17 +431,26 @@ if __name__ == "__main__":
                       "tests from .ini files.")
     parser.add_option('--homedir', dest='homedir', action='store',
                       help='home directory of emulator files')
     parser.add_option('--binary', dest='bin', action='store',
                       help='gecko executable to launch before running the test')
     parser.add_option('--profile', dest='profile', action='store',
                       help='profile to use when launching the gecko process. If not '
                       'passed, then a profile will be constructed and used.')
-
+    parser.add_option('--perf', dest='perf', action='store_true',
+                      default = False,
+                      help='send performance data to perf data server')
+    parser.add_option('--perf-server', dest='perfserv', action='store',
+                      default=None,
+                      help='dataserver for perf data submission. Entering this value '
+                      'will overwrite the perfserv value in any passed .ini files.')
+    parser.add_option('--repeat', dest='repeat', action='store', type=int,
+                      default=0, help='number of times to repeat the test(s).')
+
     options, tests = parser.parse_args()
 
     if not tests:
         parser.print_usage()
         parser.exit()
 
     if not options.emulator and not options.address and not options.bin:
         parser.print_usage()
@@ -424,10 +471,8 @@ if __name__ == "__main__":
                                   noWindow=options.noWindow,
                                   revision=options.revision,
                                   testgroup=options.testgroup,
                                   autolog=options.autolog)
     runner.run_tests(tests, testtype=options.type)
     if runner.failed > 0:
         sys.exit(10)
 
-
-
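
For reference, the --perf path above pulls its Datazilla submission metadata from the test manifest. A sketch of a manifest carrying those keys follows; all values are placeholders, and putting them in the [DEFAULT] section is an assumption about how manifestparser exposes defaults:

    [DEFAULT]
    ; read by runtests.py when --perf is passed (placeholder values)
    perfserv = datazilla.example.com
    machine_name = test-machine-1
    platform = emulator-arm
    build_name = b2g
    version = 16.0a1
    branch = mozilla-central

A typical invocation would then be, for example: python runtests.py --perf --repeat 2 --emulator arm unit-tests.ini, with --perf-server available to override perfserv from the command line.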
new file mode 100644
--- /dev/null
+++ b/testing/marionette/client/marionette/tests/unit/test_perf.py
@@ -0,0 +1,57 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+from marionette_test import MarionetteTestCase
+
+class TestPerf(MarionetteTestCase):
+    def test_perf_basic(self):
+        self.marionette.add_perf_data("testgroup", "testperf", 10)
+        data = self.marionette.get_perf_data()
+        self.assertTrue(data.has_key("testgroup"))
+        self.assertTrue(data["testgroup"].has_key("testperf"))
+        self.assertEqual(10, data["testgroup"]["testperf"][0])
+        self.marionette.add_perf_data("testgroup", "testperf", 20)
+        data = self.marionette.get_perf_data()
+        self.assertTrue(data.has_key("testgroup"))
+        self.assertTrue(data["testgroup"].has_key("testperf"))
+        self.assertEqual(20, data["testgroup"]["testperf"][1])
+        self.marionette.add_perf_data("testgroup", "testperf2", 20)
+        data = self.marionette.get_perf_data()
+        self.assertTrue(data.has_key("testgroup"))
+        self.assertTrue(data["testgroup"].has_key("testperf2"))
+        self.assertEqual(20, data["testgroup"]["testperf2"][0])
+        self.marionette.add_perf_data("testgroup2", "testperf3", 30)
+        data = self.marionette.get_perf_data()
+        self.assertTrue(data.has_key("testgroup2"))
+        self.assertTrue(data["testgroup2"].has_key("testperf3"))
+        self.assertEqual(30, data["testgroup2"]["testperf3"][0])
+
+    def test_perf_script(self):
+        self.marionette.execute_script("addPerfData('testgroup', 'testperf', 10);")
+        data = self.marionette.get_perf_data()
+        self.assertTrue(data.has_key("testgroup"))
+        self.assertTrue(data["testgroup"].has_key("testperf"))
+        self.assertEqual(10, data["testgroup"]["testperf"][0])
+        self.marionette.execute_script("addPerfData('testgroup', 'testperf', 20);")
+        data = self.marionette.get_perf_data()
+        self.assertTrue(data.has_key("testgroup"))
+        self.assertTrue(data["testgroup"].has_key("testperf"))
+        self.assertEqual(20, data["testgroup"]["testperf"][1])
+        self.marionette.execute_script("addPerfData('testgroup', 'testperf2', 20);")
+        data = self.marionette.get_perf_data()
+        self.assertTrue(data.has_key("testgroup"))
+        self.assertTrue(data["testgroup"].has_key("testperf2"))
+        self.assertEqual(20, data["testgroup"]["testperf2"][0])
+        self.marionette.execute_script("addPerfData('testgroup2', 'testperf3', 30);")
+        data = self.marionette.get_perf_data()
+        self.assertTrue(data.has_key("testgroup2"))
+        self.assertTrue(data["testgroup2"].has_key("testperf3"))
+        self.assertEqual(30, data["testgroup2"]["testperf3"][0])
+
+class TestPerfChrome(TestPerf):
+    def setUp(self):
+        MarionetteTestCase.setUp(self)
+        self.marionette.set_context("chrome")
+
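
The assertions above pin down the storage contract: repeated datapoints for the same (suite, name) pair accumulate in order rather than overwrite. Condensed, in the style of the tests above (sketch, values illustrative):

    self.marionette.add_perf_data('suite', 'metric', 10)
    self.marionette.add_perf_data('suite', 'metric', 20)  # appends
    assert self.marionette.get_perf_data()['suite']['metric'] == [10, 20]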
--- a/testing/marionette/client/marionette/tests/unit/unit-tests.ini
+++ b/testing/marionette/client/marionette/tests/unit/unit-tests.ini
@@ -5,16 +5,17 @@ b2g = false
 b2g = false
 [test_getattr.py]
 b2g = false
 [test_elementState.py]
 b2g = false
 [test_text.py]
 b2g = false
 
+[test_perf.py]
 [test_log.py]
 [test_emulator.py]
 [test_execute_async_script.py]
 [test_execute_script.py]
 [test_simpletest_fail.js]
 [test_findelement.py]
 b2g = false
 
--- a/testing/marionette/client/marionette/venv_test.sh
+++ b/testing/marionette/client/marionette/venv_test.sh
@@ -37,16 +37,20 @@ else
   echo "Creating a virtual environment in $VENV_DIR"
   curl https://raw.github.com/pypa/virtualenv/develop/virtualenv.py | ${PYTHON} - $VENV_DIR
   cd $VENV_DIR
   . bin/activate
   # set up mozbase
   git clone git://github.com/mozilla/mozbase.git
   cd mozbase
   python setup_development.py
+  cd ..
+  git clone git://github.com/mozilla/datazilla_client.git
+  cd datazilla_client
+  python setup.py develop
 fi
 
 # update the marionette_client
 cd $MARIONETTE_HOME
 python setup.py develop
 cd marionette
 
 # pop off the python parameter
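
After the script runs, a quick sanity check that the new dependency is importable (a sketch; it assumes the clone succeeded and that datazilla_client installs a top-level datazilla module, which is what the import in runtests.py expects):

    python -c "import datazilla; print datazilla.__file__"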
--- a/testing/marionette/jar.mn
+++ b/testing/marionette/jar.mn
@@ -4,16 +4,17 @@
 
 marionette.jar:
 % content marionette %content/
   content/marionette-actors.js      (marionette-actors.js)
   content/marionette-listener.js    (marionette-listener.js)
   content/marionette-elements.js    (marionette-elements.js)
   content/marionette-log-obj.js     (marionette-log-obj.js)
   content/marionette-simpletest.js  (marionette-simpletest.js)
+  content/marionette-perf.js        (marionette-perf.js)
   content/EventUtils.js  (EventUtils.js)
   content/ChromeUtils.js  (ChromeUtils.js)
 
 % content specialpowers %content/
   content/specialpowers.js (../mochitest/specialpowers/content/specialpowers.js)
   content/SpecialPowersObserver.js (../mochitest/specialpowers/components/SpecialPowersObserver.js)
   content/specialpowersAPI.js (../mochitest/tests/SimpleTest/specialpowersAPI.js)
   content/SpecialPowersObserverAPI.js (../mochitest/tests/SimpleTest/SpecialPowersObserverAPI.js)
--- a/testing/marionette/marionette-actors.js
+++ b/testing/marionette/marionette-actors.js
@@ -9,16 +9,17 @@
  */
 
 let {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components;
 
 let loader = Cc["@mozilla.org/moz/jssubscript-loader;1"]
                .getService(Ci.mozIJSSubScriptLoader);
 loader.loadSubScript("chrome://marionette/content/marionette-simpletest.js");
 loader.loadSubScript("chrome://marionette/content/marionette-log-obj.js");
+loader.loadSubScript("chrome://marionette/content/marionette-perf.js");
 Cu.import("chrome://marionette/content/marionette-elements.js");
 let utils = {};
 loader.loadSubScript("chrome://marionette/content/EventUtils.js", utils);
 loader.loadSubScript("chrome://marionette/content/ChromeUtils.js", utils);
 loader.loadSubScript("chrome://marionette/content/atoms.js", utils);
 
 let specialpowers = {};
 loader.loadSubScript("chrome://specialpowers/content/SpecialPowersObserver.js",
@@ -116,27 +117,28 @@ function MarionetteDriverActor(aConnecti
   this.messageManager = Cc["@mozilla.org/globalmessagemanager;1"]
                           .getService(Ci.nsIChromeFrameMessageManager);
   this.browsers = {}; //holds list of BrowserObjs
   this.curBrowser = null; // points to current browser
   this.context = "content";
   this.scriptTimeout = null;
   this.timer = null;
   this.marionetteLog = new MarionetteLogObj();
+  this.marionettePerf = new MarionettePerfData();
   this.command_id = null;
   this.mainFrame = null; //topmost chrome frame
   this.curFrame = null; //subframe that currently has focus
   this.importedScripts = FileUtils.getFile('TmpD', ['marionettescriptchrome']);
   
   //register all message listeners
   this.messageManager.addMessageListener("Marionette:ok", this);
   this.messageManager.addMessageListener("Marionette:done", this);
   this.messageManager.addMessageListener("Marionette:error", this);
   this.messageManager.addMessageListener("Marionette:log", this);
-  this.messageManager.addMessageListener("Marionette:testLog", this);
+  this.messageManager.addMessageListener("Marionette:shareData", this);
   this.messageManager.addMessageListener("Marionette:register", this);
   this.messageManager.addMessageListener("Marionette:goUrl", this);
   this.messageManager.addMessageListener("Marionette:runEmulatorCmd", this);
 }
 
 MarionetteDriverActor.prototype = {
 
   //name of the actor
@@ -388,16 +390,31 @@ MarionetteDriverActor.prototype = {
   /**
    * Return all logged messages.
    */
   getLogs: function MDA_getLogs() {
     this.sendResponse(this.marionetteLog.getLogs());
   },
 
   /**
+   * Log some performance data
+   */
+  addPerfData: function MDA_addPerfData(aRequest) {
+    this.marionettePerf.addPerfData(aRequest.suite, aRequest.name, aRequest.value);
+    this.sendOk();
+  },
+
+  /**
+   * Retrieve the performance data
+   */
+  getPerfData: function MDA_getPerfData() {
+    this.sendResponse(this.marionettePerf.getPerfData());
+  },
+
+  /**
    * Sets the context of the subsequent commands to be either 'chrome' or 'content'
    *
    * @param object aRequest
    *        'value' member holds the name of the context to be switched to
    */
   setContext: function MDA_setContext(aRequest) {
     let context = aRequest.value;
     if (context != "content" && context != "chrome") {
@@ -518,17 +535,17 @@ MarionetteDriverActor.prototype = {
     if (this.context == "content") {
       this.sendAsync("executeScript", {value: aRequest.value,
                                        args: aRequest.args,
                                        newSandbox:aRequest.newSandbox});
       return;
     }
 
     let curWindow = this.getCurrentWindow();
-    let marionette = new Marionette(this, curWindow, "chrome", this.marionetteLog);
+    let marionette = new Marionette(this, curWindow, "chrome", this.marionetteLog, this.marionettePerf);
     let _chromeSandbox = this.createExecuteSandbox(curWindow, marionette, aRequest.args);
     if (!_chromeSandbox)
       return;
 
     try {
       _chromeSandbox.finish = function chromeSandbox_finish() {
         return marionette.generate_results();
       };
@@ -625,17 +642,17 @@ MarionetteDriverActor.prototype = {
                                             id: this.command_id,
                                             newSandbox: aRequest.newSandbox});
       return;
     }
 
     let curWindow = this.getCurrentWindow();
     let original_onerror = curWindow.onerror;
     let that = this;
-    let marionette = new Marionette(this, curWindow, "chrome", this.marionetteLog);
+    let marionette = new Marionette(this, curWindow, "chrome", this.marionetteLog, this.marionettePerf);
     marionette.command_id = this.command_id;
 
     function chromeAsyncReturnFunc(value, status) {
       if (that._emu_cbs && Object.keys(that._emu_cbs).length) {
         value = "Emulator callback still pending when finish() called";
         status = 500;
         that._emu_cbs = null;
       }
@@ -1196,17 +1213,17 @@ MarionetteDriverActor.prototype = {
         winEnum.getNext().messageManager.removeDelayedFrameScript("chrome://marionette/content/marionette-listener.js"); 
       }
     }
     this.sendOk();
     this.messageManager.removeMessageListener("Marionette:ok", this);
     this.messageManager.removeMessageListener("Marionette:done", this);
     this.messageManager.removeMessageListener("Marionette:error", this);
     this.messageManager.removeMessageListener("Marionette:log", this);
-    this.messageManager.removeMessageListener("Marionette:testLog", this);
+    this.messageManager.removeMessageListener("Marionette:shareData", this);
     this.messageManager.removeMessageListener("Marionette:register", this);
     this.messageManager.removeMessageListener("Marionette:goUrl", this);
     this.messageManager.removeMessageListener("Marionette:runEmulatorCmd", this);
     this.curBrowser = null;
     try {
       this.importedScripts.remove(false);
     }
     catch (e) {
@@ -1285,19 +1302,24 @@ MarionetteDriverActor.prototype = {
         break;
       case "Marionette:error":
         this.sendError(message.json.message, message.json.status, message.json.stacktrace, message.json.command_id);
         break;
       case "Marionette:log":
         //log server-side messages
         logger.info(message.json.message);
         break;
-      case "Marionette:testLog":
+      case "Marionette:shareData":
         //log messages from tests
-        this.marionetteLog.addLogs(message.json.value);
+        if (message.json.log) {
+          this.marionetteLog.addLogs(message.json.log);
+        }
+        if (message.json.perf) {
+          this.marionettePerf.appendPerfData(message.json.perf);
+        }
         break;
       case "Marionette:runEmulatorCmd":
         this.sendToClient(message.json);
         break;
       case "Marionette:register":
         // This code processes the content listener's registration information
         // and either accepts the listener, or ignores it
         let nullPrevious = (this.curBrowser.curFrameId == null);
@@ -1326,16 +1348,18 @@ MarionetteDriverActor.prototype = {
     }
   },
 };
 
 MarionetteDriverActor.prototype.requestTypes = {
   "newSession": MarionetteDriverActor.prototype.newSession,
   "log": MarionetteDriverActor.prototype.log,
   "getLogs": MarionetteDriverActor.prototype.getLogs,
+  "addPerfData": MarionetteDriverActor.prototype.addPerfData,
+  "getPerfData": MarionetteDriverActor.prototype.getPerfData,
   "setContext": MarionetteDriverActor.prototype.setContext,
   "executeScript": MarionetteDriverActor.prototype.execute,
   "setScriptTimeout": MarionetteDriverActor.prototype.setScriptTimeout,
   "executeAsyncScript": MarionetteDriverActor.prototype.executeWithCallback,
   "executeJSScript": MarionetteDriverActor.prototype.executeJSScript,
   "setSearchTimeout": MarionetteDriverActor.prototype.setSearchTimeout,
   "findElement": MarionetteDriverActor.prototype.findElement,
   "findElements": MarionetteDriverActor.prototype.findElements,
--- a/testing/marionette/marionette-listener.js
+++ b/testing/marionette/marionette-listener.js
@@ -8,30 +8,32 @@ let {classes: Cc, interfaces: Ci, utils:
 let uuidGen = Cc["@mozilla.org/uuid-generator;1"]
                 .getService(Ci.nsIUUIDGenerator);
 
 let loader = Cc["@mozilla.org/moz/jssubscript-loader;1"]
                .getService(Ci.mozIJSSubScriptLoader);
 
 loader.loadSubScript("chrome://marionette/content/marionette-simpletest.js");
 loader.loadSubScript("chrome://marionette/content/marionette-log-obj.js");
+loader.loadSubScript("chrome://marionette/content/marionette-perf.js");
 Cu.import("chrome://marionette/content/marionette-elements.js");
 Cu.import("resource://gre/modules/FileUtils.jsm");
 Cu.import("resource://gre/modules/NetUtil.jsm");  
 let utils = {};
 utils.window = content;
 // Load Event/ChromeUtils for use with JS scripts:
 loader.loadSubScript("chrome://marionette/content/EventUtils.js", utils);
 loader.loadSubScript("chrome://marionette/content/ChromeUtils.js", utils);
 loader.loadSubScript("chrome://marionette/content/atoms.js", utils);
 
 loader.loadSubScript("chrome://specialpowers/content/specialpowersAPI.js");
 loader.loadSubScript("chrome://specialpowers/content/specialpowers.js");
 
 let marionetteLogObj = new MarionetteLogObj();
+let marionettePerf = new MarionettePerfData();
 
 let isB2G = false;
 
 let marionetteTimeout = null;
 let winUtil = content.QueryInterface(Ci.nsIInterfaceRequestor)
                      .getInterface(Ci.nsIDOMWindowUtils);
 let listenerId = null; //unique ID of this listener
 let activeFrame = null;
@@ -243,17 +245,17 @@ function createExecuteContentSandbox(aWi
   let sandbox = new Cu.Sandbox(aWindow);
   sandbox.global = sandbox;
   sandbox.window = aWindow;
   sandbox.document = sandbox.window.document;
   sandbox.navigator = sandbox.window.navigator;
   sandbox.__proto__ = sandbox.window;
   sandbox.testUtils = utils;
 
-  let marionette = new Marionette(this, aWindow, "content", marionetteLogObj);
+  let marionette = new Marionette(this, aWindow, "content", marionetteLogObj, marionettePerf);
   sandbox.marionette = marionette;
   marionette.exports.forEach(function(fn) {
     sandbox[fn] = marionette[fn].bind(marionette);
   });
 
   sandbox.SpecialPowers = new SpecialPowers(aWindow);
 
   sandbox.asyncComplete = function sandbox_asyncComplete(value, status) {
@@ -265,19 +267,20 @@ function createExecuteContentSandbox(aWi
 
     curWindow.removeEventListener("unload", errUnload, false);
 
     /* clear all timeouts potentially generated by the script*/
     for (let i = 0; i <= asyncTestTimeoutId; i++) {
       curWindow.clearTimeout(i);
     }
 
-    sendSyncMessage("Marionette:testLog",
-                    {value: elementManager.wrapValue(marionetteLogObj.getLogs())});
+    sendSyncMessage("Marionette:shareData", {log: elementManager.wrapValue(marionetteLogObj.getLogs()),
+                                             perf: elementManager.wrapValue(marionettePerf.getPerfData())});
     marionetteLogObj.clearLogs();
+    marionettePerf.clearPerfData();
     if (status == 0){
       sendResponse({value: elementManager.wrapValue(value), status: status}, asyncTestCommandId);
     }
     else {
       sendError(value, status, null, asyncTestCommandId);
     }
 
     asyncTestRunning = false;
@@ -318,18 +321,20 @@ function executeScript(msg, directInject
       if (importedScripts.exists()) {
         let stream = Components.classes["@mozilla.org/network/file-input-stream;1"].  
                       createInstance(Components.interfaces.nsIFileInputStream);
         stream.init(importedScripts, -1, 0, 0);
         let data = NetUtil.readInputStreamToString(stream, stream.available());
         script = data + script;
       }
       let res = Cu.evalInSandbox(script, sandbox, "1.8");
-      sendSyncMessage("Marionette:testLog", {value: elementManager.wrapValue(marionetteLogObj.getLogs())});
+      sendSyncMessage("Marionette:shareData", {log: elementManager.wrapValue(marionetteLogObj.getLogs()),
+                                               perf: elementManager.wrapValue(marionettePerf.getPerfData())});
       marionetteLogObj.clearLogs();
+      marionettePerf.clearPerfData();
       if (res == undefined || res.passed == undefined) {
         sendError("Marionette.finish() not called", 17, null);
       }
       else {
         sendResponse({value: elementManager.wrapValue(res)});
       }
     }
     else {
@@ -347,18 +352,20 @@ function executeScript(msg, directInject
       if (importedScripts.exists()) {
         let stream = Components.classes["@mozilla.org/network/file-input-stream;1"].  
                       createInstance(Components.interfaces.nsIFileInputStream);
         stream.init(importedScripts, -1, 0, 0);
         let data = NetUtil.readInputStreamToString(stream, stream.available());
         scriptSrc = data + scriptSrc;
       }
       let res = Cu.evalInSandbox(scriptSrc, sandbox, "1.8");
-      sendSyncMessage("Marionette:testLog", {value: elementManager.wrapValue(marionetteLogObj.getLogs())});
+      sendSyncMessage("Marionette:shareData", {log: elementManager.wrapValue(marionetteLogObj.getLogs()),
+                                               perf: elementManager.wrapValue(marionettePerf.getPerfData())});
       marionetteLogObj.clearLogs();
+      marionettePerf.clearPerfData();
       sendResponse({value: elementManager.wrapValue(res)});
     }
   }
   catch (e) {
     // 17 = JavascriptException
     sendError(e.name + ': ' + e.message, 17, e.stack);
   }
 }
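
With this change the content listener batches logs and perf data into a single Marionette:shareData message in place of the old Marionette:testLog. The payload shape, inferred from the senders above (entries illustrative; log lines are whatever MarionetteLogObj.getLogs() returns):

    {
      "log":  [["INFO", "example log line"]],
      "perf": {"startup": {"cold_load_ms": [1234, 1301]}}
    }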
new file mode 100644
--- /dev/null
+++ b/testing/marionette/marionette-perf.js
@@ -0,0 +1,83 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+function MarionettePerfData() {
+  this.perfData = {};
+}
+MarionettePerfData.prototype = {
+  /**
+   * Add performance data. 
+   *
+   * Datapoints within a testSuite get rolled up into
+   * one value in Datazilla. You can then drill down to
+   * individual (testName,data) pairs
+   * 
+   * If the testSuite and testName exist, the data will
+   * be added to this dataset.
+   *
+   * @param testSuite String
+   *        name of the test suite
+   * @param testName String
+   *        name of the test
+   * @param object data
+   *        data value to store
+   */
+  addPerfData: function Marionette__addPerfData(testSuite, testName, data) {
+    if (this.perfData[testSuite]) {
+      if (this.perfData[testSuite][testName]) {
+        this.perfData[testSuite][testName].push(data);
+      }
+      else {
+        this.perfData[testSuite][testName] = [data];
+      }
+    }
+    else {
+      this.perfData[testSuite] = {};
+      this.perfData[testSuite][testName] = [data];
+    }
+  },
+
+  /**
+   * Join another set of performance data with this set.
+   * Used by the actor to join data gathered from the listener.
+   * @param object data
+   *        The performance data to join
+   */
+  appendPerfData: function Marionette__appendPerfData(data) {
+    for (var suite in data) {
+      if (data.hasOwnProperty(suite)) {
+        if (this.perfData[suite]) {
+          for (var test in data[suite]) {
+            if (this.perfData[suite][test]) {
+              this.perfData[suite][test] = this.perfData[suite][test].concat(data[suite][test]);
+            }
+            else {
+              this.perfData[suite][test] = data[suite][test];
+            }
+          }
+        }
+        else {
+          this.perfData[suite] = data[suite];
+        }
+      }
+    }
+  },
+
+  /**
+   * Retrieve the performance data.
+   *
+   * @return object
+   *      A map of suite names to {testName: [datapoints]} objects
+   */
+  getPerfData: function Marionette__getPerfData() {
+    return this.perfData;
+  },
+
+  /**
+   * Clears the current performance data
+   */
+  clearPerfData: function Marionette__clearPerfData() {
+    this.perfData = {};
+  },
+}
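
appendPerfData merges suite by suite and concatenates datapoint arrays test by test. A worked example of the merge (data illustrative):

    before:  this.perfData == {"a": {"x": [1]}}
    call:    appendPerfData({"a": {"x": [2], "y": [3]}, "b": {"z": [4]}})
    after:   this.perfData == {"a": {"x": [1, 2], "y": [3]}, "b": {"z": [4]}}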
--- a/testing/marionette/marionette-simpletest.js
+++ b/testing/marionette/marionette-simpletest.js
@@ -1,27 +1,28 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 /*
  * The Marionette object, passed to the script context.
  */
 
-function Marionette(scope, window, context, logObj) {
+function Marionette(scope, window, context, logObj, perfData) {
   this.scope = scope;
   this.window = window;
   this.tests = [];
   this.logObj = logObj;
+  this.perfData = perfData;
   this.context = context;
   this.timeout = 0;
 }
 
 Marionette.prototype = {
   exports: ['ok', 'is', 'isnot', 'log', 'getLogs', 'generate_results', 'waitFor',
-            'runEmulatorCmd'],
+            'runEmulatorCmd', 'addPerfData', 'getPerfData'],
 
   ok: function Marionette__ok(condition, name, diag) {
     let test = {'result': !!condition, 'name': name, 'diag': diag};
     this.logResult(test, "TEST-PASS", "TEST-UNEXPECTED-FAIL");
     this.tests.push(test);
   },
 
   is: function Marionette__is(a, b, name) {
@@ -33,16 +34,24 @@ Marionette.prototype = {
 
   isnot: function Marionette__isnot (a, b, name) {
     let pass = (a != b);
     let diag = pass ? this.repr(a) + " should not equal " + this.repr(b)
                     : "didn't expect " + this.repr(a) + ", but got it";
     this.ok(pass, name, diag);
   },
 
+  addPerfData: function Marionette__addPerfData(testSuite, testName, data) {
+    this.perfData.addPerfData(testSuite, testName, data);
+  },
+
+  getPerfData: function Marionette__getPerfData() {
+    return this.perfData.getPerfData();
+  },
+
   log: function Marionette__log(msg, level) {
     dump("MARIONETTE LOG: " + (level ? level : "INFO") + ": " + msg + "\n");
     if (this.logObj != null) {
       this.logObj.log(msg, level);
     }
   },
 
   getLogs: function Marionette__getLogs() {