Bug 865867 - Remove unused perf code from Marionette, r=mdas
author Jonathan Griffin <jgriffin@mozilla.com>
Mon, 29 Apr 2013 09:38:54 -0700
changeset 141171 ff7cea777341657658ef034641838e6ca85430fd
parent 141170 33b755e0237da7ae5c65eb752b94c127991f1ee6
child 141172 c74d9bbb00490291adc044f175e240ab3cd05d5e
push id 2579
push user akeybl@mozilla.com
push date Mon, 24 Jun 2013 18:52:47 +0000
treeherder mozilla-beta@b69b7de8a05a [default view] [failures only]
perfherder [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers mdas
bugs 865867
milestone 23.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 865867 - Remove unused perf code from Marionette, r=mdas
testing/marionette/client/marionette/b2g_update_test.py
testing/marionette/client/marionette/marionette.py
testing/marionette/client/marionette/marionette_test.py
testing/marionette/client/marionette/runtests.py
testing/marionette/client/marionette/tests/unit/test_perf.py
testing/marionette/client/marionette/tests/unit/unit-tests.ini
testing/marionette/jar.mn
testing/marionette/marionette-actors.js
testing/marionette/marionette-listener.js
testing/marionette/marionette-perf.js
testing/marionette/marionette-simpletest.js
--- a/testing/marionette/client/marionette/b2g_update_test.py
+++ b/testing/marionette/client/marionette/b2g_update_test.py
@@ -198,17 +198,16 @@ class B2GUpdateTestCase(MarionetteTestCa
         self.marionette.set_context(Marionette.CONTEXT_CONTENT)
         self.marionette.execute_script("log('TEST-END: %s:%s')" %
                                        (self.filepath.replace('\\', '\\\\'), self.methodName))
         self.marionette.test_name = None
 
         self.duration = time.time() - self.start_time
         if self.marionette.session is not None:
             self.loglines.extend(self.marionette.get_logs())
-            self.perfdata = self.marionette.get_perf_data()
             self.marionette.delete_session()
         self.marionette = None
 
     def reset(self, b2g_pid):
         self.print_status('RESET-MARIONETTE')
         self._marionette_weakref = weakref.ref(self.runner.reset(b2g_pid))
         self.marionette = self._marionette_weakref()
         if self.marionette.session is None:
--- a/testing/marionette/client/marionette/marionette.py
+++ b/testing/marionette/client/marionette/marionette.py
@@ -640,22 +640,16 @@ class Marionette(object):
         return HTMLElement(self, response)
 
     def log(self, msg, level=None):
         return self._send_message('log', 'ok', value=msg, level=level)
 
     def get_logs(self):
         return self._send_message('getLogs', 'value')
 
-    def add_perf_data(self, suite, name, value):
-        return self._send_message('addPerfData', 'ok', suite=suite, name=name, value=value)
-
-    def get_perf_data(self):
-        return self._send_message('getPerfData', 'value')
-
     def import_script(self, js_file):
         js = ''
         with open(js_file, 'r') as f:
             js = f.read()
         return self._send_message('importScript', 'ok', script=js)
 
     def add_cookie(self, cookie):
         """
--- a/testing/marionette/client/marionette/marionette_test.py
+++ b/testing/marionette/client/marionette/marionette_test.py
@@ -24,17 +24,16 @@ def skip_if_b2g(target):
 
 class CommonTestCase(unittest.TestCase):
 
     match_re = None
 
     def __init__(self, methodName):
         unittest.TestCase.__init__(self, methodName)
         self.loglines = None
-        self.perfdata = None
         self.duration = 0
 
     @classmethod
     def match(cls, filename):
         """
         Determines if the specified filename should be handled by this
         test class; this is done by looking for a match for the filename
         using cls.match_re.
@@ -88,17 +87,16 @@ permissions.forEach(function (perm) {
         self.marionette = self._marionette_weakref()
         if self.marionette.session is None:
             self.marionette.start_session()
 
     def tearDown(self):
         self.duration = time.time() - self.start_time
         if self.marionette.session is not None:
             self.loglines = self.marionette.get_logs()
-            self.perfdata = self.marionette.get_perf_data()
             self.marionette.delete_session()
         self.marionette = None
 
 class MarionetteTestCase(CommonTestCase):
 
     match_re = re.compile(r"test_(.*)\.py$")
 
     def __init__(self, marionette_weakref, methodName='runTest',
@@ -223,19 +221,18 @@ class MarionetteJSTestCase(CommonTestCas
                 for failure in results['failures']:
                     diag = "" if failure.get('diag') is None else "| %s " % failure['diag']
                     name = "got false, expected true" if failure.get('name') is None else failure['name']
                     fails.append('TEST-UNEXPECTED-FAIL | %s %s| %s' %
                                  (os.path.basename(self.jsFile), diag, name))
                 self.assertEqual(0, results['failed'],
                                  '%d tests failed:\n%s' % (results['failed'], '\n'.join(fails)))
 
-            if not self.perfdata:
-                self.assertTrue(results['passed'] + results['failed'] > 0,
-                                'no tests run')
+            self.assertTrue(results['passed'] + results['failed'] > 0,
+                            'no tests run')
 
         except ScriptTimeoutException:
             if 'timeout' in self.jsFile:
                 # expected exception
                 pass
             else:
                 self.loglines = self.marionette.get_logs()
                 raise
--- a/testing/marionette/client/marionette/runtests.py
+++ b/testing/marionette/client/marionette/runtests.py
@@ -26,17 +26,16 @@ from marionette_test import MarionetteJS
 
 class MarionetteTestResult(unittest._TextTestResult):
 
     def __init__(self, *args, **kwargs):
         self.marionette = kwargs['marionette']
         del kwargs['marionette']
         super(MarionetteTestResult, self).__init__(*args, **kwargs)
         self.passed = 0
-        self.perfdata = None
         self.tests_passed = []
 
     def addSuccess(self, test):
         super(MarionetteTestResult, self).addSuccess(test)
         self.passed += 1
         self.tests_passed.append(test)
 
     def getInfo(self, test):
@@ -65,24 +64,16 @@ class MarionetteTestResult(unittest._Tex
                         break
                 if skip_log:
                     return
                 self.stream.writeln('START LOG:')
                 for line in testcase.loglines:
                     self.stream.writeln(' '.join(line).encode('ascii', 'replace'))
                 self.stream.writeln('END LOG:')
 
-    def getPerfData(self, test):
-        for testcase in test._tests:
-            if testcase.perfdata:
-                if not self.perfdata:
-                    self.perfdata = datazilla.DatazillaResult(testcase.perfdata)
-                else:
-                    self.perfdata.join_results(testcase.perfdata)
-
     def printErrorList(self, flavour, errors):
         for test, err in errors:
             self.stream.writeln(self.separator1)
             self.stream.writeln("%s: %s" % (flavour, self.getDescription(test)))
             self.stream.writeln(self.separator2)
             errlines = err.strip().split('\n')
             for line in errlines[0:-1]:
                 self.stream.writeln("%s" % line)
@@ -99,18 +90,16 @@ class MarionetteTestResult(unittest._Tex
             self.shouldStop = True
 
 
 class MarionetteTextTestRunner(unittest.TextTestRunner):
 
     resultclass = MarionetteTestResult
 
     def __init__(self, **kwargs):
-        self.perf = kwargs['perf']
-        del kwargs['perf']
         self.marionette = kwargs['marionette']
         del kwargs['marionette']
         unittest.TextTestRunner.__init__(self, **kwargs)
 
     def _makeResult(self):
         return self.resultclass(self.stream,
                                 self.descriptions,
                                 self.verbosity,
@@ -132,18 +121,16 @@ class MarionetteTextTestRunner(unittest.
         finally:
             stopTestRun = getattr(result, 'stopTestRun', None)
             if stopTestRun is not None:
                 stopTestRun()
         stopTime = time.time()
         timeTaken = stopTime - startTime
         result.printErrors()
         result.printLogs(test)
-        if self.perf:
-            result.getPerfData(test)
         if hasattr(result, 'separator2'):
             self.stream.writeln(result.separator2)
         run = result.testsRun
         self.stream.writeln("Ran %d test%s in %.3fs" %
                             (run, run != 1 and "s" or "", timeTaken))
         self.stream.writeln()
 
         expectedFails = unexpectedSuccesses = skipped = 0
@@ -181,19 +168,19 @@ class MarionetteTextTestRunner(unittest.
 
 class MarionetteTestRunner(object):
 
     def __init__(self, address=None, emulator=None, emulatorBinary=None,
                  emulatorImg=None, emulator_res='480x800', homedir=None,
                  app=None, bin=None, profile=None, autolog=False, revision=None,
                  es_server=None, rest_server=None, logger=None,
                  testgroup="marionette", noWindow=False, logcat_dir=None,
-                 xml_output=None, repeat=0, perf=False, perfserv=None,
-                 gecko_path=None, testvars=None, tree=None, type=None,
-                 device=None, symbols_path=None, **kwargs):
+                 xml_output=None, repeat=0, gecko_path=None, testvars=None,
+                 tree=None, type=None, device=None, symbols_path=None,
+                 **kwargs):
         self.address = address
         self.emulator = emulator
         self.emulatorBinary = emulatorBinary
         self.emulatorImg = emulatorImg
         self.emulator_res = emulator_res
         self.homedir = homedir
         self.app = app
         self.bin = bin
@@ -204,21 +191,18 @@ class MarionetteTestRunner(object):
         self.es_server = es_server
         self.rest_server = rest_server
         self.logger = logger
         self.noWindow = noWindow
         self.httpd = None
         self.baseurl = None
         self.marionette = None
         self.logcat_dir = logcat_dir
-        self.perfrequest = None
         self.xml_output = xml_output
         self.repeat = repeat
-        self.perf = perf
-        self.perfserv = perfserv
         self.gecko_path = gecko_path
         self.testvars = {}
         self.test_kwargs = kwargs
         self.tree = tree
         self.type = type
         self.device = device
         self.symbols_path = symbols_path
 
@@ -249,17 +233,16 @@ class MarionetteTestRunner(object):
         self.testvars['xml_output'] = self.xml_output
         self.results = []
 
     def reset_test_stats(self):
         self.passed = 0
         self.failed = 0
         self.todo = 0
         self.failures = []
-        self.perfrequest = None
 
     def start_httpd(self):
         host = moznetwork.get_ip()
         s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         s.bind(("",0))
         port = s.getsockname()[1]
         s.close()
         self.baseurl = 'http://%s:%d/' % (host, port)
@@ -369,22 +352,16 @@ class MarionetteTestRunner(object):
         try:
             self.marionette.check_for_crash()
         except:
             traceback.print_exc()
 
         self.elapsedtime = datetime.utcnow() - starttime
         if self.autolog:
             self.post_to_autolog(self.elapsedtime)
-        if self.perfrequest and options.perf:
-            try:
-                self.perfrequest.submit()
-            except Exception, e:
-                print "Could not submit to datazilla"
-                print e
 
         if self.xml_output:
             xml_dir = os.path.dirname(os.path.abspath(self.xml_output))
             if not os.path.exists(xml_dir):
                 os.makedirs(xml_dir)
             with open(self.xml_output, 'w') as f:
                 f.write(self.generate_xml(self.results))
 
@@ -429,40 +406,16 @@ class MarionetteTestRunner(object):
                     elif atype.startswith('-'):
                         testargs.update({ atype[1:]: 'false' })
                     else:
                         testargs.update({ atype: 'true' })
 
             manifest = TestManifest()
             manifest.read(filepath)
 
-            if self.perf:
-                if self.perfserv is None:
-                    self.perfserv = manifest.get("perfserv")[0]
-                machine_name = socket.gethostname()
-                try:
-                    manifest.has_key("machine_name")
-                    machine_name = manifest.get("machine_name")[0]
-                except:
-                    self.logger.info("Using machine_name: %s" % machine_name)
-                os_name = platform.system()
-                os_version = platform.release()
-                self.perfrequest = datazilla.DatazillaRequest(
-                             server=self.perfserv,
-                             machine_name=machine_name,
-                             os=os_name,
-                             os_version=os_version,
-                             platform=manifest.get("platform")[0],
-                             build_name=manifest.get("build_name")[0],
-                             version=manifest.get("version")[0],
-                             revision=self.revision,
-                             branch=manifest.get("branch")[0],
-                             id=os.getenv('BUILD_ID'),
-                             test_date=int(time.time()))
-
             manifest_tests = manifest.active_tests(disabled=False)
 
             for i in manifest.get(tests=manifest_tests, **testargs):
                 self.run_test(i["path"])
                 if self.marionette.check_for_crash():
                     return
             return
 
@@ -476,24 +429,21 @@ class MarionetteTestRunner(object):
                                            testloader,
                                            self.marionette,
                                            self.testvars,
                                            **self.test_kwargs)
                 break
 
         if suite.countTestCases():
             runner = MarionetteTextTestRunner(verbosity=3,
-                                              perf=self.perf,
                                               marionette=self.marionette)
             results = runner.run(suite)
             self.results.append(results)
 
             self.failed += len(results.failures) + len(results.errors)
-            if results.perfdata and options.perf:
-                self.perfrequest.add_datazilla_result(results.perfdata)
             if hasattr(results, 'skipped'):
                 self.todo += len(results.skipped) + len(results.expectedFailures)
             self.passed += results.passed
             for failure in results.failures + results.errors:
                 self.failures.append((results.getInfo(failure[0]), failure[1], 'TEST-UNEXPECTED-FAIL'))
             if hasattr(results, 'unexpectedSuccess'):
                 self.failed += len(results.unexpectedSuccesses)
                 for failure in results.unexpectedSuccesses:
@@ -584,17 +534,17 @@ class MarionetteTestOptions(OptionParser
         self.add_option('--autolog',
                         action='store_true',
                         dest='autolog',
                         default=False,
                         help='send test results to autolog')
         self.add_option('--revision',
                         action='store',
                         dest='revision',
-                        help='git revision for autolog/perfdata submissions')
+                        help='git revision for autolog submissions')
         self.add_option('--testgroup',
                         action='store',
                         dest='testgroup',
                         help='testgroup names for autolog submissions')
         self.add_option('--emulator',
                         action='store',
                         dest='emulator',
                         choices=['x86', 'arm'],
@@ -610,17 +560,17 @@ class MarionetteTestOptions(OptionParser
                         action='store',
                         dest='emulatorImg',
                         help='use a specific image file instead of a fresh one')
         self.add_option('--emulator-res',
                         action='store',
                         dest='emulator_res',
                         type='str',
                         help='set a custom resolution for the emulator'
-                          'Example: "480x800"')
+                             'Example: "480x800"')
         self.add_option('--no-window',
                         action='store_true',
                         dest='noWindow',
                         default=False,
                         help='when Marionette launches an emulator, start it with the -no-window argument')
         self.add_option('--logcat-dir',
                         dest='logcat_dir',
                         action='store',
@@ -654,26 +604,16 @@ class MarionetteTestOptions(OptionParser
                         dest='bin',
                         action='store',
                         help='gecko executable to launch before running the test')
         self.add_option('--profile',
                         dest='profile',
                         action='store',
                         help='profile to use when launching the gecko process. if not passed, then a profile will be '
                              'constructed and used')
-        self.add_option('--perf',
-                        dest='perf',
-                        action='store_true',
-                        default=False,
-                        help='send performance data to perf data server')
-        self.add_option('--perf-server',
-                        dest='perfserv',
-                        action='store',
-                        help='dataserver for perf data submission. entering this value will overwrite the perfserv '
-                             'value in any passed manifest files')
         self.add_option('--repeat',
                         dest='repeat',
                         action='store',
                         type=int,
                         default=0,
                         help='number of times to repeat the test(s)')
         self.add_option('-x', '--xml-output',
                         action='store',
@@ -707,19 +647,16 @@ class MarionetteTestOptions(OptionParser
         if not options.emulator and not options.address and not options.bin:
             print 'must specify --binary, --emulator or --address'
             sys.exit(1)
 
         # default to storing logcat output for emulator runs
         if options.emulator and not options.logcat_dir:
             options.logcat_dir = 'logcat'
 
-        if options.perf:
-            import datazilla
-
         # check for valid resolution string, strip whitespaces
         try:
             if options.emulator_res:
                 dims = options.emulator_res.split('x')
                 assert len(dims) == 2
                 width = str(int(dims[0]))
                 height = str(int(dims[1]))
                 options.emulator_res = 'x'.join([width, height])
deleted file mode 100644
--- a/testing/marionette/client/marionette/tests/unit/test_perf.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-import os
-from marionette_test import MarionetteTestCase
-
-class TestPerf(MarionetteTestCase):
-    def test_perf_basic(self):
-        self.marionette.add_perf_data("testgroup", "testperf", 10)
-        data = self.marionette.get_perf_data()
-        self.assertTrue(data.has_key("testgroup"))
-        self.assertTrue(data["testgroup"].has_key("testperf"))
-        self.assertEqual(10, data["testgroup"]["testperf"][0])
-        self.marionette.add_perf_data("testgroup", "testperf", 20)
-        data = self.marionette.get_perf_data()
-        self.assertTrue(data.has_key("testgroup"))
-        self.assertTrue(data["testgroup"].has_key("testperf"))
-        self.assertEqual(20, data["testgroup"]["testperf"][1])
-        self.marionette.add_perf_data("testgroup", "testperf2", 20)
-        data = self.marionette.get_perf_data()
-        self.assertTrue(data.has_key("testgroup"))
-        self.assertTrue(data["testgroup"].has_key("testperf2"))
-        self.assertEqual(20, data["testgroup"]["testperf2"][0])
-        self.marionette.add_perf_data("testgroup2", "testperf3", 30)
-        data = self.marionette.get_perf_data()
-        self.assertTrue(data.has_key("testgroup2"))
-        self.assertTrue(data["testgroup2"].has_key("testperf3"))
-        self.assertEqual(30, data["testgroup2"]["testperf3"][0])
-
-    def test_perf_script(self):
-        self.marionette.execute_script("addPerfData('testgroup', 'testperf', 10);")
-        data = self.marionette.get_perf_data()
-        self.assertTrue(data.has_key("testgroup"))
-        self.assertTrue(data["testgroup"].has_key("testperf"))
-        self.assertEqual(10, data["testgroup"]["testperf"][0])
-        self.marionette.execute_script("addPerfData('testgroup', 'testperf', 20);")
-        data = self.marionette.get_perf_data()
-        self.assertTrue(data.has_key("testgroup"))
-        self.assertTrue(data["testgroup"].has_key("testperf"))
-        self.assertEqual(20, data["testgroup"]["testperf"][1])
-        self.marionette.execute_script("addPerfData('testgroup', 'testperf2', 20);")
-        data = self.marionette.get_perf_data()
-        self.assertTrue(data.has_key("testgroup"))
-        self.assertTrue(data["testgroup"].has_key("testperf2"))
-        self.assertEqual(20, data["testgroup"]["testperf2"][0])
-        self.marionette.execute_script("addPerfData('testgroup2', 'testperf3', 30);")
-        data = self.marionette.get_perf_data()
-        self.assertTrue(data.has_key("testgroup2"))
-        self.assertTrue(data["testgroup2"].has_key("testperf3"))
-        self.assertEqual(30, data["testgroup2"]["testperf3"][0])
-
-class TestPerfChrome(TestPerf):
-    def setUp(self):
-        MarionetteTestCase.setUp(self)
-        self.marionette.set_context("chrome")
-
--- a/testing/marionette/client/marionette/tests/unit/unit-tests.ini
+++ b/testing/marionette/client/marionette/tests/unit/unit-tests.ini
@@ -24,17 +24,16 @@ b2g = false
 b2g = false
 [test_getattr.py]
 b2g = false
 [test_elementState.py]
 b2g = false
 [test_text.py]
 b2g = false
 
-[test_perf.py]
 [test_log.py]
 [test_emulator.py]
 browser = false
 qemu = true
 
 [test_execute_async_script.py]
 [test_execute_script.py]
 [test_simpletest_fail.js]
--- a/testing/marionette/jar.mn
+++ b/testing/marionette/jar.mn
@@ -5,17 +5,16 @@
 marionette.jar:
 % content marionette %content/
   content/marionette-actors.js      (marionette-actors.js)
   content/marionette-listener.js    (marionette-listener.js)
   content/marionette-elements.js    (marionette-elements.js)
   content/marionette-sendkeys.js    (marionette-sendkeys.js)
   content/marionette-log-obj.js     (marionette-log-obj.js)
   content/marionette-simpletest.js  (marionette-simpletest.js)
-  content/marionette-perf.js  (marionette-perf.js)
   content/EventUtils.js  (EventUtils.js)
   content/ChromeUtils.js  (ChromeUtils.js)
 #ifdef ENABLE_TESTS
   content/test.xul  (client/marionette/chrome/test.xul)
   content/test2.xul  (client/marionette/chrome/test2.xul)
   content/test_nested_iframe.xul  (client/marionette/chrome/test_nested_iframe.xul)
 #endif
 
--- a/testing/marionette/marionette-actors.js
+++ b/testing/marionette/marionette-actors.js
@@ -11,17 +11,16 @@
 const FRAME_SCRIPT = "chrome://marionette/content/marionette-listener.js";
 
 let {classes: Cc, interfaces: Ci, utils: Cu, results: Cr} = Components;
 
 let loader = Cc["@mozilla.org/moz/jssubscript-loader;1"]
                .getService(Ci.mozIJSSubScriptLoader);
 loader.loadSubScript("chrome://marionette/content/marionette-simpletest.js");
 loader.loadSubScript("chrome://marionette/content/marionette-log-obj.js");
-loader.loadSubScript("chrome://marionette/content/marionette-perf.js");
 Cu.import("chrome://marionette/content/marionette-elements.js");
 let utils = {};
 loader.loadSubScript("chrome://marionette/content/EventUtils.js", utils);
 loader.loadSubScript("chrome://marionette/content/ChromeUtils.js", utils);
 loader.loadSubScript("chrome://marionette/content/atoms.js", utils);
 
 let specialpowers = {};
 loader.loadSubScript("chrome://specialpowers/content/SpecialPowersObserver.js",
@@ -187,17 +186,16 @@ function MarionetteDriverActor(aConnecti
   this.messageManager = this.globalMessageManager;
   this.browsers = {}; //holds list of BrowserObjs
   this.curBrowser = null; // points to current browser
   this.context = "content";
   this.scriptTimeout = null;
   this.pageTimeout = null;
   this.timer = null;
   this.marionetteLog = new MarionetteLogObj();
-  this.marionettePerf = new MarionettePerfData();
   this.command_id = null;
   this.mainFrame = null; //topmost chrome frame
   this.curFrame = null; //subframe that currently has focus
   this.importedScripts = FileUtils.getFile('TmpD', ['marionettescriptchrome']);
   this.currentRemoteFrame = null; // a member of remoteFrames
   this.testName = null;
   this.mozBrowserClose = null;
 
@@ -647,33 +645,16 @@ MarionetteDriverActor.prototype = {
 
   /**
    * Return all logged messages.
    */
   getLogs: function MDA_getLogs() {
     this.command_id = this.getCommandId();
     this.sendResponse(this.marionetteLog.getLogs(), this.command_id);
   },
-  
-  /**
-   * Log some performance data
-   */
-  addPerfData: function MDA_addPerfData(aRequest) {
-    this.command_id = this.getCommandId();
-    this.marionettePerf.addPerfData(aRequest.suite, aRequest.name, aRequest.value);
-    this.sendOk(this.command_id);
-  },
-
-  /**
-   * Retrieve the performance data
-   */
-  getPerfData: function MDA_getPerfData() {
-    this.command_id = this.getCommandId();
-    this.sendResponse(this.marionettePerf.getPerfData(), this.command_id);
-  },
 
   /**
    * Sets the context of the subsequent commands to be either 'chrome' or 'content'
    *
    * @param object aRequest
    *        'value' member holds the name of the context to be switched to
    */
   setContext: function MDA_setContext(aRequest) {
@@ -815,17 +796,17 @@ MarionetteDriverActor.prototype = {
                        specialPowers: aRequest.specialPowers
                      },
                      command_id);
       return;
     }
 
     let curWindow = this.getCurrentWindow();
     let marionette = new Marionette(this, curWindow, "chrome",
-                                    this.marionetteLog, this.marionettePerf,
+                                    this.marionetteLog,
                                     timeout, this.testName);
     let _chromeSandbox = this.createExecuteSandbox(curWindow,
                                                    marionette,
                                                    aRequest.args,
                                                    aRequest.specialPowers,
                                                    command_id);
     if (!_chromeSandbox)
       return;
@@ -949,17 +930,17 @@ MarionetteDriverActor.prototype = {
       return;
     }
 
     let curWindow = this.getCurrentWindow();
     let original_onerror = curWindow.onerror;
     let that = this;
     that.timeout = timeout;
     let marionette = new Marionette(this, curWindow, "chrome",
-                                    this.marionetteLog, this.marionettePerf,
+                                    this.marionetteLog,
                                     timeout, this.testName);
     marionette.command_id = this.command_id;
 
     function chromeAsyncReturnFunc(value, status) {
       if (that._emu_cbs && Object.keys(that._emu_cbs).length) {
         value = "Emulator callback still pending when finish() called";
         status = 500;
         that._emu_cbs = null;
@@ -2199,19 +2180,16 @@ MarionetteDriverActor.prototype = {
         //log server-side messages
         logger.info(message.json.message);
         break;
       case "Marionette:shareData":
         //log messages from tests
         if (message.json.log) {
           this.marionetteLog.addLogs(message.json.log);
         }
-        if (message.json.perf) {
-          this.marionettePerf.appendPerfData(message.json.perf);
-        }
         break;
       case "Marionette:runEmulatorCmd":
         this.sendToClient(message.json, -1);
         break;
       case "Marionette:switchToFrame":
         // Switch to a remote frame.
         let thisWin = this.getCurrentWindow();
         let frameWindow = thisWin.QueryInterface(Ci.nsIInterfaceRequestor)
@@ -2304,18 +2282,16 @@ MarionetteDriverActor.prototype = {
 };
 
 MarionetteDriverActor.prototype.requestTypes = {
   "newSession": MarionetteDriverActor.prototype.newSession,
   "getSessionCapabilities": MarionetteDriverActor.prototype.getSessionCapabilities,
   "getStatus": MarionetteDriverActor.prototype.getStatus,
   "log": MarionetteDriverActor.prototype.log,
   "getLogs": MarionetteDriverActor.prototype.getLogs,
-  "addPerfData": MarionetteDriverActor.prototype.addPerfData,
-  "getPerfData": MarionetteDriverActor.prototype.getPerfData,
   "setContext": MarionetteDriverActor.prototype.setContext,
   "executeScript": MarionetteDriverActor.prototype.execute,
   "setScriptTimeout": MarionetteDriverActor.prototype.setScriptTimeout,
   "timeouts": MarionetteDriverActor.prototype.timeouts,
   "singleTap": MarionetteDriverActor.prototype.singleTap,
   "doubleTap": MarionetteDriverActor.prototype.doubleTap,
   "press": MarionetteDriverActor.prototype.press,
   "release": MarionetteDriverActor.prototype.release,
--- a/testing/marionette/marionette-listener.js
+++ b/testing/marionette/marionette-listener.js
@@ -8,34 +8,32 @@ let {classes: Cc, interfaces: Ci, utils:
 let uuidGen = Cc["@mozilla.org/uuid-generator;1"]
                 .getService(Ci.nsIUUIDGenerator);
 
 let loader = Cc["@mozilla.org/moz/jssubscript-loader;1"]
                .getService(Ci.mozIJSSubScriptLoader);
 
 loader.loadSubScript("chrome://marionette/content/marionette-simpletest.js");
 loader.loadSubScript("chrome://marionette/content/marionette-log-obj.js");
-loader.loadSubScript("chrome://marionette/content/marionette-perf.js");
 Cu.import("chrome://marionette/content/marionette-elements.js");
 Cu.import("resource://gre/modules/FileUtils.jsm");
 Cu.import("resource://gre/modules/NetUtil.jsm");
 Cu.import("resource://gre/modules/XPCOMUtils.jsm");
 let utils = {};
 utils.window = content;
 // Load Event/ChromeUtils for use with JS scripts:
 loader.loadSubScript("chrome://marionette/content/EventUtils.js", utils);
 loader.loadSubScript("chrome://marionette/content/ChromeUtils.js", utils);
 loader.loadSubScript("chrome://marionette/content/atoms.js", utils);
 loader.loadSubScript("chrome://marionette/content/marionette-sendkeys.js", utils);
 
 loader.loadSubScript("chrome://specialpowers/content/specialpowersAPI.js");
 loader.loadSubScript("chrome://specialpowers/content/specialpowers.js");
 
 let marionetteLogObj = new MarionetteLogObj();
-let marionettePerf = new MarionettePerfData();
 
 let isB2G = false;
 
 let marionetteTestName;
 let winUtil = content.QueryInterface(Ci.nsIInterfaceRequestor)
                      .getInterface(Ci.nsIDOMWindowUtils);
 let listenerId = null; //unique ID of this listener
 let activeFrame = null;
@@ -318,17 +316,17 @@ function createExecuteContentSandbox(aWi
   sandbox.global = sandbox;
   sandbox.window = aWindow;
   sandbox.document = sandbox.window.document;
   sandbox.navigator = sandbox.window.navigator;
   sandbox.testUtils = utils;
   sandbox.asyncTestCommandId = asyncTestCommandId;
 
   let marionette = new Marionette(this, aWindow, "content",
-                                  marionetteLogObj, marionettePerf,
+                                  marionetteLogObj,
                                   timeout, marionetteTestName);
   sandbox.marionette = marionette;
   marionette.exports.forEach(function(fn) {
     try {
       sandbox[fn] = marionette[fn].bind(marionette);
     }
     catch(e) {
       sandbox[fn] = marionette[fn];
@@ -339,20 +337,19 @@ function createExecuteContentSandbox(aWi
     return new SpecialPowers(aWindow);
   });
 
   sandbox.asyncComplete = function sandbox_asyncComplete(value, status, stack, commandId) {
     if (commandId == asyncTestCommandId) {
       curWindow.removeEventListener("unload", onunload, false);
       curWindow.clearTimeout(asyncTestTimeoutId);
 
-      sendSyncMessage("Marionette:shareData", {log: elementManager.wrapValue(marionetteLogObj.getLogs()),
-                                               perf: elementManager.wrapValue(marionettePerf.getPerfData())});
+      sendSyncMessage("Marionette:shareData",
+                      {log: elementManager.wrapValue(marionetteLogObj.getLogs())});
       marionetteLogObj.clearLogs();
-      marionettePerf.clearPerfData();
 
       if (status == 0){
         if (Object.keys(_emu_cbs).length) {
           _emu_cbs = {};
           sendError("Emulator callback still pending when finish() called",
                     500, null, commandId);
         }
         else {
@@ -408,20 +405,20 @@ function executeScript(msg, directInject
       if (importedScripts.exists()) {
         let stream = Components.classes["@mozilla.org/network/file-input-stream;1"].  
                       createInstance(Components.interfaces.nsIFileInputStream);
         stream.init(importedScripts, -1, 0, 0);
         let data = NetUtil.readInputStreamToString(stream, stream.available());
         script = data + script;
       }
       let res = Cu.evalInSandbox(script, sandbox, "1.8");
-      sendSyncMessage("Marionette:shareData", {log: elementManager.wrapValue(marionetteLogObj.getLogs()),
-                                               perf: elementManager.wrapValue(marionettePerf.getPerfData())});
+      sendSyncMessage("Marionette:shareData",
+                      {log: elementManager.wrapValue(marionetteLogObj.getLogs())});
       marionetteLogObj.clearLogs();
-      marionettePerf.clearPerfData();
+
       if (res == undefined || res.passed == undefined) {
         sendError("Marionette.finish() not called", 17, null, asyncTestCommandId);
       }
       else {
         sendResponse({value: elementManager.wrapValue(res)}, asyncTestCommandId);
       }
     }
     else {
@@ -439,20 +436,19 @@ function executeScript(msg, directInject
       if (importedScripts.exists()) {
         let stream = Components.classes["@mozilla.org/network/file-input-stream;1"].  
                       createInstance(Components.interfaces.nsIFileInputStream);
         stream.init(importedScripts, -1, 0, 0);
         let data = NetUtil.readInputStreamToString(stream, stream.available());
         scriptSrc = data + scriptSrc;
       }
       let res = Cu.evalInSandbox(scriptSrc, sandbox, "1.8");
-      sendSyncMessage("Marionette:shareData", {log: elementManager.wrapValue(marionetteLogObj.getLogs()),
-                                               perf: elementManager.wrapValue(marionettePerf.getPerfData())});
+      sendSyncMessage("Marionette:shareData",
+                      {log: elementManager.wrapValue(marionetteLogObj.getLogs())});
       marionetteLogObj.clearLogs();
-      marionettePerf.clearPerfData();
       sendResponse({value: elementManager.wrapValue(res)}, asyncTestCommandId);
     }
   }
   catch (e) {
     // 17 = JavascriptException
     sendError(e.name + ': ' + e.message, 17, e.stack, asyncTestCommandId);
   }
 }
deleted file mode 100644
--- a/testing/marionette/marionette-perf.js
+++ /dev/null
@@ -1,83 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-function MarionettePerfData() {
-  this.perfData = {};
-}
-MarionettePerfData.prototype = {
-  /**
-   * Add performance data. 
-   *
-   * Datapoints within a testSuite get rolled up into
-   * one value in Datazilla. You can then drill down to
-   * individual (testName,data) pairs
-   * 
-   * If the testSuite and testName exist, the data will
-   * be added to this dataset.
-   *
-   * @param testSuite String
-   *        name of the test suite
-   * @param testName String
-   *        name of the test
-   * @param object data
-   *        data value to store
-   */
-  addPerfData: function Marionette__addPerfData(testSuite, testName, data) {
-    if (this.perfData[testSuite]) {
-      if (this.perfData[testSuite][testName]) {
-        this.perfData[testSuite][testName].push(data);
-      }
-      else {
-        this.perfData[testSuite][testName.toString()] = [data];
-      }
-    }
-    else {
-      this.perfData[testSuite] = {}
-      this.perfData[testSuite][testName.toString()] = [data];
-    }
-  },
-
-  /**
-   * Join another set of performance data this this set.
-   * Used by the actor to join data gathered from the listener
-   * @param object data
-   *        The performance data to join
-   */
-  appendPerfData: function Marionette__appendPerfData(data) {
-    for (var suite in data) {
-      if (data.hasOwnProperty(suite)) {
-        if (this.perfData[suite]) {
-          for (var test in data[suite]) {
-            if (this.perfData[suite][test]) {
-              this.perfData[suite][test] = this.perfData[suite][test].concat(data[suite][test]);
-            }
-            else {
-              this.perfData[suite][test] = data[suite][test];
-            }
-          }
-        }
-        else {
-          this.perfData[suite] = data[suite];
-        }
-      }
-    }
-  },
-
-  /**
-   * Retrieve the performance data
-   *        
-   * @return object
-   *      Returns a list of test names to metric value
-   */
-  getPerfData: function Marionette__getPerfData() {
-    return this.perfData;
-  },
-
-  /**
-   * Clears the current performance data
-   */
-  clearPerfData: function Marionette_clearPerfData() {
-    this.perfData = {};
-  },
-}
--- a/testing/marionette/marionette-simpletest.js
+++ b/testing/marionette/marionette-simpletest.js
@@ -1,33 +1,32 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 /*
  * The Marionette object, passed to the script context.
  */
 
-function Marionette(scope, window, context, logObj, perfData, timeout, testName) {
+function Marionette(scope, window, context, logObj, timeout, testName) {
   this.scope = scope;
   this.window = window;
   this.tests = [];
   this.logObj = logObj;
-  this.perfData = perfData;
   this.context = context;
   this.timeout = timeout;
   this.testName = testName;
   this.TEST_UNEXPECTED_FAIL = "TEST-UNEXPECTED-FAIL";
   this.TEST_PASS = "TEST-PASS";
   this.TEST_KNOWN_FAIL = "TEST-KNOWN-FAIL";
 }
 
 Marionette.prototype = {
   exports: ['ok', 'is', 'isnot', 'log', 'getLogs', 'generate_results', 'waitFor',
-            'runEmulatorCmd', 'addPerfData', 'getPerfData', 'TEST_PASS',
-            'TEST_KNOWN_FAIL', 'TEST_UNEXPECTED_FAIL'],
+            'runEmulatorCmd', 'TEST_PASS', 'TEST_KNOWN_FAIL',
+            'TEST_UNEXPECTED_FAIL'],
 
   ok: function Marionette__ok(condition, name, passString, failString, diag) {
     if (typeof(diag) == "undefined") {
       diag = this.repr(condition) + " was " + !!condition + ", expected true";
     }
     let test = {'result': !!condition, 'name': name, 'diag': diag};
     this.logResult(test,
                    typeof(passString) == "undefined" ? this.TEST_PASS : passString,
@@ -44,24 +43,16 @@ Marionette.prototype = {
 
   isnot: function Marionette__isnot (a, b, name, passString, failString) {
     let pass = (a != b);
     let diag = pass ? this.repr(a) + " should not equal " + this.repr(b)
                     : "didn't expect " + this.repr(a) + ", but got it";
     this.ok(pass, name, passString, failString, diag);
   },
 
-  addPerfData: function Marionette__addPerfData(testSuite, testName, data) {
-    this.perfData.addPerfData(testSuite, testName, data);
-  },
-
-  getPerfData: function Marionette__getPerfData() {
-    return this.perfData.perfData;
-  },
-
   log: function Marionette__log(msg, level) {
     dump("MARIONETTE LOG: " + (level ? level : "INFO") + ": " + msg + "\n");
     if (this.logObj != null) {
       this.logObj.log(msg, level);
     }
   },
 
   getLogs: function Marionette__getLogs() {