Bug 1454053 - re-record tp6 pages for raptor; r=igoldan
authorRob Wood <rwood@mozilla.com>
Tue, 24 Jul 2018 09:33:10 -0400
changeset 428162 70d57bb33a7fe978d5f1ff120fb066059fe3c341
parent 428161 5cc1be9e8d1428489aaa2ff4d101d4999a1772e9
child 428163 da4ff0f7bff51632758a9081d4ad3b6fb1be0823
push id 105635
push user nbeleuzu@mozilla.com
push date Tue, 24 Jul 2018 22:23:10 +0000
treeherder mozilla-inbound@4ae19227350c [default view] [failures only]
perfherder [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers igoldan
bugs 1454053
milestone 63.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1454053 - re-record tp6 pages for raptor; r=igoldan MozReview-Commit-ID: Esy7cWeADdy
testing/mozharness/mozharness/mozilla/testing/raptor.py
testing/raptor/raptor/manifest.py
testing/raptor/raptor/output.py
testing/raptor/raptor/playback/mitmproxy-playback-set.manifest
testing/raptor/raptor/playback/mitmproxy-recordings-raptor-tp6.manifest
testing/raptor/raptor/playback/mitmproxy.py
testing/raptor/raptor/tests/raptor-tp6.ini
testing/raptor/test/test_playback.py
testing/raptor/webext/raptor/manifest.json
testing/raptor/webext/raptor/measure.js
--- a/testing/mozharness/mozharness/mozilla/testing/raptor.py
+++ b/testing/mozharness/mozharness/mozilla/testing/raptor.py
@@ -367,30 +367,30 @@ class Raptor(TestingMixin, MercurialScri
         self.info("Validating PERFHERDER_DATA against %s" % schema_path)
         try:
             with open(schema_path) as f:
                 schema = json.load(f)
             data = json.loads(parser.found_perf_data[0])
             jsonschema.validate(data, schema)
         except Exception as e:
             self.exception("Error while validating PERFHERDER_DATA")
-            self.info(e)
+            self.info(str(e))
 
     def _artifact_perf_data(self, dest):
         src = os.path.join(self.query_abs_dirs()['abs_work_dir'], 'raptor.json')
         if not os.path.isdir(os.path.dirname(dest)):
             # create upload dir if it doesn't already exist
             self.info("creating dir: %s" % os.path.dirname(dest))
             os.makedirs(os.path.dirname(dest))
         self.info('copying raptor results from %s to %s' % (src, dest))
         try:
             copyfile(src, dest)
         except Exception as e:
             self.critical("Error copying results %s to upload dir %s" % (src, dest))
-            self.info(e)
+            self.info(str(e))
 
     def run_tests(self, args=None, **kw):
         """run raptor tests"""
 
         # get raptor options
         options = self.raptor_options(args=args, **kw)
 
         # python version check
--- a/testing/raptor/raptor/manifest.py
+++ b/testing/raptor/raptor/manifest.py
@@ -13,18 +13,17 @@ from utils import transform_platform
 here = os.path.abspath(os.path.dirname(__file__))
 raptor_ini = os.path.join(here, 'raptor.ini')
 tests_dir = os.path.join(here, 'tests')
 LOG = get_proxy_logger(component="raptor-manifest")
 
 required_settings = ['apps', 'type', 'page_cycles', 'test_url', 'measure',
                      'unit', 'lower_is_better', 'alert_threshold']
 
-playback_settings = ['playback_binary_manifest', 'playback_binary_zip_mac',
-                     'playback_pageset_manifest', 'playback_pageset_zip_mac',
+playback_settings = ['playback_binary_manifest', 'playback_pageset_manifest',
                      'playback_recordings', 'python3_win_manifest']
 
 
 def filter_app(tests, values):
     for test in tests:
         if values["app"] in test['apps']:
             yield test
 
--- a/testing/raptor/raptor/output.py
+++ b/testing/raptor/raptor/output.py
@@ -89,24 +89,25 @@ class Output(object):
             elif test.type == "benchmark":
                 if 'speedometer' in test.measurements:
                     subtests, vals = self.parseSpeedometerOutput(test)
                 elif 'motionmark' in test.measurements:
                     subtests, vals = self.parseMotionmarkOutput(test)
                 elif 'webaudio' in test.measurements:
                     subtests, vals = self.parseWebaudioOutput(test)
                 suite['subtests'] = subtests
+
+                # if there is more than one subtest, calculate a summary result
+                if len(subtests) > 1:
+                    suite['value'] = self.construct_summary(vals, testname=test.name)
+
             else:
                 LOG.error("output.summarize received unsupported test results type")
                 return
 
-        # if there is more than one subtest, calculate a summary result
-        if len(subtests) > 1:
-            suite['value'] = self.construct_results(vals, testname=test.name)
-
         self.summarized_results = test_results
 
     def parseSpeedometerOutput(self, test):
         # each benchmark 'index' becomes a subtest; each pagecycle / iteration
         # of the test has multiple values per index/subtest
 
         # this is the format we receive the results in from the benchmark
         # i.e. this is ONE pagecycle of speedometer:
@@ -336,17 +337,17 @@ class Output(object):
         # the 4 test values, not the sub test values.
         if len(results) != 52:
             raise Exception("StyleBench has 52 subtests, found: %s instead" % len(results))
 
         results = results[12::13]
         score = 60 * 1000 / filter.geometric_mean(results) / correctionFactor
         return score
 
-    def construct_results(self, vals, testname):
+    def construct_summary(self, vals, testname):
         if testname.startswith('raptor-v8_7'):
             return self.v8_Metric(vals)
         elif testname.startswith('raptor-kraken'):
             return self.JS_Metric(vals)
         elif testname.startswith('raptor-jetstream'):
             return self.benchmark_score(vals)
         elif testname.startswith('raptor-speedometer'):
             return self.speedometer_score(vals)
rename from testing/raptor/raptor/playback/mitmproxy-playback-set.manifest
rename to testing/raptor/raptor/playback/mitmproxy-recordings-raptor-tp6.manifest
--- a/testing/raptor/raptor/playback/mitmproxy-playback-set.manifest
+++ b/testing/raptor/raptor/playback/mitmproxy-recordings-raptor-tp6.manifest
@@ -1,9 +1,9 @@
 [
     {
-        "filename": "mitmproxy-recording-set-win10.zip",
-        "size": 9189938,
-        "digest": "e904917ed6bf1cef7201284385dc603a283e8e22f992876f17edcf0f1f20db95b609f0d8c7f593b4a0a6c20957dcb6a4d502c562ed74fb6cf4bc255c2f691f32",
+        "filename": "mitmproxy-recordings-raptor-tp6.zip",
+        "size": 8767174,
+        "digest": "3343f54b6c727a2061534872888da4e3aa647e81903c675dc318c717ed0c93f6ce4e2b98c66eb4128376cf8507590531283c95f3951259607edaaae28944d9a5",
         "algorithm": "sha512",
         "unpack": true
     }
 ]
\ No newline at end of file
--- a/testing/raptor/raptor/playback/mitmproxy.py
+++ b/testing/raptor/raptor/playback/mitmproxy.py
@@ -144,17 +144,17 @@ class Mitmproxy(Playback, Python3Virtual
             LOG.info("python3 path is: %s" % self.py3_path)
         else:
             # on osx and linux we use pre-built binaries
             LOG.info("downloading mitmproxy binary")
             _manifest = os.path.join(here, self.config['playback_binary_manifest'])
             transformed_manifest = transform_platform(_manifest, self.config['platform'])
             self._tooltool_fetch(transformed_manifest)
 
-        # we use one pageset for all platforms (pageset was recorded on win10)
+        # we use one pageset for all platforms
         LOG.info("downloading mitmproxy pageset")
         _manifest = os.path.join(here, self.config['playback_pageset_manifest'])
         transformed_manifest = transform_platform(_manifest, self.config['platform'])
         self._tooltool_fetch(transformed_manifest)
         return
 
     def fetch_python3(self):
         """Mitmproxy on windows needs Python 3.x"""
--- a/testing/raptor/raptor/tests/raptor-tp6.ini
+++ b/testing/raptor/raptor/tests/raptor-tp6.ini
@@ -3,66 +3,65 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 # raptor tp6
 
 [DEFAULT]
 type =  pageload
 playback = mitmproxy
 playback_binary_manifest = mitmproxy-rel-bin-{platform}.manifest
-playback_binary_zip_mac = mitmproxy-2.0.2-{platform}.tar.gz
 python3_win_manifest = python3{x64}.manifest
-playback_pageset_manifest = mitmproxy-playback-set.manifest
-playback_pageset_zip_mac = mitmproxy-recording-set-win10.zip
+playback_pageset_manifest = mitmproxy-recordings-raptor-tp6.manifest
 page_cycles = 25
 unit = ms
 lower_is_better = true
 alert_threshold = 2.0
 
 [raptor-firefox-tp6-amazon]
 apps = firefox
 test_url = https://www.amazon.com/s/url=search-alias%3Daps&field-keywords=laptop
-playback_recordings = mitmproxy-recording-amazon.mp
+playback_recordings = amazon.mp
 measure = fnbpaint
 
 [raptor-firefox-tp6-facebook]
 apps = firefox
 test_url = https://www.facebook.com
-playback_recordings = mitmproxy-recording-facebook.mp
+playback_recordings = facebook.mp
 measure = fnbpaint
 
 [raptor-firefox-tp6-google]
 apps = firefox
-test_url = https://www.google.com/#hl=en&q=barack+obama
-playback_recordings = mitmproxy-recording-google.mp
-measure = fnbpaint, hero
-hero = hero
+# note: use the full url as the first part (without '&cad=h') redirects
+# to the url + '&cad=h'; that redirection causes measure.js content
+# to be loaded into that page also; resulting in 2 fnbpaint values etc.
+test_url = https://www.google.com/search?hl=en&q=barack+obama&cad=h
+playback_recordings = google-search.mp
+measure = fnbpaint
 
 [raptor-firefox-tp6-youtube]
 apps = firefox
 test_url = https://www.youtube.com
-playback_recordings = mitmproxy-recording-youtube.mp
+playback_recordings = youtube.mp
 measure = fnbpaint
 
 [raptor-chrome-tp6-amazon]
 apps = chrome
 test_url = https://www.amazon.com/s/url=search-alias%3Daps&field-keywords=laptop
-playback_recordings = mitmproxy-recording-amazon.mp
+playback_recordings = amazon.mp
 measure = fcp
 
 [raptor-chrome-tp6-facebook]
 apps = chrome
 test_url = https://www.facebook.com
-playback_recordings = mitmproxy-recording-facebook.mp
+playback_recordings = facebook.mp
 measure = fcp
 
 [raptor-chrome-tp6-google]
 apps = chrome
 test_url = https://www.google.com/#hl=en&q=barack+obama
-playback_recordings = mitmproxy-recording-google.mp
-measure = fcp, hero
-hero = hero
+playback_recordings = google-search.mp
+measure = fcp
 
 [raptor-chrome-tp6-youtube]
 apps = chrome
 test_url = https://www.youtube.com
-playback_recordings = mitmproxy-recording-youtube.mp
+playback_recordings = youtube.mp
 measure = fcp
--- a/testing/raptor/test/test_playback.py
+++ b/testing/raptor/test/test_playback.py
@@ -18,19 +18,17 @@ def test_get_playback(get_binary):
     config['platform'] = mozinfo.os
     if 'win' in config['platform']:
         # this test is not yet supported on windows
         assert True
         return
     config['obj_path'] = os.path.dirname(get_binary('firefox'))
     config['playback_tool'] = 'mitmproxy'
     config['playback_binary_manifest'] = 'mitmproxy-rel-bin-osx.manifest'
-    config['playback_binary_zip_mac'] = 'mitmproxy-2.0.2-osx.tar.gz'
     config['playback_pageset_manifest'] = 'mitmproxy-playback-set.manifest'
-    config['playback_pageset_zip_mac'] = 'mitmproxy-recording-set-win10.zip'
     config['playback_recordings'] = 'mitmproxy-recording-amazon.mp'
     config['binary'] = get_binary('firefox')
     playback = get_playback(config)
     assert isinstance(playback, Mitmproxy)
     playback.stop()
 
 
 def test_get_unsupported_playback():
--- a/testing/raptor/webext/raptor/manifest.json
+++ b/testing/raptor/webext/raptor/manifest.json
@@ -11,17 +11,16 @@
   "background": {
     "scripts": ["auto_gen_test_config.js", "runner.js"]
   },
   "content_scripts": [
     {
       "matches": ["*://*.amazon.com/*",
                   "*://*.facebook.com/*",
                   "*://*.google.com/*",
-                  "*://*.google.ca/*",
                   "*://*.youtube.com/*"],
       "js": ["measure.js"]
     },
     {
       "matches": ["*://*/Speedometer/index.html*",
                   "*://*/StyleBench/*",
                   "*://*/webaudio/*",
                   "*://*/MotionMark/*"],
--- a/testing/raptor/webext/raptor/measure.js
+++ b/testing/raptor/webext/raptor/measure.js
@@ -88,17 +88,17 @@ function measureHero() {
         // mark the time now as when hero element received
         perfData.mark(heroFound);
         console.log("found hero:" + heroFound);
        // calculate result: performance.timing.fetchStart - time when we got hero element
         perfData.measure(name = resultType,
                          startMark = startMeasure,
                          endMark = heroFound);
         var perfResult = perfData.getEntriesByName(resultType);
-        var _result = perfResult[0].duration;
+        var _result = Math.round(perfResult[0].duration);
         var resultType = "hero:" + heroFound;
         sendResult(resultType, _result);
         perfData.clearMarks();
         perfData.clearMeasures();
         obs.disconnect();
       });
     }
     // we want the element 100% visible on the viewport