Bug 1546741. Land initial idle-power-usage-measurement test for Android. r=rwood
author Stephen Donner <stephen.donner@gmail.com>
date Wed, 08 May 2019 19:21:12 +0000
changeset 531938 751aaa1009d37b550c2396081bb4a9d5de8c0238
parent 531937 efc907d677c24b15d2f652e41efa6bca3239fbcc
child 531939 d8c72aea4006e2797425d8c0075025f2ee651410
push id 11265
push user ffxbld-merge
push date Mon, 13 May 2019 10:53:39 +0000
treeherder mozilla-beta@77e0fe8dbdd3
reviewers rwood
bugs 1546741
milestone 68.0a1
Bug 1546741. Land initial idle-power-usage-measurement test for Android. r=rwood Differential Revision: https://phabricator.services.mozilla.com/D29654
testing/raptor/raptor/manifest.py
testing/raptor/raptor/output.py
testing/raptor/raptor/raptor.ini
testing/raptor/raptor/tests/raptor-scn-power-idle.ini
testing/raptor/webext/raptor/runner.js
--- a/testing/raptor/raptor/manifest.py
+++ b/testing/raptor/raptor/manifest.py
@@ -17,16 +17,17 @@ LOG = get_proxy_logger(component="raptor
 
 required_settings = [
     'alert_threshold',
     'apps',
     'lower_is_better',
     'measure',
     'page_cycles',
     'test_url',
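+    # required only for scenario-type tests (see validate_test_ini below)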
+    'scenario_time',
     'type',
     'unit',
 ]
 
 playback_settings = [
     'playback_pageset_manifest',
     'playback_recordings',
 ]
@@ -67,16 +68,18 @@ def get_browser_test_list(browser_app, r
 def validate_test_ini(test_details):
     # validate all required test details were found in the test INI
     valid_settings = True
 
     for setting in required_settings:
         # measure setting not required for benchmark type tests
         if setting == 'measure' and test_details['type'] == 'benchmark':
             continue
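+        # likewise, scenario_time applies only to scenario-type tests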
+        if setting == 'scenario_time' and test_details['type'] != 'scenario':
+            continue
         if test_details.get(setting) is None:
             # if page-cycles is not specified, it's ok as long as browser-cycles is there
             if setting == "page-cycles" and test_details.get('browser_cycles') is not None:
                 continue
             valid_settings = False
             LOG.error("ERROR: setting '%s' is required but not found in %s"
                       % (setting, test_details['manifest']))
 
@@ -185,16 +188,19 @@ def write_test_settings_json(args, test_
             'gecko_profile_interval': int(test_details.get('gecko_profile_interval')),
             'gecko_profile_threads': ','.join(set(threads)),
         })
 
     if test_details.get("newtab_per_cycle", None) is not None:
         test_settings['raptor-options']['newtab_per_cycle'] = \
             bool(test_details['newtab_per_cycle'])
 
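+    # scenario tests: pass the scenario duration through to the webext runner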
+    if test_details['type'] == "scenario":
+        test_settings['raptor-options']['scenario_time'] = test_details['scenario_time']
+
     settings_file = os.path.join(tests_dir, test_details['name'] + '.json')
     try:
         with open(settings_file, 'w') as out_file:
             json.dump(test_settings, out_file, indent=4, ensure_ascii=False)
             out_file.close()
     except IOError:
         LOG.info("abort: exception writing test settings json!")
 
--- a/testing/raptor/raptor/output.py
+++ b/testing/raptor/raptor/output.py
@@ -65,17 +65,17 @@ class Output(object):
             if test.cold is True:
                 suite['cold'] = True
                 suite['browser_cycle'] = int(test.browser_cycle)
                 suite['expected_browser_cycles'] = int(test.expected_browser_cycles)
 
             suites.append(suite)
 
             # process results for pageloader type of tests
-            if test.type == "pageload":
+            if test.type in ("pageload", "scenario"):
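+                # scenario tests report one placeholder measurement, processed
+                # through the same path as a pageload subtest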
                 # each test can report multiple measurements per pageload
                 # each measurement becomes a subtest inside the 'suite'
 
                 # this is the format we receive the results in from the pageload test
                 # i.e. one test (subtest) in raptor-firefox-tp6:
 
                 # {u'name': u'raptor-firefox-tp6-amazon', u'type': u'pageload', u'measurements':
                 # {u'fnbpaint': [788, 315, 334, 286, 318, 276, 296, 296, 292, 285, 268, 277, 274,
--- a/testing/raptor/raptor/raptor.ini
+++ b/testing/raptor/raptor/raptor.ini
@@ -59,8 +59,11 @@
 [include:tests/raptor-wasm-godot-baseline.ini]
 [include:tests/raptor-wasm-godot-ion.ini]
 [include:tests/raptor-wasm-godot-cranelift.ini]
 [include:tests/raptor-wasm-misc.ini]
 [include:tests/raptor-wasm-misc-baseline.ini]
 [include:tests/raptor-wasm-misc-ion.ini]
 [include:tests/raptor-wasm-misc-cranelift.ini]
 [include:tests/raptor-webaudio.ini]
+
+# raptor scenario tests
+[include:tests/raptor-scn-power-idle.ini]
new file mode 100644
--- /dev/null
+++ b/testing/raptor/raptor/tests/raptor-scn-power-idle.ini
@@ -0,0 +1,16 @@
+[DEFAULT]
+type = scenario
+test_url = about:blank
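+# run idle for 20 minutes (1200000 ms); page_timeout allows a 2-minute buffer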
+scenario_time = 1200000
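+# the runner reports a single placeholder measurement once the timer expires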
+measure = fakeMeasure
+unit = scenarioComplete
+page_cycles = 1
+page_timeout = 1320000
+lower_is_better = true
+alert_threshold = 2.0
+
+[raptor-scn-power-idle-fennec]
+apps = fennec
+
+[raptor-scn-power-idle-geckoview]
+apps = geckoview
--- a/testing/raptor/webext/raptor/runner.js
+++ b/testing/raptor/webext/raptor/runner.js
@@ -13,16 +13,17 @@
 // repo) or 'webkit/PerformanceTests' dir (for benchmarks) first run:
 // 'python -m SimpleHTTPServer 8081'
 // to serve out the pages that we want to prototype with. Also
 // update the manifest content 'matches' accordingly
 
 // Supported test types
 const TEST_BENCHMARK = "benchmark";
 const TEST_PAGE_LOAD = "pageload";
+const TEST_SCENARIO = "scenario";
 
 // when the browser starts this webext runner will start automatically; we
 // want to give the browser some time (ms) to settle before starting tests
 var postStartupDelay;
 
 // delay (ms) between pageload cycles
 var pageCycleDelay = 1000;
 
@@ -37,30 +38,32 @@ var csPort = null;
 var host = null;
 var benchmarkPort = null;
 var testType;
 var browserCycle = 0;
 var pageCycles = 0;
 var pageCycle = 0;
 var testURL;
 var testTabID = 0;
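+// default scenario run time (ms); overridden by scenario_time from the test settings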
+var scenarioTestTime = 60000;
 var getHero = false;
 var getFNBPaint = false;
 var getFCP = false;
 var getDCF = false;
 var getTTFI = false;
 var getLoadTime = false;
 var isHeroPending = false;
 var pendingHeroes = [];
 var settings = {};
 var isFNBPaintPending = false;
 var isFCPPending = false;
 var isDCFPending = false;
 var isTTFIPending = false;
 var isLoadTimePending = false;
+var isScenarioPending = false;
 var isBenchmarkPending = false;
 var pageTimeout = 10000; // default pageload timeout
 var geckoProfiling = false;
 var geckoInterval = 1;
 var geckoEntries = 1000000;
 var geckoThreads = [];
 var debugMode = 0;
 var screenCapture = false;
@@ -82,16 +85,17 @@ function getTestSettings() {
       response.text().then(function(text) {
         console.log(text);
         settings = JSON.parse(text)["raptor-options"];
 
         // parse the test settings
         testType = settings.type;
         pageCycles = settings.page_cycles;
         testURL = settings.test_url;
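+        // scenario tests: total run time (ms) before results are reported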
+        scenarioTestTime = settings.scenario_time;
 
         // for pageload type tests, the testURL is fine as is - we don't have
         // to add a port as it's accessed via proxy and the playback tool
         // however for benchmark tests, their source is served out on a local
         // webserver, so we need to swap in the webserver port into the testURL
         if (testType == TEST_BENCHMARK) {
           // just replace the '<port>' keyword in the URL with actual benchmarkPort
           testURL = testURL.replace("<port>", benchmarkPort);
@@ -209,16 +213,24 @@ function getBrowserInfo() {
         }
       }
       console.log(`testing on ${results.browser}`);
       resolve();
     }
   });
 }
 
+function scenarioTimer() {
+  postToControlServer("status", `started scenario test timer`);
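+  // after scenario_time ms, clear the pending flag and record a placeholder result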
+  setTimeout(function() {
+    isScenarioPending = false;
+    results.measurements.scenario = [1];
+  }, scenarioTestTime);
+}
+
 function testTabCreated(tab) {
   testTabID = tab.id;
   postToControlServer("status", `opened new empty tab: ${testTabID}`);
   // update raptor browser toolbar icon text, for a visual indicator when debugging
   ext.browserAction.setTitle({title: "Raptor RUNNING"});
 }
 
 function testTabRemoved(tab) {
@@ -254,16 +266,26 @@ async function waitForResult() {
               !isDCFPending &&
               !isTTFIPending &&
               !isLoadTimePending) {
             resolve();
           } else {
             setTimeout(checkForResult, 250);
           }
           break;
+
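+        // scenario tests are done once scenarioTimer() clears the pending flag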
+        case TEST_SCENARIO:
+          if (!isScenarioPending) {
+            cancelTimeoutAlarm("raptor-page-timeout");
+            postToControlServer("status", `scenario test ended`);
+            resolve();
+          } else {
+            setTimeout(checkForResult, 5);
+          }
+          break;
       }
     }
 
     checkForResult();
   });
 
   cancelTimeoutAlarm("raptor-page-timeout");
 
@@ -363,31 +385,38 @@ async function nextCycle() {
             isFCPPending = true;
           if (getDCF)
             isDCFPending = true;
           if (getTTFI)
             isTTFIPending = true;
           if (getLoadTime)
             isLoadTimePending = true;
           break;
+
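+        // scenario tests have no per-page measurements; just flag the run as pending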
+        case TEST_SCENARIO:
+          isScenarioPending = true;
+          break;
       }
 
       if (reuseTab && testTabID != 0) {
         // close previous test tab
         ext.tabs.remove(testTabID);
         postToControlServer("status", `closing Tab: ${testTabID}`);
 
         // open new tab
         ext.tabs.create({url: "about:blank"});
         postToControlServer("status", "Open new tab");
       }
       setTimeout(function() {
         postToControlServer("status", `update tab: ${testTabID}`);
         // update the test page - browse to our test URL
         ext.tabs.update(testTabID, {url: testURL}, testTabUpdated);
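+        // scenario tests: start the countdown once the test URL begins loading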
+        if (testType == TEST_SCENARIO) {
+          scenarioTimer();
+        }
         }, newTabDelay);
       }, pageCycleDelay);
     } else {
       verifyResults();
     }
 }
 
 async function timeoutAlarmListener() {