Bug 1522604 - Change the files in the testing/raptor/raptor folder to use the new RaptorLogger, plus cleanup (draft)
author: pulselistener
Mon, 27 May 2019 13:47:38 +0000
changeset 2026405 c79c20d3cb8f790676e0809b275dfc41cb78e1a3
parent 2026404 06c8c03b3c73039a1862041341129f680cfd7939
child 2026406 85c747a337cafb6d31c0e0af5423dfde6689050f
push id: 368007
push user: reviewbot
push date: Mon, 27 May 2019 13:48:24 +0000
treeherder: try@85c747a337ca [default view] [failures only]
bugs: 1522604
milestone: 68.0a1
Bug 1522604 - Change the files in the testing/raptor/raptor folder to use the new RaptorLogger, plus cleanup.
Differential Revision: https://phabricator.services.mozilla.com/D32106
Differential Diff: PHID-DIFF-kpibtcu523v6o5oxjus6
testing/raptor/raptor/RaptorLogger.py
testing/raptor/raptor/benchmark.py
testing/raptor/raptor/control_server.py
testing/raptor/raptor/gecko_profile.py
testing/raptor/raptor/gen_test_config.py
testing/raptor/raptor/manifest.py
testing/raptor/raptor/output.py
testing/raptor/raptor/outputhandler.py
testing/raptor/raptor/raptor.py
testing/raptor/raptor/results.py
testing/raptor/raptor/utils.py
--- a/testing/raptor/raptor/RaptorLogger.py
+++ b/testing/raptor/raptor/RaptorLogger.py
@@ -1,17 +1,17 @@
 #!/usr/bin/env python
 
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import
 
-from mozlog import ProxyLogger
+from mozlog.proxy import ProxyLogger
 
 
 class RaptorLogger(ProxyLogger):
 
     def __init__(self, component=None):
         self.component = component
         self.logger = ProxyLogger(self.component)
 
--- a/testing/raptor/raptor/benchmark.py
+++ b/testing/raptor/raptor/benchmark.py
@@ -3,30 +3,30 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 from __future__ import absolute_import
 
 import os
 import shutil
 import socket
 
-from mozlog import get_proxy_logger
+from RaptorLogger import RaptorLogger
 
 from wptserve import server, handlers
 
-LOG = get_proxy_logger(component="raptor-benchmark")
 here = os.path.abspath(os.path.dirname(__file__))
 
 
 class Benchmark(object):
     """utility class for running benchmarks in raptor"""
 
     def __init__(self, config, test):
         self.config = config
         self.test = test
+        self.log = RaptorLogger(component='raptor-benchmark')
 
         # bench_dir is where we will download all mitmproxy required files
         # when running locally it comes from obj_path via mozharness/mach
         if self.config.get("obj_path", None) is not None:
             self.bench_dir = self.config.get("obj_path")
         else:
             # in production it is ../tasks/task_N/build/tests/raptor/raptor/...
             # 'here' is that path, we can start with that
@@ -43,18 +43,18 @@ class Benchmark(object):
             # sure they get copied over.
             fetches_dir = os.environ.get('MOZ_FETCHES_DIR')
             if fetches_dir and os.path.isdir(fetches_dir):
                 for name in os.listdir(fetches_dir):
                     path = os.path.join(fetches_dir, name)
                     if os.path.isdir(path):
                         shutil.copytree(path, os.path.join(self.bench_dir, name))
 
-        LOG.info("bench_dir contains:")
-        LOG.info(os.listdir(self.bench_dir))
+        self.log.info("bench_dir contains:")
+        self.log.info(os.listdir(self.bench_dir))
 
         # now have the benchmark source ready, go ahead and serve it up!
         self.start_http_server()
 
     def start_http_server(self):
         self.write_server_headers()
 
         # pick a free port
@@ -65,27 +65,27 @@ class Benchmark(object):
         _webserver = '%s:%d' % (self.config['host'], self.port)
 
         self.httpd = self.setup_webserver(_webserver)
         self.httpd.start()
 
     def write_server_headers(self):
         # to add specific headers for serving files via wptserve, write out a headers dir file
         # see http://wptserve.readthedocs.io/en/latest/handlers.html#file-handlers
-        LOG.info("writing wptserve headers file")
+        self.log.info("writing wptserve headers file")
         headers_file = os.path.join(self.bench_dir, '__dir__.headers')
         file = open(headers_file, 'w')
         file.write("Access-Control-Allow-Origin: *")
         file.close()
-        LOG.info("wrote wpt headers file: %s" % headers_file)
+        self.log.info("wrote wpt headers file: %s" % headers_file)
 
     def setup_webserver(self, webserver):
-        LOG.info("starting webserver on %r" % webserver)
-        LOG.info("serving benchmarks from here: %s" % self.bench_dir)
+        self.log.info("starting webserver on %r" % webserver)
+        self.log.info("serving benchmarks from here: %s" % self.bench_dir)
         self.host, self.port = webserver.split(':')
 
         return server.WebTestHttpd(host=self.host, port=int(self.port),
                                    doc_root=self.bench_dir,
                                    routes=[("GET", "*", handlers.file_handler)])
 
     def stop_serve(self):
-        LOG.info("TODO: stop serving benchmark source")
+        self.log.info("TODO: stop serving benchmark source")
         pass
--- a/testing/raptor/raptor/control_server.py
+++ b/testing/raptor/raptor/control_server.py
@@ -9,19 +9,19 @@ from __future__ import absolute_import
 import BaseHTTPServer
 import datetime
 import json
 import os
 import socket
 import threading
 import time
 
-from mozlog import get_proxy_logger
+from RaptorLogger import RaptorLogger
 
-LOG = get_proxy_logger(component='raptor-control-server')
+LOG = RaptorLogger(component='raptor-control-server')
 
 here = os.path.abspath(os.path.dirname(__file__))
 
 
 def MakeCustomHandlerClass(results_handler, shutdown_browser, write_raw_gecko_profile):
 
     class MyHandler(BaseHTTPServer.BaseHTTPRequestHandler, object):
         """
--- a/testing/raptor/raptor/gecko_profile.py
+++ b/testing/raptor/raptor/gecko_profile.py
@@ -8,22 +8,22 @@ module to handle Gecko profilling.
 from __future__ import absolute_import
 
 import json
 import os
 import tempfile
 import zipfile
 
 import mozfile
-from mozlog import get_proxy_logger
+from RaptorLogger import RaptorLogger
 
 from profiler import symbolication, profiling
 
 here = os.path.dirname(os.path.realpath(__file__))
-LOG = get_proxy_logger()
+LOG = RaptorLogger(component='raptor-gecko-profile')
 
 
 class GeckoProfile(object):
     """
     Handle Gecko profilling.
 
     This allow to collect Gecko profiling data and to zip results in one file.
     """
--- a/testing/raptor/raptor/gen_test_config.py
+++ b/testing/raptor/raptor/gen_test_config.py
@@ -1,21 +1,21 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 from __future__ import absolute_import
 
 import os
 
-from mozlog import get_proxy_logger
+from RaptorLogger import RaptorLogger
 
 
 here = os.path.abspath(os.path.dirname(__file__))
 webext_dir = os.path.join(os.path.dirname(here), 'webext', 'raptor')
-LOG = get_proxy_logger(component="raptor-gen-test-config")
+LOG = RaptorLogger(component='raptor-gen-test-config')
 
 
 def gen_test_config(browser, test, cs_port, post_startup_delay,
                     host='127.0.0.1', b_port=0, debug_mode=0,
                     browser_cycle=1):
     LOG.info("writing test settings into background js, so webext can get it")
 
     data = """// this file is auto-generated by raptor, do not edit directly
--- a/testing/raptor/raptor/manifest.py
+++ b/testing/raptor/raptor/manifest.py
@@ -2,23 +2,23 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 from __future__ import absolute_import
 
 import json
 import os
 
 from manifestparser import TestManifest
-from mozlog import get_proxy_logger
+from RaptorLogger import RaptorLogger
 from utils import transform_platform
 
 here = os.path.abspath(os.path.dirname(__file__))
 raptor_ini = os.path.join(here, 'raptor.ini')
 tests_dir = os.path.join(here, 'tests')
-LOG = get_proxy_logger(component="raptor-manifest")
+LOG = RaptorLogger(component='raptor-manifest')
 
 required_settings = [
     'alert_threshold',
     'apps',
     'lower_is_better',
     'measure',
     'page_cycles',
     'test_url',
@@ -75,41 +75,41 @@ def validate_test_ini(test_details):
             continue
         if setting == 'scenario_time' and test_details['type'] != 'scenario':
             continue
         if test_details.get(setting) is None:
             # if page-cycles is not specified, it's ok as long as browser-cycles is there
             if setting == "page-cycles" and test_details.get('browser_cycles') is not None:
                 continue
             valid_settings = False
-            LOG.error("ERROR: setting '%s' is required but not found in %s"
+            LOG.error("setting '%s' is required but not found in %s"
                       % (setting, test_details['manifest']))
 
     test_details.setdefault("page_timeout", 30000)
 
     # if playback is specified, we need more playback settings
     if test_details.get('playback') is not None:
         for setting in playback_settings:
             if test_details.get(setting) is None:
                 valid_settings = False
-                LOG.error("ERROR: setting '%s' is required but not found in %s"
+                LOG.error("setting '%s' is required but not found in %s"
                           % (setting, test_details['manifest']))
 
     # if 'alert-on' is specified, we need to make sure that the value given is valid
     # i.e. any 'alert_on' values must be values that exist in the 'measure' ini setting
     if test_details.get('alert_on') is not None:
 
         # support with or without spaces, i.e. 'measure = fcp, loadtime' or '= fcp,loadtime'
         # convert to a list; and remove any spaces
         test_details['alert_on'] = [_item.strip() for _item in test_details['alert_on'].split(',')]
 
         # now make sure each alert_on value provided is valid
         for alert_on_value in test_details['alert_on']:
             if alert_on_value not in test_details['measure']:
-                LOG.error("ERROR: The 'alert_on' value of '%s' is not valid because "
+                LOG.error("The 'alert_on' value of '%s' is not valid because "
                           "it doesn't exist in the 'measure' test setting!"
                           % alert_on_value)
                 valid_settings = False
 
     return valid_settings
 
 
 def write_test_settings_json(args, test_details, oskey):
--- a/testing/raptor/raptor/output.py
+++ b/testing/raptor/raptor/output.py
@@ -8,19 +8,19 @@
 """output raptor test results"""
 from __future__ import absolute_import
 
 import filter
 
 import json
 import os
 
-from mozlog import get_proxy_logger
+from RaptorLogger import RaptorLogger
 
-LOG = get_proxy_logger(component="raptor-output")
+LOG = RaptorLogger(component='raptor-output')
 
 
 class Output(object):
     """class for raptor output"""
 
     def __init__(self, results, supporting_data, subtest_alert_on):
         """
         - results : list of RaptorTestResult instances
@@ -38,17 +38,17 @@ class Output(object):
             'framework': {
                 'name': 'raptor',
             },
             'suites': suites,
         }
 
         # check if we actually have any results
         if len(self.results) == 0:
-            LOG.error("error: no raptor test results found for %s" %
+            LOG.error("no raptor test results found for %s" %
                       ', '.join(test_names))
             return
 
         for test in self.results:
             vals = []
             subtests = []
             suite = {
                 'name': test.name,
@@ -744,17 +744,17 @@ class Output(object):
                                         'raptor.json')
             screenshot_path = os.path.join(os.path.dirname(os.environ['MOZ_UPLOAD_DIR']),
                                            'screenshots.html')
         else:
             results_path = os.path.join(os.getcwd(), 'raptor.json')
             screenshot_path = os.path.join(os.getcwd(), 'screenshots.html')
 
         if self.summarized_results == {}:
-            LOG.error("error: no summarized raptor results found for %s" %
+            LOG.error("no summarized raptor results found for %s" %
                       ', '.join(test_names))
         else:
             with open(results_path, 'w') as f:
                 for result in self.summarized_results:
                     f.write("%s\n" % result)
 
         if len(self.summarized_screenshots) > 0:
             with open(screenshot_path, 'w') as f:
@@ -793,17 +793,17 @@ class Output(object):
         been summarized, now output it appropriately.
 
         We want to output supporting data in a completely separate perfherder json blob and
         in a corresponding file artifact. This way supporting data can be ingested as it's own
         test suite in perfherder and alerted upon if desired. Kept outside of the test results
         from the actual Raptor test that was ran when the supporting data was gathered.
         '''
         if len(self.summarized_supporting_data) == 0:
-            LOG.error("error: no summarized supporting data found for %s" %
+            LOG.error("no summarized supporting data found for %s" %
                       ', '.join(test_names))
             return False
 
         for next_data_set in self.summarized_supporting_data:
             data_type = next_data_set['suites'][0]['type']
 
             if os.environ['MOZ_UPLOAD_DIR']:
                 # i.e. testing/mozharness/build/raptor.json locally; in production it will
--- a/testing/raptor/raptor/outputhandler.py
+++ b/testing/raptor/raptor/outputhandler.py
@@ -2,37 +2,35 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this file,
 # You can obtain one at http://mozilla.org/MPL/2.0/.
 
 # originally from talos_process.py
 from __future__ import absolute_import
 
 import json
 
-from mozlog import get_proxy_logger
-
-
-LOG = get_proxy_logger(component='raptor-output-handler')
+from RaptorLogger import RaptorLogger
 
 
 class OutputHandler(object):
     def __init__(self):
         self.proc = None
+        self.log = RaptorLogger(component='raptor-output-handler')
 
     def __call__(self, line):
         if not line.strip():
             return
         line = line.decode('utf-8', errors='replace')
 
         try:
             data = json.loads(line)
         except ValueError:
             self.process_output(line)
             return
 
         if isinstance(data, dict) and 'action' in data:
-            LOG.log_raw(data)
+            self.log.log_raw(data)
         else:
             self.process_output(json.dumps(data))
 
     def process_output(self, line):
         if "raptor" in line:
-            LOG.process_output(self.proc.pid, line)
+            self.log.process_output(self.proc.pid, line)
--- a/testing/raptor/raptor/raptor.py
+++ b/testing/raptor/raptor/raptor.py
@@ -15,17 +15,17 @@ import sys
 import tempfile
 import time
 
 import requests
 
 import mozcrash
 import mozinfo
 from mozdevice import ADBDevice
-from mozlog import commandline, get_default_logger
+from mozlog import commandline
 from mozprofile import create_profile
 from mozproxy import get_playback
 from mozrunner import runners
 
 # need this so raptor imports work both from /raptor and via mach
 here = os.path.abspath(os.path.dirname(__file__))
 paths = [here]
 if os.environ.get('SCRIPTSPATH') is not None:
@@ -60,16 +60,18 @@ from gen_test_config import gen_test_con
 from outputhandler import OutputHandler
 from manifest import get_raptor_test_list
 from memory import generate_android_memory_profile
 from mozproxy import get_playback
 from power import init_android_power_test, finish_android_power_test
 from results import RaptorResultsHandler
 from utils import view_gecko_profile
 
+from RaptorLogger import RaptorLogger
+
 
 class SignalHandler:
 
     def __init__(self):
         signal.signal(signal.SIGINT, self.handle_signal)
         signal.signal(signal.SIGTERM, self.handle_signal)
 
     def handle_signal(self, signum, frame):
@@ -108,17 +110,17 @@ class Raptor(object):
             'power_test': power_test,
             'memory_test': memory_test,
             'is_release_build': is_release_build,
             'enable_control_server_wait': memory_test,
             'e10s': e10s,
         }
 
         self.raptor_venv = os.path.join(os.getcwd(), 'raptor-venv')
-        self.log = get_default_logger(component='raptor-main')
+        self.log = RaptorLogger(component='raptor-main')
         self.control_server = None
         self.playback = None
         self.benchmark = None
         self.benchmark_port = 0
         self.gecko_profiler = None
         self.post_startup_delay = post_startup_delay
         self.device = None
         self.profile_class = profile_class or app
@@ -1131,17 +1133,17 @@ class RaptorAndroid(Raptor):
         self.device.rm(self.remote_test_root, force=True, recursive=True)
 
         super(RaptorAndroid, self).clean_up()
 
 
 def main(args=sys.argv[1:]):
     args = parse_args()
     commandline.setup_logging('raptor', args, {'tbpl': sys.stdout})
-    LOG = get_default_logger(component='raptor-main')
+    LOG = RaptorLogger(component='raptor-main')
     LOG.info("raptor-start")
 
     if args.debug_mode:
         LOG.info("debug-mode enabled")
 
     LOG.info("received command line arguments: %s" % str(args))
 
     # if a test name specified on command line, and it exists, just run that one
--- a/testing/raptor/raptor/results.py
+++ b/testing/raptor/raptor/results.py
@@ -1,40 +1,39 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 # class to process, format, and report raptor test results
 # received from the raptor control server
 from __future__ import absolute_import
 
-from mozlog import get_proxy_logger
+from RaptorLogger import RaptorLogger
 from output import Output
 
-LOG = get_proxy_logger(component='results-handler')
-
 
 class RaptorResultsHandler():
     """Handle Raptor test results"""
 
     def __init__(self):
         self.results = []
         self.page_timeout_list = []
         self.images = []
         self.supporting_data = None
+        self.log = RaptorLogger(component='raptor-results-handler')
 
     def add(self, new_result_json):
         # add to results
-        LOG.info("received results in RaptorResultsHandler.add")
+        self.log.info("received results in RaptorResultsHandler.add")
         new_result = RaptorTestResult(new_result_json)
         self.results.append(new_result)
 
     def add_image(self, screenshot, test_name, page_cycle):
         # add to results
-        LOG.info("received screenshot")
+        self.log.info("received screenshot")
         self.images.append({'screenshot': screenshot,
                             'test_name': test_name,
                             'page_cycle': page_cycle})
 
     def add_page_timeout(self, test_name, page_url, pending_metrics):
         timeout_details = {'test_name': test_name,
                            'url': page_url}
         if pending_metrics:
@@ -60,25 +59,25 @@ class RaptorResultsHandler():
                            'test': 'raptor-speedometer-geckoview',
                            'unit': 'mAh',
                            'values': {
                                'cpu': cpu,
                                'wifi': wifi,
                                'screen': screen,
                                'proportional': proportional}}
         '''
-        LOG.info("RaptorResultsHandler.add_supporting_data received %s data"
-                 % supporting_data['type'])
+        self.log.info("RaptorResultsHandler.add_supporting_data received %s data"
+                      % supporting_data['type'])
         if self.supporting_data is None:
             self.supporting_data = []
         self.supporting_data.append(supporting_data)
 
     def summarize_and_output(self, test_config, test_names):
         # summarize the result data, write to file and output PERFHERDER_DATA
-        LOG.info("summarizing raptor test results")
+        self.log.info("summarizing raptor test results")
         output = Output(self.results, self.supporting_data, test_config['subtest_alert_on'])
         output.summarize(test_names)
         # that has each browser cycle separate; need to check if there were multiple browser
         # cycles, and if so need to combine results from all cycles into one overall result
         output.combine_browser_cycles()
         output.summarize_screenshots(self.images)
         # only dump out supporting data (i.e. power) if actual Raptor test completed
         if self.supporting_data is not None and len(self.results) != 0:
--- a/testing/raptor/raptor/utils.py
+++ b/testing/raptor/raptor/utils.py
@@ -5,19 +5,19 @@
 
 from __future__ import absolute_import
 
 import os
 import subprocess
 import sys
 import time
 
-from mozlog import get_proxy_logger, get_default_logger
+from RaptorLogger import RaptorLogger
 
-LOG = get_proxy_logger(component="raptor-utils")
+LOG = RaptorLogger(component='raptor-utils')
 here = os.path.dirname(os.path.realpath(__file__))
 
 if os.environ.get('SCRIPTSPATH', None) is not None:
     # in production it is env SCRIPTS_PATH
     mozharness_dir = os.environ['SCRIPTSPATH']
 else:
     # locally it's in source tree
     mozharness_dir = os.path.join(here, '../../mozharness')
@@ -56,17 +56,17 @@ def transform_platform(str_to_transform,
         else:
             str_to_transform = str_to_transform.replace('{x64}', '')
 
     return str_to_transform
 
 
 def view_gecko_profile(ffox_bin):
     # automatically load the latest talos gecko-profile archive in profiler.firefox.com
-    LOG = get_default_logger(component='raptor-view-gecko-profile')
+    LOG = RaptorLogger(component='raptor-view-gecko-profile')
 
     if sys.platform.startswith('win') and not ffox_bin.endswith(".exe"):
         ffox_bin = ffox_bin + ".exe"
 
     if not os.path.exists(ffox_bin):
         LOG.info("unable to find Firefox bin, cannot launch view-gecko-profile")
         return