Bug 1636534 - Added test for notebook layer and perftestnotebook. r=sparky
author xuanqi xu <xuxuanqi13@gmail.com>
Tue, 02 Jun 2020 21:35:03 +0000
changeset 597795 57d02627fd056442e43862fca95d983f37783f78
parent 597794 7a77bd49f8950b0cee5c3f13c6d827793e928edb
child 597796 124e127d05f467bed582507cc3e3db8d3100b7c7
push id 13310
push user ffxbld-merge
push date Mon, 29 Jun 2020 14:50:06 +0000
treeherder mozilla-beta@15a59a0afa5c
perfherder [talos] [build metrics] [platform microbench]
reviewers sparky
bugs 1636534
milestone 79.0a1
Bug 1636534 - Added test for notebook layer and perftestnotebook. r=sparky

Differential Revision: https://phabricator.services.mozilla.com/D77386
python/mozperftest/mozperftest/tests/conftest.py
python/mozperftest/mozperftest/tests/python.ini
python/mozperftest/mozperftest/tests/test_notebookupload.py
python/mozperftest/mozperftest/tests/test_perftestetl.py
python/mozperftest/mozperftest/tests/test_perftestnotebook.py
--- a/python/mozperftest/mozperftest/tests/conftest.py
+++ b/python/mozperftest/mozperftest/tests/conftest.py
@@ -1,12 +1,13 @@
 import json
 import pathlib
 import pytest
 from mozperftest.tests.support import temp_dir
+from mozperftest.metrics.notebook.perftestetl import PerftestETL
 from mozperftest.metrics.notebook.perftestnotebook import PerftestNotebook
 
 
 @pytest.fixture(scope="session", autouse=True)
 def data():
     data_1 = {
         "browserScripts": [
             {"timings": {"firstPaint": 101}},
@@ -30,16 +31,32 @@ def data():
             {"timings": {"firstPaint": 303}},
         ],
     }
 
     yield {"data_1": data_1, "data_2": data_2, "data_3": data_3}
 
 
 @pytest.fixture(scope="session", autouse=True)
+def standardized_data():
+    return {
+        "browsertime": [
+            {
+                "data": [
+                    {"value": 1, "xaxis": 1, "file": "file_1"},
+                    {"value": 2, "xaxis": 2, "file": "file_2"},
+                ],
+                "name": "name",
+                "subtest": "subtest",
+            }
+        ]
+    }
+
+
+@pytest.fixture(scope="session", autouse=True)
 def files(data):
     # Create a temporary directory.
     with temp_dir() as td:
         tmp_path = pathlib.Path(td)
 
     dirs = {
         "resources": tmp_path / "resources",
         "output": tmp_path / "output",
@@ -65,18 +82,23 @@ def files(data):
     resources["file_3"] = _create_temp_files(txt_3, str(data["data_3"]))
 
     output = dirs["output"] / "output.json"
 
     yield resources, dirs, output.resolve().as_posix()
 
 
 @pytest.fixture(scope="session", autouse=True)
-def ptnbs(files):
+def ptetls(files):
     resources, dirs, output = files
     config = {"output": output}
     file_group_list = {"group_1": list(resources.values())}
     file_group_str = {"group_1": dirs["resources"].resolve().as_posix()}
 
     yield {
-        "ptnb_list": PerftestNotebook(file_group_list, config, sort_files=True),
-        "ptnb_str": PerftestNotebook(file_group_str, config, sort_files=True),
+        "ptetl_list": PerftestETL(file_group_list, config, sort_files=True),
+        "ptetl_str": PerftestETL(file_group_str, config, sort_files=True),
     }
+
+
+@pytest.fixture(scope="session", autouse=True)
+def ptnb(standardized_data):
+    return PerftestNotebook(standardized_data)
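A note on the fixtures above: they are session-scoped, so pytest builds each one once and hands it to any test that names it as a parameter. A minimal sketch of a consuming test (hypothetical, not part of this change; it assumes PerftestNotebook keeps its input on the .data attribute, which test_init below also relies on):

    def test_fixture_shapes(ptetls, ptnb):
        # ptetls maps labels to the two PerftestETL instances built above.
        assert set(ptetls) == {"ptetl_list", "ptetl_str"}
        # ptnb wraps the standardized data directly.
        assert "browsertime" in ptnb.data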
--- a/python/mozperftest/mozperftest/tests/python.ini
+++ b/python/mozperftest/mozperftest/tests/python.ini
@@ -6,17 +6,19 @@ skip-if = python == 2
 [test_argparser.py]
 [test_browsertime.py]
 [test_consoleoutput.py]
 [test_environment.py]
 [test_ir_schema.py]
 [test_layers.py]
 [test_mach_commands.py]
 [test_metrics_utils.py]
+[test_notebookupload.py]
 [test_perfherder.py]
+[test_perftestetl.py]
 [test_perftestnotebook.py]
 [test_profile.py]
 [test_proxy.py]
 [test_runner.py]
 [test_scriptinfo.py]
 [test_single_json_transformer.py]
 [test_logcat_transformer.py]
 [test_transformer.py]
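These manifest entries are collected by the Python-test harness; assuming the usual mozilla-central workflow, a single file can also be run on its own, for example with ./mach python-test python/mozperftest/mozperftest/tests/test_perftestetl.py.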
new file mode 100644
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_notebookupload.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+import pytest
+
+import mock
+import mozunit
+from mozperftest.environment import METRICS
+from mozperftest.tests.support import BT_DATA, EXAMPLE_TEST, get_running_env, temp_file
+from mozperftest.utils import silence
+
+
+def setup_env(options):
+    mach_cmd, metadata, env = get_running_env(**options)
+    runs = []
+
+    def _run_process(*args, **kw):
+        runs.append((args, kw))
+
+    mach_cmd.run_process = _run_process
+    metrics = env.layers[METRICS]
+    env.set_arg("tests", [EXAMPLE_TEST])
+    metadata.add_result({"results": str(BT_DATA), "name": "browsertime"})
+    return metrics, metadata, env
+
+
+@pytest.mark.parametrize("no_filter", [True, False])
+@mock.patch("mozperftest.metrics.notebookupload.PerftestNotebook")
+def test_notebookupload_with_filter(notebook, no_filter):
+
+    options = {
+        "notebook-metrics": [],
+        "notebook-prefix": "",
+        "notebook": True,
+        "notebook-analysis": ["scatterplot"],
+        "notebook-analyze-strings": no_filter,
+    }
+
+    metrics, metadata, env = setup_env(options)
+
+    with temp_file() as output:
+        env.set_arg("output", output)
+        with metrics as m, silence():
+            m(metadata)
+
+    if no_filter:
+        args, kwargs = notebook.call_args_list[0]
+        assert type(args[0][0]["data"][0]["value"]) == str
+    else:
+        for call in notebook.call_args_list:
+            args, kwargs = call
+            for a in args:
+                for data_dict in a:
+                    for data in data_dict["data"]:
+                        assert type(data["value"]) in (int, float)
+
+    notebook.assert_has_calls([mock.call().post_to_iodide(["scatterplot"])])
+
+
+if __name__ == "__main__":
+    mozunit.main()
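The decorator stack above is worth a note: mock.patch swaps out PerftestNotebook where notebookupload looks it up and passes the replacement in as the first positional argument (notebook), while parametrize supplies no_filter. A minimal, self-contained sketch of that injection pattern (using os.getcwd purely as a stand-in target):

    import os
    import mock

    @mock.patch("os.getcwd")       # patch the name where it is looked up
    def show_injection(patched):   # the MagicMock arrives as the first argument
        patched.return_value = "/tmp"
        assert os.getcwd() == "/tmp"

    show_injection()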
new file mode 100644
--- /dev/null
+++ b/python/mozperftest/mozperftest/tests/test_perftestetl.py
@@ -0,0 +1,88 @@
+import json
+import mozunit
+import pathlib
+from mozperftest.metrics.notebook.constant import Constant
+from mozperftest.metrics.notebook.transformer import Transformer
+from mozperftest.metrics.notebook.transforms.single_json import SingleJsonRetriever
+
+
+def test_init(ptetls):
+    for ptetl in ptetls.values():
+        assert isinstance(ptetl.fmt_data, dict)
+        assert isinstance(ptetl.file_groups, dict)
+        assert isinstance(ptetl.config, dict)
+        assert isinstance(ptetl.sort_files, bool)
+        assert isinstance(ptetl.const, Constant)
+        assert isinstance(ptetl.transformer, Transformer)
+
+
+def test_parse_file_grouping(ptetls):
+    def _check_files_created(ptetl, expected_files):
+        actual_files = set(ptetl.parse_file_grouping(expected_files))
+        expected_files = set(expected_files)
+
+        # Check all parsed files are regular files.
+        assert all(pathlib.Path(file).is_file() for file in actual_files)
+        # Check parse_file_grouping function returns correct result.
+        assert actual_files - expected_files == set()
+
+    # If file_grouping is a list of files.
+    ptetl = ptetls["ptetl_list"]
+    expected_files = ptetl.file_groups["group_1"]
+    _check_files_created(ptetl, expected_files)
+
+    # If file_grouping is a directory string.
+    ptetl = ptetls["ptetl_str"]
+    expected_path = ptetl.file_groups["group_1"]
+    expected_files = [
+        f.resolve().as_posix() for f in pathlib.Path(expected_path).iterdir()
+    ]
+    _check_files_created(ptetl, expected_files)
+
+
+def test_process(ptetls, files):
+    # Temporary resource files.
+    files, _, output = files
+    file_1 = files["file_1"]
+    file_2 = files["file_2"]
+
+    # Create expected output.
+    expected_output = [
+        {
+            "data": [
+                {"value": 101, "xaxis": 1, "file": file_1},
+                {"value": 102, "xaxis": 1, "file": file_1},
+                {"value": 103, "xaxis": 1, "file": file_1},
+                {"value": 201, "xaxis": 2, "file": file_2},
+                {"value": 202, "xaxis": 2, "file": file_2},
+                {"value": 203, "xaxis": 2, "file": file_2},
+            ],
+            "name": "group_1",
+            "subtest": "browserScripts.timings.firstPaint",
+        }
+    ]
+
+    ptetl = ptetls["ptetl_str"]
+
+    # Set a custom transformer.
+    ptetl.transformer = Transformer([], SingleJsonRetriever())
+
+    # Create expected result.
+    expected_result = {
+        "data": expected_output,
+        "file-output": output,
+    }
+
+    # Check return value.
+    actual_result = ptetl.process()
+    assert actual_result == expected_result
+
+    # Check output file.
+    with pathlib.Path(output).open() as f:
+        actual_output = json.load(f)
+
+    assert expected_output == actual_output
+
+
+if __name__ == "__main__":
+    mozunit.main()
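The subtest name asserted above, browserScripts.timings.firstPaint, is the dot-joined path to the metric inside the raw fixture data, and xaxis numbers the source files. The real flattening lives in SingleJsonRetriever; the following standalone sketch only illustrates the dot-path idea, not the actual transformer code:

    def flatten(obj, path=()):
        # Yield (dot.path, value) for every leaf; list indices are skipped,
        # so repeated samples share one subtest name, as in the test above.
        if isinstance(obj, dict):
            for key, val in obj.items():
                yield from flatten(val, path + (key,))
        elif isinstance(obj, list):
            for item in obj:
                yield from flatten(item, path)
        else:
            yield ".".join(path), obj

    raw = {"browserScripts": [{"timings": {"firstPaint": 101}}]}
    assert list(flatten(raw)) == [("browserScripts.timings.firstPaint", 101)]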
--- a/python/mozperftest/mozperftest/tests/test_perftestnotebook.py
+++ b/python/mozperftest/mozperftest/tests/test_perftestnotebook.py
@@ -1,90 +1,80 @@
-import json
+#!/usr/bin/env python
+from pathlib import Path
+import pytest
+import mock
 import mozunit
-import pathlib
-from mozperftest.metrics.notebook.analyzer import NotebookAnalyzer
 from mozperftest.metrics.notebook.constant import Constant
-from mozperftest.metrics.notebook.transformer import Transformer
-from mozperftest.metrics.notebook.transforms.single_json import SingleJsonRetriever
+
+
+def test_init(ptnb, standardized_data):
+    assert isinstance(ptnb.data, dict)
+    assert isinstance(ptnb.const, Constant)
 
 
-def test_init(ptnbs):
-    for ptnb in ptnbs.values():
-        assert isinstance(ptnb.fmt_data, dict)
-        assert isinstance(ptnb.file_groups, dict)
-        assert isinstance(ptnb.config, dict)
-        assert isinstance(ptnb.sort_files, bool)
-        assert isinstance(ptnb.const, Constant)
-        assert isinstance(ptnb.analyzer, NotebookAnalyzer)
-        assert isinstance(ptnb.transformer, Transformer)
+def test_get_notebook_section(ptnb):
+    func = "scatterplot"
+    with (ptnb.const.here / "notebook-sections" / func).open() as f:
+        assert ptnb.get_notebook_section(func) == f.read()
+
+
+def test_get_notebook_section_unknown_analysis(ptnb):
+    func = "unknown"
+    with mock.patch(
+        "mozperftest.metrics.notebook.perftestnotebook.logger"
+    ) as logger:
+        assert ptnb.get_notebook_section(func) == ""
+        logger.assert_has_calls(
+            [mock.call.warning("Could not find the notebook-section called unknown")]
+        )
 
 
-def test_parse_file_grouping(ptnbs):
-    def _check_files_created(ptnb, expected_files):
-        actual_files = set(ptnb.parse_file_grouping(expected_files))
-        expected_files = set(expected_files)
+@pytest.mark.parametrize("analysis", [["scatterplot"], None])
+def test_post_to_iodide(ptnb, standardized_data, analysis):
 
-        # Check all parsed files are regular files.
-        assert all([pathlib.Path(file).is_file for file in actual_files])
-        # Check parse_file_grouping function returns correct result.
-        assert actual_files - expected_files == set()
+    opener = mock.mock_open()
 
-    # If file_grouping is a list of files.
-    ptnb = ptnbs["ptnb_list"]
-    expected_files = ptnb.file_groups["group_1"]
-    _check_files_created(ptnb, expected_files)
+    def mocked_open(self, *args, **kwargs):
+        return opener(self, *args, **kwargs)
 
-    # If file_grouping is a directory string.
-    ptnb = ptnbs["ptnb_str"]
-    expected_path = ptnb.file_groups["group_1"]
-    expected_files = [
-        f.resolve().as_posix() for f in pathlib.Path(expected_path).iterdir()
-    ]
-    _check_files_created(ptnb, expected_files)
+    with mock.patch.object(Path, "open", mocked_open), mock.patch(
+        "mozperftest.metrics.notebook.perftestnotebook.webbrowser.open_new_tab"
+    ) as browser, mock.patch(
+        "mozperftest.metrics.notebook.perftestnotebook.HTTPServer"
+    ) as server:
+        ptnb.post_to_iodide(analysis=analysis)
 
+        list_of_calls = opener.mock_calls
 
-def test_process(ptnbs, files):
-    # Temporary resource files.
-    files, _, output = files
-    file_1 = files["file_1"]
-    file_2 = files["file_2"]
+        header_path = ptnb.const.here / "notebook-sections" / "header"
+        assert mock.call(header_path) in list_of_calls
+        index1 = list_of_calls.index(mock.call(header_path))
+        assert list_of_calls[index1 + 2] == mock.call().read()
 
-    # Create expected output.
-    expected_output = [
-        {
-            "data": [
-                {"value": 101, "xaxis": 1, "file": file_1},
-                {"value": 102, "xaxis": 1, "file": file_1},
-                {"value": 103, "xaxis": 1, "file": file_1},
-                {"value": 201, "xaxis": 2, "file": file_2},
-                {"value": 202, "xaxis": 2, "file": file_2},
-                {"value": 203, "xaxis": 2, "file": file_2},
-            ],
-            "name": "group_1",
-            "subtest": "browserScripts.timings.firstPaint",
-        }
-    ]
+        template_upload_file_path = ptnb.const.here / "template_upload_file.html"
+        assert mock.call(template_upload_file_path) in list_of_calls
+        index2 = list_of_calls.index(mock.call(template_upload_file_path))
+        assert list_of_calls[index2 + 2] == mock.call().read()
 
-    ptnb = ptnbs["ptnb_str"]
+        upload_file_path = ptnb.const.here / "upload_file.html"
+        assert mock.call(upload_file_path, "w") in list_of_calls
+        index3 = list_of_calls.index(mock.call(upload_file_path, "w"))
+        assert list_of_calls[index3 + 2] == mock.call().write("")
+
+        assert index1 < index2 < index3
 
-    # Set a custom transformer.
-    ptnb.transformer = Transformer([], SingleJsonRetriever())
-
-    # Create expected result.
-    expected_result = {
-        "data": expected_output,
-        "file-output": output,
-    }
+        if analysis:
+            section_path = ptnb.const.here / "notebook-sections" / analysis[0]
+            assert mock.call(section_path) in list_of_calls
+            index4 = list_of_calls.index(mock.call(section_path))
+            assert index1 < index4 < index2
+        else:
+            assert list_of_calls.count(mock.call().__enter__()) == 3
 
-    # Check return value.
-    actual_result = ptnb.process()
-    assert actual_result == expected_result
-
-    # Check output file.
-    with pathlib.Path(output).open() as f:
-        actual_output = json.load(f)
-
-    assert expected_output == actual_output
+        browser.assert_called_with(str(upload_file_path))
+        server.assert_has_calls(
+            [mock.call().serve_forever(), mock.call().server_close()]
+        )
 
 
 if __name__ == "__main__":
     mozunit.main()
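A closing note on the Path.open patch in test_post_to_iodide: mock.mock_open builds a file handle whose __enter__, read, and write calls are all recorded on opener.mock_calls, and the mocked_open wrapper forwards self so the opened path itself can be matched in the assertions. A self-contained sketch of that pattern:

    from pathlib import Path
    import mock

    opener = mock.mock_open(read_data="hello")

    def mocked_open(self, *args, **kwargs):
        # Forward self so the opened path shows up in opener.mock_calls.
        return opener(self, *args, **kwargs)

    with mock.patch.object(Path, "open", mocked_open):
        with Path("somefile.txt").open() as f:
            assert f.read() == "hello"

    assert mock.call(Path("somefile.txt")) in opener.mock_calls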