Bug 1509513 - [wdspec] Add tests for "New Window" command. r=ato
author Henrik Skupin <mail@hskupin.info>
Wed, 16 Jan 2019 13:10:51 +0000
changeset 511185 330c558436dda0e7643a68a2d810b77f433ddc41
parent 511184 6b7ee8555b8f6e5b8db9d99d1720243bb6bf777a
child 511186 6e8f0fdbe2e21fbf8db2bbfe1cefd5ba13ea665b
push id 10547
push user ffxbld-merge
push date Mon, 21 Jan 2019 13:03:58 +0000
treeherder mozilla-beta@24ec1916bffe
reviewers ato
bugs 1509513
milestone 66.0a1
Bug 1509513 - [wdspec] Add tests for "New Window" command. r=ato

Depends on D16505

Differential Revision: https://phabricator.services.mozilla.com/D16506
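For context, the command under test is the WebDriver "New Window" command: a POST to /session/{session id}/window/new with an optional "type" hint of "tab" or "window". The sketch below illustrates the wire-level exchange the new tests assert on, using only the Python standard library; the server address and session id are placeholders and not part of this patch (the tests themselves go through the wdspec session fixture instead).

# Minimal sketch of the "New Window" exchange. The endpoint URL and
# session id are hypothetical examples, not values from this changeset.
import json
import urllib.request

session_id = "0123456789abcdef"  # placeholder session id
url = "http://localhost:4444/session/%s/window/new" % session_id

# "type" is only a hint; the driver may open a tab or a window regardless,
# and a null value leaves the choice entirely to the driver.
data = json.dumps({"type": "tab"}).encode("utf-8")
request = urllib.request.Request(
    url, data=data, headers={"Content-Type": "application/json"})

with urllib.request.urlopen(request) as response:
    value = json.load(response)["value"]

# The tests below assert on exactly these two fields:
#   value["handle"]  handle of the newly created top-level browsing context
#   value["type"]    "tab" or "window", matching what was actually opened
print(value["handle"], value["type"])

Because the type is only a hint, new.py exercises both a null value and an unknown string and only checks that the returned type is one of the two valid values, while new_tab.py and new_window.py pin the expected type exactly.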
testing/web-platform/meta/webdriver/tests/new_window/new_window.py.ini
testing/web-platform/tests/webdriver/tests/new_window/__init__.py
testing/web-platform/tests/webdriver/tests/new_window/new.py
testing/web-platform/tests/webdriver/tests/new_window/new_tab.py
testing/web-platform/tests/webdriver/tests/new_window/new_window.py
testing/web-platform/tests/webdriver/tests/new_window/user_prompts.py
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/meta/webdriver/tests/new_window/new_window.py.ini
@@ -0,0 +1,3 @@
+[new_window.py]
+  disabled:
+    if os == "android": Fennec doesn't support opening new windows
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/webdriver/tests/new_window/__init__.py
@@ -0,0 +1,10 @@
+def opener(session):
+    return session.execute_script("""
+        return window.opener;
+        """)
+
+
+def window_name(session):
+    return session.execute_script("""
+        return window.name;
+        """)
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/webdriver/tests/new_window/new.py
@@ -0,0 +1,52 @@
+import pytest
+
+from webdriver.transport import Response
+
+from tests.support.asserts import assert_error, assert_success
+
+
+def new_window(session, type_hint=None):
+    return session.transport.send(
+        "POST", "session/{session_id}/window/new".format(**vars(session)),
+        {"type": type_hint})
+
+
+def test_null_parameter_value(session, http):
+    path = "/session/{session_id}/window/new".format(**vars(session))
+    with http.post(path, None) as response:
+        assert_error(Response.from_http(response), "invalid argument")
+
+
+def test_no_browsing_context(session, closed_window):
+    response = new_window(session)
+    assert_error(response, "no such window")
+
+
+@pytest.mark.parametrize("type_hint", [True, 42, 4.2, [], {}])
+def test_type_with_invalid_type(session, type_hint):
+    response = new_window(session, type_hint)
+    assert_error(response, "invalid argument")
+
+
+def test_type_with_null_value(session):
+    original_handles = session.handles
+
+    response = new_window(session, type_hint=None)
+    value = assert_success(response)
+    handles = session.handles
+    assert len(handles) == len(original_handles) + 1
+    assert value["handle"] in handles
+    assert value["handle"] not in original_handles
+    assert value["type"] in ["tab", "window"]
+
+
+def test_type_with_unknown_value(session):
+    original_handles = session.handles
+
+    response = new_window(session, type_hint="foo")
+    value = assert_success(response)
+    handles = session.handles
+    assert len(handles) == len(original_handles) + 1
+    assert value["handle"] in handles
+    assert value["handle"] not in original_handles
+    assert value["type"] in ["tab", "window"]
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/webdriver/tests/new_window/new_tab.py
@@ -0,0 +1,48 @@
+from tests.support.asserts import assert_success
+
+from . import opener, window_name
+
+
+def new_window(session, type_hint=None):
+    return session.transport.send(
+        "POST", "session/{session_id}/window/new".format(**vars(session)),
+        {"type": type_hint})
+
+
+def test_new_tab(session):
+    original_handles = session.handles
+
+    response = new_window(session, type_hint="tab")
+    value = assert_success(response)
+    handles = session.handles
+    assert len(handles) == len(original_handles) + 1
+    assert value["handle"] in handles
+    assert value["handle"] not in original_handles
+    assert value["type"] == "tab"
+
+
+def test_new_tab_opens_about_blank(session):
+    response = new_window(session, type_hint="tab")
+    value = assert_success(response)
+    assert value["type"] == "tab"
+
+    session.handle = value["handle"]
+    assert session.url == "about:blank"
+
+
+def test_new_tab_sets_no_window_name(session):
+    response = new_window(session, type_hint="tab")
+    value = assert_success(response)
+    assert value["type"] == "tab"
+
+    session.handle = value["handle"]
+    assert window_name(session) == ""
+
+
+def test_new_tab_sets_no_opener(session):
+    response = new_window(session, type_hint="tab")
+    value = assert_success(response)
+    assert value["type"] == "tab"
+
+    session.handle = value["handle"]
+    assert opener(session) is None
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/webdriver/tests/new_window/new_window.py
@@ -0,0 +1,48 @@
+from tests.support.asserts import assert_success
+
+from . import opener, window_name
+
+
+def new_window(session, type_hint=None):
+    return session.transport.send(
+        "POST", "session/{session_id}/window/new".format(**vars(session)),
+        {"type": type_hint})
+
+
+def test_type_with_window(session):
+    original_handles = session.handles
+
+    response = new_window(session, type_hint="window")
+    value = assert_success(response)
+    handles = session.handles
+    assert len(handles) == len(original_handles) + 1
+    assert value["handle"] in handles
+    assert value["handle"] not in original_handles
+    assert value["type"] == "window"
+
+
+def test_new_window_opens_about_blank(session):
+    response = new_window(session, type_hint="window")
+    value = assert_success(response)
+    assert value["type"] == "window"
+
+    session.handle = value["handle"]
+    assert session.url == "about:blank"
+
+
+def test_new_window_sets_no_window_name(session):
+    response = new_window(session, type_hint="window")
+    value = assert_success(response)
+    assert value["type"] == "window"
+
+    session.handle = value["handle"]
+    assert window_name(session) == ""
+
+
+def test_new_window_sets_no_opener(session):
+    response = new_window(session, type_hint="window")
+    value = assert_success(response)
+    assert value["type"] == "window"
+
+    session.handle = value["handle"]
+    assert opener(session) is None
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/webdriver/tests/new_window/user_prompts.py
@@ -0,0 +1,121 @@
+# META: timeout=long
+
+import pytest
+
+from tests.support.asserts import assert_dialog_handled, assert_error, assert_success
+
+
+def new_window(session, type_hint=None):
+    return session.transport.send(
+        "POST", "session/{session_id}/window/new".format(**vars(session)),
+        {"type": type_hint})
+
+
+@pytest.fixture
+def check_user_prompt_closed_without_exception(session, create_dialog):
+    def check_user_prompt_closed_without_exception(dialog_type, retval):
+        original_handles = session.handles
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = new_window(session)
+        value = assert_success(response)
+
+        handles = session.handles
+        assert len(handles) == len(original_handles) + 1
+        assert value["handle"] in handles
+        assert value["handle"] not in original_handles
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+    return check_user_prompt_closed_without_exception
+
+
+@pytest.fixture
+def check_user_prompt_closed_with_exception(session, create_dialog):
+    def check_user_prompt_closed_with_exception(dialog_type, retval):
+        original_handles = session.handles
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = new_window(session)
+        assert_error(response, "unexpected alert open")
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert len(session.handles) == len(original_handles)
+
+    return check_user_prompt_closed_with_exception
+
+
+@pytest.fixture
+def check_user_prompt_not_closed_but_exception(session, create_dialog):
+    def check_user_prompt_not_closed_but_exception(dialog_type):
+        original_handles = session.handles
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = new_window(session)
+        assert_error(response, "unexpected alert open")
+
+        assert session.alert.text == dialog_type
+        session.alert.dismiss()
+
+        assert len(session.handles) == len(original_handles)
+
+    return check_user_prompt_not_closed_but_exception
+
+
+@pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", True),
+    ("prompt", ""),
+])
+def test_accept(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
+
+
+@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", True),
+    ("prompt", ""),
+])
+def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
+
+
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
+
+
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
+
+
+@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
+    check_user_prompt_not_closed_but_exception(dialog_type)
+
+
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)