Bug 1479850 - [wdspec] Add remaining unhandled prompt behavior tests to existing user prompt tests. r=ato
author: Henrik Skupin <mail@hskupin.info>
date: Tue, 31 Jul 2018 16:15:19 +0200
changeset: 826074 32a8875a1721cd9369931bb5e1b74da448455bff
parent: 826073 60d153b3f3c85a76755e42ec5d73c2c80dead451
child: 826075 4088993297c4ce754df796d2d0a0bc107b6073e6
push id: 118238
push user: bmo:mh+mozilla@glandium.org
push date: Thu, 02 Aug 2018 22:02:22 +0000
reviewers: ato
bugs: 1479850
milestone: 63.0a1
Bug 1479850 - [wdspec] Add remaining unhandled prompt behavior tests to existing user prompt tests. r=ato MozReview-Commit-ID: ALsDccEyRsq
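For context: the new tests exercise every value of the WebDriver "unhandledPromptBehavior" capability against each of the commands listed below. As an editorial summary (not part of the patch itself), the outcomes the tests assert are roughly the following:

# Editorial summary of what the new tests assert for each
# unhandledPromptBehavior capability value; not part of the patch.
EXPECTED_OUTCOMES = {
    "accept": "prompt closed, command succeeds",
    "accept and notify": "prompt closed, command fails with 'unexpected alert open'",
    "dismiss": "prompt closed, command succeeds",
    "dismiss and notify": "prompt closed, command fails with 'unexpected alert open'",
    "ignore": "prompt left open, command fails with 'unexpected alert open'",
    None: "capability not set; behaves like 'dismiss and notify'",
}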
testing/web-platform/meta/MANIFEST.json
testing/web-platform/meta/webdriver/tests/element_send_keys/user_prompts.py.ini
testing/web-platform/tests/webdriver/tests/back/user_prompts.py
testing/web-platform/tests/webdriver/tests/close_window/user_prompts.py
testing/web-platform/tests/webdriver/tests/delete_cookie/user_prompts.py
testing/web-platform/tests/webdriver/tests/element_send_keys/user_prompts.py
testing/web-platform/tests/webdriver/tests/execute_async_script/user_prompts.py
testing/web-platform/tests/webdriver/tests/execute_script/user_prompts.py
testing/web-platform/tests/webdriver/tests/forward/user_prompts.py
testing/web-platform/tests/webdriver/tests/fullscreen_window/user_prompts.py
testing/web-platform/tests/webdriver/tests/get_current_url/user_prompts.py
testing/web-platform/tests/webdriver/tests/get_element_property/user_prompts.py
testing/web-platform/tests/webdriver/tests/get_element_tag_name/user_prompts.py
testing/web-platform/tests/webdriver/tests/get_title/user_prompts.py
testing/web-platform/tests/webdriver/tests/get_window_rect/user_prompts.py
testing/web-platform/tests/webdriver/tests/is_element_selected/user_prompts.py
testing/web-platform/tests/webdriver/tests/maximize_window/user_prompts.py
testing/web-platform/tests/webdriver/tests/minimize_window/user_prompts.py
testing/web-platform/tests/webdriver/tests/refresh/user_prompts.py
testing/web-platform/tests/webdriver/tests/set_window_rect/user_prompts.py
testing/web-platform/tests/webdriver/tests/support/asserts.py
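Each rewritten user_prompts.py below follows the same shape: a small request helper for the command under test, three fixture factories (prompt closed without exception, prompt closed with exception, prompt not closed but exception raised), and thin parametrized tests that delegate to them. A condensed, editorial sketch of that pattern, based on the back/user_prompts.py hunk further down (session and create_dialog are existing wdspec harness fixtures; the inner helper is shortened to "check" here for brevity):

import pytest

from tests.support.asserts import assert_dialog_handled, assert_success
from tests.support.inline import inline


def back(session):
    return session.transport.send(
        "POST", "session/{session_id}/back".format(**vars(session)))


@pytest.fixture
def pages(session):
    # Navigate through two pages so that there is one entry in the back history.
    pages = [inline("<p id=1>"), inline("<p id=2>")]
    for page in pages:
        session.url = page
    return pages


@pytest.fixture
def check_user_prompt_closed_without_exception(session, create_dialog, pages):
    def check(dialog_type, retval):
        create_dialog(dialog_type, text=dialog_type)

        # Command succeeds, the prompt was handled, and the navigation took place.
        assert_success(back(session))
        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
        assert session.url == pages[0]

    return check


@pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
def test_accept(check_user_prompt_closed_without_exception, dialog_type):
    # retval not testable for confirm and prompt because the window is gone
    check_user_prompt_closed_without_exception(dialog_type, None)

Factoring the assertions into fixtures leaves one test body per outcome rather than one per capability value, which is what makes it cheap to fill in the previously stubbed "accept and notify", "dismiss and notify", and "ignore" cases.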
--- a/testing/web-platform/meta/MANIFEST.json
+++ b/testing/web-platform/meta/MANIFEST.json
@@ -418351,17 +418351,19 @@
     [
      "/webdriver/tests/close_window/close.py",
      {}
     ]
    ],
    "webdriver/tests/close_window/user_prompts.py": [
     [
      "/webdriver/tests/close_window/user_prompts.py",
-     {}
+     {
+      "timeout": "long"
+     }
     ]
    ],
    "webdriver/tests/delete_all_cookies/delete.py": [
     [
      "/webdriver/tests/delete_all_cookies/delete.py",
      {}
     ]
    ],
@@ -418369,17 +418371,19 @@
     [
      "/webdriver/tests/delete_cookie/delete.py",
      {}
     ]
    ],
    "webdriver/tests/delete_cookie/user_prompts.py": [
     [
      "/webdriver/tests/delete_cookie/user_prompts.py",
-     {}
+     {
+      "timeout": "long"
+     }
     ]
    ],
    "webdriver/tests/delete_session/delete.py": [
     [
      "/webdriver/tests/delete_session/delete.py",
      {}
     ]
    ],
@@ -418485,17 +418489,19 @@
     [
      "/webdriver/tests/element_send_keys/send_keys.py",
      {}
     ]
    ],
    "webdriver/tests/element_send_keys/user_prompts.py": [
     [
      "/webdriver/tests/element_send_keys/user_prompts.py",
-     {}
+     {
+      "timeout": "long"
+     }
     ]
    ],
    "webdriver/tests/execute_async_script/collections.py": [
     [
      "/webdriver/tests/execute_async_script/collections.py",
      {}
     ]
    ],
@@ -418587,17 +418593,19 @@
     [
      "/webdriver/tests/fullscreen_window/fullscreen.py",
      {}
     ]
    ],
    "webdriver/tests/fullscreen_window/user_prompts.py": [
     [
      "/webdriver/tests/fullscreen_window/user_prompts.py",
-     {}
+     {
+      "timeout": "long"
+     }
     ]
    ],
    "webdriver/tests/get_active_element/get.py": [
     [
      "/webdriver/tests/get_active_element/get.py",
      {}
     ]
    ],
@@ -418611,17 +418619,19 @@
     [
      "/webdriver/tests/get_current_url/get.py",
      {}
     ]
    ],
    "webdriver/tests/get_current_url/user_prompts.py": [
     [
      "/webdriver/tests/get_current_url/user_prompts.py",
-     {}
+     {
+      "timeout": "long"
+     }
     ]
    ],
    "webdriver/tests/get_element_attribute/get.py": [
     [
      "/webdriver/tests/get_element_attribute/get.py",
      {}
     ]
    ],
@@ -418629,29 +418639,33 @@
     [
      "/webdriver/tests/get_element_property/get.py",
      {}
     ]
    ],
    "webdriver/tests/get_element_property/user_prompts.py": [
     [
      "/webdriver/tests/get_element_property/user_prompts.py",
-     {}
+     {
+      "timeout": "long"
+     }
     ]
    ],
    "webdriver/tests/get_element_tag_name/get.py": [
     [
      "/webdriver/tests/get_element_tag_name/get.py",
      {}
     ]
    ],
    "webdriver/tests/get_element_tag_name/user_prompts.py": [
     [
      "/webdriver/tests/get_element_tag_name/user_prompts.py",
-     {}
+     {
+      "timeout": "long"
+     }
     ]
    ],
    "webdriver/tests/get_element_text/get.py": [
     [
      "/webdriver/tests/get_element_text/get.py",
      {}
     ]
    ],
@@ -418671,65 +418685,75 @@
     [
      "/webdriver/tests/get_title/get.py",
      {}
     ]
    ],
    "webdriver/tests/get_title/user_prompts.py": [
     [
      "/webdriver/tests/get_title/user_prompts.py",
-     {}
+     {
+      "timeout": "long"
+     }
     ]
    ],
    "webdriver/tests/get_window_rect/get.py": [
     [
      "/webdriver/tests/get_window_rect/get.py",
      {}
     ]
    ],
    "webdriver/tests/get_window_rect/user_prompts.py": [
     [
      "/webdriver/tests/get_window_rect/user_prompts.py",
-     {}
+     {
+      "timeout": "long"
+     }
     ]
    ],
    "webdriver/tests/is_element_selected/selected.py": [
     [
      "/webdriver/tests/is_element_selected/selected.py",
      {}
     ]
    ],
    "webdriver/tests/is_element_selected/user_prompts.py": [
     [
      "/webdriver/tests/is_element_selected/user_prompts.py",
-     {}
+     {
+      "timeout": "long"
+     }
     ]
    ],
    "webdriver/tests/maximize_window/maximize.py": [
     [
      "/webdriver/tests/maximize_window/maximize.py",
      {}
     ]
    ],
    "webdriver/tests/maximize_window/user_prompts.py": [
     [
      "/webdriver/tests/maximize_window/user_prompts.py",
-     {}
+     {
+      "timeout": "long"
+     }
     ]
    ],
    "webdriver/tests/minimize_window/minimize.py": [
     [
      "/webdriver/tests/minimize_window/minimize.py",
      {}
     ]
    ],
    "webdriver/tests/minimize_window/user_prompts.py": [
     [
      "/webdriver/tests/minimize_window/user_prompts.py",
-     {}
+     {
+      "timeout": "long"
+     }
     ]
    ],
    "webdriver/tests/navigate_to/navigate.py": [
     [
      "/webdriver/tests/navigate_to/navigate.py",
      {}
     ]
    ],
@@ -418833,17 +418857,19 @@
      {
       "timeout": "long"
      }
     ]
    ],
    "webdriver/tests/set_window_rect/user_prompts.py": [
     [
      "/webdriver/tests/set_window_rect/user_prompts.py",
-     {}
+     {
+      "timeout": "long"
+     }
     ]
    ],
    "webdriver/tests/status/status.py": [
     [
      "/webdriver/tests/status/status.py",
      {}
     ]
    ],
@@ -636780,17 +636806,17 @@
    "ad2bb513a6a801b29a791a6d65b7b730b86e9a64",
    "support"
   ],
   "tools/wptrunner/wptrunner/browsers/fennec.py": [
    "5e8ed28e2ee0e10acaa3e4ddf1e79164e795a2b2",
    "support"
   ],
   "tools/wptrunner/wptrunner/browsers/firefox.py": [
-   "26291078ea4245a7ea81a262a7990ca16d4d86ed",
+   "14813b55fd5fae2b42a32eefd7a08a852913f4d4",
    "support"
   ],
   "tools/wptrunner/wptrunner/browsers/ie.py": [
    "a0730f8ba29ea8b65b56709a4f96ac5894078d94",
    "support"
   ],
   "tools/wptrunner/wptrunner/browsers/opera.py": [
    "5c0109832be2da6fdbf224b64a9f8fc3b98045da",
@@ -641432,29 +641458,29 @@
    "0f94ab431ae012b0abc8406f6d372a6fafcf8048",
    "wdspec"
   ],
   "webdriver/tests/back/conftest.py": [
    "d5fd320a696a507718299c765278edde6205608a",
    "support"
   ],
   "webdriver/tests/back/user_prompts.py": [
-   "b7f05462167335097724676beb1f31624336087f",
+   "7121f7c6b2a63da99c8e4ca1956c181d8d675684",
    "wdspec"
   ],
   "webdriver/tests/close_window/__init__.py": [
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/close_window/close.py": [
    "f4e3b0481d3b3cbc93ba8b44c639934cf8eef9de",
    "wdspec"
   ],
   "webdriver/tests/close_window/user_prompts.py": [
-   "4661ba513db73f79bf58046951ee8c8c2fc0c488",
+   "22df9155e8e7bd3aa32fc96c7dd482972c490fd2",
    "wdspec"
   ],
   "webdriver/tests/conftest.py": [
    "c39671797f6bdf176d6b5d20b13412ee3a92cee4",
    "support"
   ],
   "webdriver/tests/delete_all_cookies/__init__.py": [
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
@@ -641468,17 +641494,17 @@
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/delete_cookie/delete.py": [
    "0b6adade156cecdfacf9f58be07bf5f697329124",
    "wdspec"
   ],
   "webdriver/tests/delete_cookie/user_prompts.py": [
-   "31ae497c193ee33a0b204d0b58d29c01e19579dd",
+   "1ed7db6e8e320575ffa99ec2f7b7cf2cfeb0ee6a",
    "wdspec"
   ],
   "webdriver/tests/delete_session/__init__.py": [
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/delete_session/delete.py": [
    "835f2525792136e7aa0082e9b32165b662e8cdd8",
@@ -641576,33 +641602,33 @@
    "9ff0a9e25788921eb6d01add0e279c50d7de6134",
    "wdspec"
   ],
   "webdriver/tests/element_send_keys/send_keys.py": [
    "0d281d7f5cb3c2f3386504aedd66afa6269e65d3",
    "wdspec"
   ],
   "webdriver/tests/element_send_keys/user_prompts.py": [
-   "742b62438254914f946b3d49ae2eadfc56c7ac33",
+   "621bdffa6642817684f2a919d3c07103793522dd",
    "wdspec"
   ],
   "webdriver/tests/execute_async_script/__init__.py": [
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/execute_async_script/collections.py": [
    "8376fdfe061ca615ed12f632d878221ec5cb0e78",
    "wdspec"
   ],
   "webdriver/tests/execute_async_script/execute_async.py": [
    "2b6e61543176ea1ba15f4c399028ceb24878e3c9",
    "wdspec"
   ],
   "webdriver/tests/execute_async_script/user_prompts.py": [
-   "9486a5b28a48feb5d8d24e511b0c8632ad1996ae",
+   "5c873935519716a3f4933b710828118218f7220c",
    "wdspec"
   ],
   "webdriver/tests/execute_script/__init__.py": [
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/execute_script/collections.py": [
    "edee1e5532b81f07f19be09b40fc99c2df9ef6f8",
@@ -641616,17 +641642,17 @@
    "386afe0e1db172c310ff3aefa2bfb0078a79a966",
    "wdspec"
   ],
   "webdriver/tests/execute_script/json_serialize_windowproxy.py": [
    "9864227374e328514d54596f611c474c6f0fe3f3",
    "wdspec"
   ],
   "webdriver/tests/execute_script/user_prompts.py": [
-   "32146824e2c3a8c211bbc2eb9cce48ebccd5a28a",
+   "ec6895b74f8e110120f2e3067e2c35b94d2a532d",
    "wdspec"
   ],
   "webdriver/tests/find_element/__init__.py": [
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/find_element/find.py": [
    "ad25e78e9e47fa7ef45bbd1546eae9c7da78f436",
@@ -641664,29 +641690,29 @@
    "d5fd320a696a507718299c765278edde6205608a",
    "support"
   ],
   "webdriver/tests/forward/forward.py": [
    "a8c7e00b88dbf38db2e31f139e0e3853cc11a47a",
    "wdspec"
   ],
   "webdriver/tests/forward/user_prompts.py": [
-   "f5d2ef97e4d6e93ce305b67c436e2e463a109bd2",
+   "e5eea8f4457145eb5619fc28b625c7042b07e0b3",
    "wdspec"
   ],
   "webdriver/tests/fullscreen_window/__init__.py": [
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/fullscreen_window/fullscreen.py": [
    "5391114c67676ddf2e1aa907d1bd00da68edbe2f",
    "wdspec"
   ],
   "webdriver/tests/fullscreen_window/user_prompts.py": [
-   "f7b4eadaefa120547c29c5b45ac0c5fdafabaad4",
+   "04b8bc5337db7b9c7c5c9f4ea31eee1d3d681261",
    "wdspec"
   ],
   "webdriver/tests/get_active_element/__init__.py": [
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/get_active_element/get.py": [
    "ac4f14d37c0b9b8cf4afa1d1af4ce1c8027c77c9",
@@ -641704,45 +641730,45 @@
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/get_current_url/get.py": [
    "2758ba35474e508bb495579aa9e443781da496e6",
    "wdspec"
   ],
   "webdriver/tests/get_current_url/user_prompts.py": [
-   "1bda1ef489d41114316fa959431db8252b6a9c2f",
+   "f0e95d12f14e2fb4337476a8ccf883a06c0e5c5d",
    "wdspec"
   ],
   "webdriver/tests/get_element_attribute/get.py": [
    "02a155bcee6626037f424fbf387458ea46e6c206",
    "wdspec"
   ],
   "webdriver/tests/get_element_property/__init__.py": [
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/get_element_property/get.py": [
    "fde0f2a0cd6dd89b59287510f7049ceabac8d371",
    "wdspec"
   ],
   "webdriver/tests/get_element_property/user_prompts.py": [
-   "699a1002a1094797d948ecdcf8899ab8d56d8dde",
+   "16a098536fde814b5ac8bcb35dac01ac38649d2e",
    "wdspec"
   ],
   "webdriver/tests/get_element_tag_name/__init__.py": [
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/get_element_tag_name/get.py": [
    "d705ae1118f13c2472b6fa4b1c2fc44bb4967ec2",
    "wdspec"
   ],
   "webdriver/tests/get_element_tag_name/user_prompts.py": [
-   "b9c41a9b85ad2a76ddb2c04e5ce0817853086888",
+   "12a7d448421efde32b276d531b91089c4584a7e5",
    "wdspec"
   ],
   "webdriver/tests/get_element_text/__init__.py": [
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/get_element_text/get.py": [
    "9f660308bc29725299a083b0b71db80ed26ff8e8",
@@ -641768,69 +641794,69 @@
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/get_title/get.py": [
    "ed5b401c564ec7375b3be8ebd9124388f40062a7",
    "wdspec"
   ],
   "webdriver/tests/get_title/user_prompts.py": [
-   "8c010b484b860b52c29104dd54a331239fa6ab0c",
+   "76d65c088a0b94e8cefb976e91edf5492a5aae20",
    "wdspec"
   ],
   "webdriver/tests/get_window_rect/__init__.py": [
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/get_window_rect/get.py": [
    "ee2c9eefd3f4ff81ea1cb8f33be58cc17f332030",
    "wdspec"
   ],
   "webdriver/tests/get_window_rect/user_prompts.py": [
-   "17618c58e74d7495391a23ca0e7c0c8ed97b0735",
+   "37c8da6bd3838422441fc089d8191930dd2b3da6",
    "wdspec"
   ],
   "webdriver/tests/interface.html": [
    "d049d835f2fc60df7296da08a013793ae7e398ca",
    "testharness"
   ],
   "webdriver/tests/is_element_selected/__init__.py": [
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/is_element_selected/selected.py": [
    "76361e50563d1589d867d7b7ddd45a287ea1eb6c",
    "wdspec"
   ],
   "webdriver/tests/is_element_selected/user_prompts.py": [
-   "31c3920c8368ec5243386a17ad9b6d3102a15041",
+   "674f80c5c75b8aa66172af89c151a3e0dfccfc61",
    "wdspec"
   ],
   "webdriver/tests/maximize_window/__init__.py": [
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/maximize_window/maximize.py": [
    "77af0b99169a91eb99ff81236e6fa61a7f1399f7",
    "wdspec"
   ],
   "webdriver/tests/maximize_window/user_prompts.py": [
-   "6b588d63a8c1ab10bdfab2a29fd7b3cbc12d28ad",
+   "032edc893a53a05dd0272cc081265bde6f0b5d53",
    "wdspec"
   ],
   "webdriver/tests/minimize_window/__init__.py": [
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/minimize_window/minimize.py": [
    "be161a62e20191d41cd83ea2289cc5954af4892f",
    "wdspec"
   ],
   "webdriver/tests/minimize_window/user_prompts.py": [
-   "4fab864c52dfb5e5ba449450c97d4f979a3500cc",
+   "1551e3af62668971eeb5681301b6833cf6fb619b",
    "wdspec"
   ],
   "webdriver/tests/navigate_to/__init__.py": [
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/navigate_to/navigate.py": [
    "9ac1be00b84eeae73aec8a1facb43b329bae6310",
@@ -641900,17 +641926,17 @@
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/refresh/refresh.py": [
    "30bd369bba1bd9395cd70f0d6b8b70a3a955cb5d",
    "wdspec"
   ],
   "webdriver/tests/refresh/user_prompts.py": [
-   "6ec502747937cb5297b834e1137f56ff7cd75785",
+   "b8ef418e115fcdafa861dcb8f61b01116e9e81f6",
    "wdspec"
   ],
   "webdriver/tests/send_alert_text/__init__.py": [
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/send_alert_text/send.py": [
    "9d4532a190c717ad7997a5b9ccb6d8d32a2cb1fc",
@@ -641928,33 +641954,33 @@
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/set_window_rect/set.py": [
    "f7965681744745458296864fda6718ebe4b75a26",
    "wdspec"
   ],
   "webdriver/tests/set_window_rect/user_prompts.py": [
-   "9097931b0b30ccabf83d41a21bfa5bfd2da618e8",
+   "908a9d920f36d0b8664c6b906bc71b0aefba96e6",
    "wdspec"
   ],
   "webdriver/tests/status/__init__.py": [
    "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391",
    "support"
   ],
   "webdriver/tests/status/status.py": [
    "f0df16a1ee17d22e6293af049876810bb4073929",
    "wdspec"
   ],
   "webdriver/tests/support/__init__.py": [
    "e5e43c4e655170d57d3de7a85d4ebb639c31aee0",
    "support"
   ],
   "webdriver/tests/support/asserts.py": [
-   "1a7bbc2697c0cde846a59aaa08e656bd23cfaa43",
+   "44c76a96b0997100a0201a5ffafd20e9967758ec",
    "support"
   ],
   "webdriver/tests/support/fixtures.py": [
    "b4a29968b58cac2dfbe6b2ee28abdc3501cf4e01",
    "support"
   ],
   "webdriver/tests/support/http_request.py": [
    "ae28a6486b1333acc472ab2d37ac3f890249f1a2",
--- a/testing/web-platform/meta/webdriver/tests/element_send_keys/user_prompts.py.ini
+++ b/testing/web-platform/meta/webdriver/tests/element_send_keys/user_prompts.py.ini
@@ -1,12 +1,24 @@
 [user_prompts.py]
-  [test_handle_prompt_accept[capabilities0-alert-None\]]
+  [test_accept[capabilities0-alert-None\]]
+    expected: FAIL
+    bug: 1479368
+
+  [test_accept[capabilities0-confirm-True\]]
     expected: FAIL
     bug: 1479368
 
-  [test_handle_prompt_accept[capabilities0-confirm-True\]]
+  [test_accept[capabilities0-prompt-\]]
+    expected: FAIL
+    bug: 1479368
+
+  [test_dismiss[capabilities0-alert-None\]]
     expected: FAIL
     bug: 1479368
 
-  [test_handle_prompt_accept[capabilities0-prompt-\]]
+  [test_dismiss[capabilities0-confirm-False\]]
     expected: FAIL
     bug: 1479368
+
+  [test_dismiss[capabilities0-prompt-None\]]
+    expected: FAIL
+    bug: 1479368
--- a/testing/web-platform/tests/webdriver/tests/back/user_prompts.py
+++ b/testing/web-platform/tests/webdriver/tests/back/user_prompts.py
@@ -1,106 +1,119 @@
 # META: timeout=long
 
 import pytest
 
 from tests.support.asserts import assert_dialog_handled, assert_error, assert_success
 from tests.support.inline import inline
 
 
+def back(session):
+    return session.transport.send(
+        "POST", "session/{session_id}/back".format(**vars(session)))
+
+
 @pytest.fixture
 def pages(session):
     pages = [
         inline("<p id=1>"),
         inline("<p id=2>"),
     ]
 
     for page in pages:
         session.url = page
 
     return pages
 
 
-def back(session):
-    return session.transport.send(
-        "POST", "session/{session_id}/back".format(**vars(session)))
+@pytest.fixture
+def check_user_prompt_closed_without_exception(session, create_dialog, pages):
+    def check_user_prompt_closed_without_exception(dialog_type, retval):
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = back(session)
+        assert_success(response)
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert session.url == pages[0]
+
+    return check_user_prompt_closed_without_exception
+
+
+@pytest.fixture
+def check_user_prompt_closed_with_exception(session, create_dialog, pages):
+    def check_user_prompt_closed_with_exception(dialog_type, retval):
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = back(session)
+        assert_error(response, "unexpected alert open")
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert session.url == pages[1]
+
+    return check_user_prompt_closed_with_exception
+
+
+@pytest.fixture
+def check_user_prompt_not_closed_but_exception(session, create_dialog, pages):
+    def check_user_prompt_not_closed_but_exception(dialog_type):
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = back(session)
+        assert_error(response, "unexpected alert open")
+
+        assert session.alert.text == dialog_type
+        session.alert.dismiss()
+
+        assert session.url == pages[1]
+
+    return check_user_prompt_not_closed_but_exception
 
 
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
 @pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
-def test_handle_prompt_accept(session, create_dialog, dialog_type, pages):
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = back(session)
-    assert_success(response)
-
+def test_accept(check_user_prompt_closed_without_exception, dialog_type):
     # retval not testable for confirm and prompt because window is gone
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=None)
-
-    assert session.url == pages[0]
+    check_user_prompt_closed_without_exception(dialog_type, None)
 
 
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", True),
     ("prompt", ""),
 ])
-def test_handle_prompt_accept_and_notify(session, create_dialog, dialog_type, retval, pages):
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = back(session)
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert session.url == pages[1]
+def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
 @pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
 @pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
-def test_handle_prompt_dismiss(session, create_dialog, dialog_type, pages):
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = back(session)
-    assert_success(response)
-
+def test_dismiss(check_user_prompt_closed_without_exception, dialog_type):
     # retval not testable for confirm and prompt because window is gone
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=None)
-
-    assert session.url == pages[0]
+    check_user_prompt_closed_without_exception(dialog_type, None)
 
 
 @pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_dissmiss_and_notify(session, create_dialog, dialog_type, retval, pages):
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = back(session)
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert session.url == pages[1]
+def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_ignore():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
+    check_user_prompt_not_closed_but_exception(dialog_type)
 
 
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_default(session, create_dialog, dialog_type, retval, pages):
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = back(session)
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert session.url == pages[1]
+def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
--- a/testing/web-platform/tests/webdriver/tests/close_window/user_prompts.py
+++ b/testing/web-platform/tests/webdriver/tests/close_window/user_prompts.py
@@ -1,65 +1,119 @@
+# META: timeout=long
+
 import pytest
 
 from tests.support.asserts import assert_dialog_handled, assert_error, assert_success
 
 
 def close(session):
     return session.transport.send(
         "DELETE", "session/{session_id}/window".format(**vars(session)))
 
 
+@pytest.fixture
+def check_user_prompt_closed_without_exception(session, create_dialog, create_window):
+    def check_user_prompt_closed_without_exception(dialog_type, retval):
+        original_handle = session.window_handle
+        new_handle = create_window()
+        session.window_handle = new_handle
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = close(session)
+        assert_success(response)
+
+        # Asserting that the dialog was handled requires valid top-level browsing
+        # context, so we must switch to the original window.
+        session.window_handle = original_handle
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert new_handle not in session.handles
+
+    return check_user_prompt_closed_without_exception
+
+
+@pytest.fixture
+def check_user_prompt_closed_with_exception(session, create_dialog, create_window):
+    def check_user_prompt_closed_with_exception(dialog_type, retval):
+        new_handle = create_window()
+        session.window_handle = new_handle
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = close(session)
+        assert_error(response, "unexpected alert open")
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert new_handle in session.handles
+
+    return check_user_prompt_closed_with_exception
+
+
+@pytest.fixture
+def check_user_prompt_not_closed_but_exception(session, create_dialog, create_window):
+    def check_user_prompt_not_closed_but_exception(dialog_type):
+        new_handle = create_window()
+        session.window_handle = new_handle
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = close(session)
+        assert_error(response, "unexpected alert open")
+
+        assert session.alert.text == dialog_type
+        session.alert.dismiss()
+
+        assert new_handle in session.handles
+
+    return check_user_prompt_not_closed_but_exception
+
+
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
 @pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
-def test_handle_prompt_accept(session, create_dialog, create_window, dialog_type):
-    original_handle = session.window_handle
-    new_handle = create_window()
-    session.window_handle = new_handle
+def test_accept(check_user_prompt_closed_without_exception, dialog_type):
+    # retval not testable for confirm and prompt because window is gone
+    check_user_prompt_closed_without_exception(dialog_type, None)
 
-    create_dialog(dialog_type, text=dialog_type)
 
-    response = close(session)
-    assert_success(response)
-
-    # Asserting that the dialog was handled requires valid top-level browsing
-    # context, so we must switch to the original window.
-    session.window_handle = original_handle
-
-    # retval not testable for confirm and prompt because window is gone
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=None)
-
-    assert new_handle not in session.handles
+@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", True),
+    ("prompt", ""),
+])
+def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_accept_and_notify():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_dismiss(check_user_prompt_closed_without_exception, dialog_type):
+    # retval not testable for confirm and prompt because window is gone
+    check_user_prompt_closed_without_exception(dialog_type, None)
 
 
-def test_handle_prompt_dismiss():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_dismiss_and_notify():
-    """TODO"""
-
-
-def test_handle_prompt_ignore():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
+    check_user_prompt_not_closed_but_exception(dialog_type)
 
 
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_default(session, create_dialog, create_window, dialog_type, retval):
-    new_handle = create_window()
-    session.window_handle = new_handle
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = close(session)
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert new_handle in session.handles
+def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
--- a/testing/web-platform/tests/webdriver/tests/delete_cookie/user_prompts.py
+++ b/testing/web-platform/tests/webdriver/tests/delete_cookie/user_prompts.py
@@ -1,63 +1,119 @@
+# META: timeout=long
+
 import pytest
 
 from webdriver.error import NoSuchCookieException
 
 from tests.support.asserts import assert_dialog_handled, assert_error, assert_success
 
 
 def delete_cookie(session, name):
     return session.transport.send("DELETE", "/session/%s/cookie/%s" % (session.session_id, name))
 
 
+@pytest.fixture
+def check_user_prompt_closed_without_exception(session, create_dialog, create_cookie):
+    def check_user_prompt_closed_without_exception(dialog_type, retval):
+        create_cookie("foo", value="bar", path="/common/blank.html")
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = delete_cookie(session, "foo")
+        assert_success(response)
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        with pytest.raises(NoSuchCookieException):
+            assert session.cookies("foo")
+
+    return check_user_prompt_closed_without_exception
+
+
+@pytest.fixture
+def check_user_prompt_closed_with_exception(session, create_dialog, create_cookie):
+    def check_user_prompt_closed_with_exception(dialog_type, retval):
+        create_cookie("foo", value="bar", path="/common/blank.html")
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = delete_cookie(session, "foo")
+        assert_error(response, "unexpected alert open")
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert session.cookies("foo")
+
+    return check_user_prompt_closed_with_exception
+
+
+@pytest.fixture
+def check_user_prompt_not_closed_but_exception(session, create_dialog, create_cookie):
+    def check_user_prompt_not_closed_but_exception(dialog_type):
+        create_cookie("foo", value="bar", path="/common/blank.html")
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = delete_cookie(session, "foo")
+        assert_error(response, "unexpected alert open")
+
+        assert session.alert.text == dialog_type
+        session.alert.dismiss()
+
+        assert session.cookies("foo")
+
+    return check_user_prompt_not_closed_but_exception
+
+
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", True),
     ("prompt", ""),
 ])
-def test_handle_prompt_accept(session, create_cookie, create_dialog, dialog_type, retval):
-    create_cookie("foo", value="bar", path="/common/blank.html")
+def test_accept(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
-    create_dialog(dialog_type, text=dialog_type)
 
-    response = delete_cookie(session, "foo")
-    assert_success(response)
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    with pytest.raises(NoSuchCookieException):
-        assert session.cookies("foo")
+@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", True),
+    ("prompt", ""),
+])
+def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_accept_and_notify():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
 
-def test_handle_prompt_dismiss():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_dismiss_and_notify():
-    """TODO"""
-
-
-def test_handle_prompt_ignore():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
+    check_user_prompt_not_closed_but_exception(dialog_type)
 
 
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_default(session, create_cookie, create_dialog, dialog_type, retval):
-    cookie = create_cookie("foo", value="bar", path="/common/blank.html")
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = delete_cookie(session, "foo")
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert session.cookies("foo") == cookie
+def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
--- a/testing/web-platform/tests/webdriver/tests/element_send_keys/user_prompts.py
+++ b/testing/web-platform/tests/webdriver/tests/element_send_keys/user_prompts.py
@@ -1,67 +1,124 @@
+# META: timeout=long
+
 import pytest
 
 from tests.support.asserts import assert_dialog_handled, assert_error, assert_success
 from tests.support.inline import inline
 
 
 def element_send_keys(session, element, text):
     return session.transport.send(
         "POST", "/session/{session_id}/element/{element_id}/value".format(
             session_id=session.session_id,
             element_id=element.id),
         {"text": text})
 
 
+@pytest.fixture
+def check_user_prompt_closed_without_exception(session, create_dialog):
+    def check_user_prompt_closed_without_exception(dialog_type, retval):
+        session.url = inline("<input type=text>")
+        element = session.find.css("input", all=False)
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = element_send_keys(session, element, "foo")
+        assert_success(response)
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert element.property("value") == "foo"
+
+    return check_user_prompt_closed_without_exception
+
+
+@pytest.fixture
+def check_user_prompt_closed_with_exception(session, create_dialog):
+    def check_user_prompt_closed_with_exception(dialog_type, retval):
+        session.url = inline("<input type=text>")
+        element = session.find.css("input", all=False)
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = element_send_keys(session, element, "foo")
+        assert_error(response, "unexpected alert open")
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert element.property("value") == ""
+
+    return check_user_prompt_closed_with_exception
+
+
+@pytest.fixture
+def check_user_prompt_not_closed_but_exception(session, create_dialog):
+    def check_user_prompt_not_closed_but_exception(dialog_type):
+        session.url = inline("<input type=text>")
+        element = session.find.css("input", all=False)
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = element_send_keys(session, element, "foo")
+        assert_error(response, "unexpected alert open")
+
+        assert session.alert.text == dialog_type
+        session.alert.dismiss()
+
+        assert element.property("value") == ""
+
+    return check_user_prompt_not_closed_but_exception
+
+
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", True),
     ("prompt", ""),
 ])
-def test_handle_prompt_accept(session, create_dialog, dialog_type, retval):
-    session.url = inline("<input type=text>")
-    element = session.find.css("input", all=False)
+def test_accept(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
-    create_dialog(dialog_type, text=dialog_type)
 
-    response = element_send_keys(session, element, "foo")
-    assert_success(response)
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert element.property("value") == "foo"
+@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", True),
+    ("prompt", ""),
+])
+def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_accept_and_notify():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
 
-def test_handle_prompt_dismiss():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_dismiss_and_notify():
-    """TODO"""
-
-
-def test_handle_prompt_ignore():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
+    check_user_prompt_not_closed_but_exception(dialog_type)
 
 
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_default(session, create_dialog, dialog_type, retval):
-    session.url = inline("<input type=text>")
-    element = session.find.css("input", all=False)
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = element_send_keys(session, element, "foo")
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert element.property("value") == ""
+def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
--- a/testing/web-platform/tests/webdriver/tests/execute_async_script/user_prompts.py
+++ b/testing/web-platform/tests/webdriver/tests/execute_async_script/user_prompts.py
@@ -10,94 +10,109 @@ def execute_async_script(session, script
         args = []
     body = {"script": script, "args": args}
 
     return session.transport.send(
         "POST", "/session/{session_id}/execute/async".format(**vars(session)),
         body)
 
 
+@pytest.fixture
+def check_user_prompt_closed_without_exception(session, create_dialog):
+    def check_user_prompt_closed_without_exception(dialog_type, retval):
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = execute_async_script(session, "window.result = 1; arguments[0](1);")
+        assert_success(response, 1)
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert session.execute_script("return window.result;") == 1
+
+    return check_user_prompt_closed_without_exception
+
+
+@pytest.fixture
+def check_user_prompt_closed_with_exception(session, create_dialog):
+    def check_user_prompt_closed_with_exception(dialog_type, retval):
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = execute_async_script(session, "window.result = 1; arguments[0](1);")
+        assert_error(response, "unexpected alert open")
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert session.execute_script("return window.result;") is None
+
+    return check_user_prompt_closed_with_exception
+
+
+@pytest.fixture
+def check_user_prompt_not_closed_but_exception(session, create_dialog):
+    def check_user_prompt_not_closed_but_exception(dialog_type):
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = execute_async_script(session, "window.result = 1; arguments[0](1);")
+        assert_error(response, "unexpected alert open")
+
+        assert session.alert.text == dialog_type
+        session.alert.dismiss()
+
+        assert session.execute_script("return window.result;") is None
+
+    return check_user_prompt_not_closed_but_exception
+
+
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", True),
     ("prompt", ""),
 ])
-def test_handle_prompt_accept(session, create_dialog, dialog_type, retval):
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = execute_async_script(session, "window.result = 1; arguments[0](1);")
-    assert_success(response, 1)
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert session.execute_script("return window.result;") == 1
+def test_accept(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
 
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", True),
     ("prompt", ""),
 ])
-def test_handle_prompt_accept_and_notify(session, create_dialog, dialog_type, retval):
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = execute_async_script(session, "window.result = 1; arguments[0](1);")
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert session.execute_script("return window.result;") is None
+def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
 @pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_dismiss(session, create_dialog, dialog_type, retval):
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = execute_async_script(session, "window.result = 1; arguments[0](1);")
-    assert_success(response, 1)
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert session.execute_script("return window.result;") == 1
+def test_dismiss(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
 
 @pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_dissmiss_and_notify(session, create_dialog, dialog_type, retval):
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = execute_async_script(session, "window.result = 1; arguments[0](1);")
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert session.execute_script("return window.result;") is None
+def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_ignore():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
+    check_user_prompt_not_closed_but_exception(dialog_type)
 
 
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_default(session, create_dialog, dialog_type, retval):
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = execute_async_script(session, "window.result = 1; arguments[0](1);")
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert session.execute_script("return window.result;") is None
+def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
--- a/testing/web-platform/tests/webdriver/tests/execute_script/user_prompts.py
+++ b/testing/web-platform/tests/webdriver/tests/execute_script/user_prompts.py
@@ -11,94 +11,107 @@ def execute_script(session, script, args
     body = {"script": script, "args": args}
 
     return session.transport.send(
         "POST", "/session/{session_id}/execute/sync".format(
             session_id=session.session_id),
         body)
 
 
+@pytest.fixture
+def check_user_prompt_closed_without_exception(session, create_dialog):
+    def check_user_prompt_closed_without_exception(dialog_type, retval):
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = execute_script(session, "window.result = 1; return 1;")
+        assert_success(response, 1)
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert session.execute_script("return window.result;") == 1
+
+    return check_user_prompt_closed_without_exception
+
+
+@pytest.fixture
+def check_user_prompt_closed_with_exception(session, create_dialog):
+    def check_user_prompt_closed_with_exception(dialog_type, retval):
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = execute_script(session, "window.result = 1; return 1;")
+        assert_error(response, "unexpected alert open")
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert session.execute_script("return window.result;") is None
+
+    return check_user_prompt_closed_with_exception
+
+
+@pytest.fixture
+def check_user_prompt_not_closed_but_exception(session, create_dialog):
+    def check_user_prompt_not_closed_but_exception(dialog_type):
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = execute_script(session, "window.result = 1; return 1;")
+        assert_error(response, "unexpected alert open")
+
+        assert session.alert.text == dialog_type
+        session.alert.dismiss()
+
+        assert session.execute_script("return window.result;") is None
+
+    return check_user_prompt_not_closed_but_exception
+
+
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", True),
     ("prompt", ""),
 ])
-def test_handle_prompt_accept(session, create_dialog, dialog_type, retval):
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = execute_script(session, "window.result = 1; return 1;")
-    assert_success(response, 1)
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert session.execute_script("return window.result;") == 1
+def test_accept(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
 
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", True),
     ("prompt", ""),
 ])
-def test_handle_prompt_accept_and_notify(session, create_dialog, dialog_type, retval):
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = execute_script(session, "window.result = 1; return 1;")
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert session.execute_script("return window.result;") is None
+def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
 @pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_dismiss(session, create_dialog, dialog_type, retval):
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = execute_script(session, "window.result = 1; return 1;")
-    assert_success(response, 1)
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert session.execute_script("return window.result;") == 1
+def test_dismiss(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
 
 @pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_dissmiss_and_notify(session, create_dialog, dialog_type, retval):
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = execute_script(session, "window.result = 1; return 1;")
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert session.execute_script("return window.result;") is None
+def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_ignore():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
+    check_user_prompt_not_closed_but_exception(dialog_type)
 
 
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_default(session, create_dialog, dialog_type, retval):
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = execute_script(session, "window.result = 1; return 1;")
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert session.execute_script("return window.result;") is None
+def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
--- a/testing/web-platform/tests/webdriver/tests/forward/user_prompts.py
+++ b/testing/web-platform/tests/webdriver/tests/forward/user_prompts.py
@@ -1,108 +1,122 @@
 # META: timeout=long
 
 import pytest
 
 from tests.support.asserts import assert_dialog_handled, assert_error, assert_success
 from tests.support.inline import inline
 
 
+def forward(session):
+    return session.transport.send(
+        "POST", "session/{session_id}/forward".format(**vars(session)))
+
+
 @pytest.fixture
 def pages(session):
     pages = [
         inline("<p id=1>"),
         inline("<p id=2>"),
     ]
 
     for page in pages:
         session.url = page
 
     session.back()
 
     return pages
 
 
-def forward(session):
-    return session.transport.send(
-        "POST", "session/{session_id}/forward".format(**vars(session)))
+@pytest.fixture
+def check_user_prompt_closed_without_exception(session, create_dialog, pages):
+    def check_user_prompt_closed_without_exception(dialog_type, retval):
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = forward(session)
+        assert_success(response)
+
+        # retval not testable for confirm and prompt because window is gone
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=None)
+
+        assert session.url == pages[1]
+
+    return check_user_prompt_closed_without_exception
+
+
+@pytest.fixture
+def check_user_prompt_closed_with_exception(session, create_dialog, pages):
+    def check_user_prompt_closed_with_exception(dialog_type, retval):
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = forward(session)
+        assert_error(response, "unexpected alert open")
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert session.url == pages[0]
+
+    return check_user_prompt_closed_with_exception
+
+
+@pytest.fixture
+def check_user_prompt_not_closed_but_exception(session, create_dialog, pages):
+    def check_user_prompt_not_closed_but_exception(dialog_type):
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = forward(session)
+        assert_error(response, "unexpected alert open")
+
+        assert session.alert.text == dialog_type
+        session.alert.dismiss()
+
+        assert session.url == pages[0]
+
+    return check_user_prompt_not_closed_but_exception
 
 
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
 @pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
-def test_handle_prompt_accept(session, create_dialog, dialog_type, pages):
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = forward(session)
-    assert_success(response)
-
+def test_accept(check_user_prompt_closed_without_exception, dialog_type):
     # retval not testable for confirm and prompt because window is gone
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=None)
-
-    assert session.url == pages[1]
+    check_user_prompt_closed_without_exception(dialog_type, None)
 
 
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", True),
     ("prompt", ""),
 ])
-def test_handle_prompt_accept_and_notify(session, create_dialog, dialog_type, retval, pages):
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = forward(session)
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert session.url == pages[0]
+def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
 @pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
 @pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
-def test_handle_prompt_dismiss(session, create_dialog, dialog_type, pages):
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = forward(session)
-    assert_success(response)
-
+def test_dismiss(check_user_prompt_closed_without_exception, dialog_type):
     # retval not testable for confirm and prompt because window is gone
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=None)
-
-    assert session.url == pages[1]
+    check_user_prompt_closed_without_exception(dialog_type, None)
 
 
 @pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_dissmiss_and_notify(session, create_dialog, dialog_type, retval, pages):
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = forward(session)
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert session.url == pages[0]
+def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_ignore():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
+    check_user_prompt_not_closed_but_exception(dialog_type)
 
 
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_default(session, create_dialog, dialog_type, retval, pages):
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = forward(session)
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert session.url == pages[0]
+def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
--- a/testing/web-platform/tests/webdriver/tests/fullscreen_window/user_prompts.py
+++ b/testing/web-platform/tests/webdriver/tests/fullscreen_window/user_prompts.py
@@ -1,8 +1,10 @@
+# META: timeout=long
+
 import pytest
 
 from tests.support.asserts import assert_dialog_handled, assert_error, assert_success
 
 
 def fullscreen(session):
     return session.transport.send(
         "POST", "session/{session_id}/window/fullscreen".format(**vars(session)))
@@ -14,59 +16,113 @@ def is_fullscreen(session):
     #
     # Remove the prefixed fallback when
     # https://bugs.webkit.org/show_bug.cgi?id=158125 is fixed.
     return session.execute_script("""
         return !!(window.fullScreen || document.webkitIsFullScreen)
         """)
 
 
+@pytest.fixture
+def check_user_prompt_closed_without_exception(session, create_dialog):
+    def check_user_prompt_closed_without_exception(dialog_type, retval):
+        assert is_fullscreen(session) is False
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = fullscreen(session)
+        assert_success(response)
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert is_fullscreen(session) is True
+
+    return check_user_prompt_closed_without_exception
+
+
+@pytest.fixture
+def check_user_prompt_closed_with_exception(session, create_dialog):
+    def check_user_prompt_closed_with_exception(dialog_type, retval):
+        assert is_fullscreen(session) is False
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = fullscreen(session)
+        assert_error(response, "unexpected alert open")
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert is_fullscreen(session) is False
+
+    return check_user_prompt_closed_with_exception
+
+
+@pytest.fixture
+def check_user_prompt_not_closed_but_exception(session, create_dialog):
+    def check_user_prompt_not_closed_but_exception(dialog_type):
+        assert is_fullscreen(session) is False
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = fullscreen(session)
+        assert_error(response, "unexpected alert open")
+
+        assert session.alert.text == dialog_type
+        session.alert.dismiss()
+
+        assert is_fullscreen(session) is False
+
+    return check_user_prompt_not_closed_but_exception
+
+
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", True),
     ("prompt", ""),
 ])
-def test_handle_prompt_accept(session, create_dialog, dialog_type, retval):
-    assert is_fullscreen(session) is False
+def test_accept(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
-    create_dialog(dialog_type, text=dialog_type)
 
-    response = fullscreen(session)
-    assert_success(response)
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert is_fullscreen(session) is True
+@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", True),
+    ("prompt", ""),
+])
+def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_accept_and_notify():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
 
-def test_handle_prompt_dismiss():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_dismiss_and_notify():
-    """TODO"""
-
-
-def test_handle_prompt_ignore():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
+    check_user_prompt_not_closed_but_exception(dialog_type)
 
 
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_default(session, create_dialog, dialog_type, retval):
-    assert is_fullscreen(session) is False
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = fullscreen(session)
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert is_fullscreen(session) is False
+def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
--- a/testing/web-platform/tests/webdriver/tests/get_current_url/user_prompts.py
+++ b/testing/web-platform/tests/webdriver/tests/get_current_url/user_prompts.py
@@ -1,58 +1,112 @@
+# META: timeout=long
+
 import pytest
 
 from tests.support.asserts import assert_dialog_handled, assert_error, assert_success
 from tests.support.inline import inline
 
 
 def get_current_url(session):
     return session.transport.send("GET", "session/%s/url" % session.session_id)
 
 
+@pytest.fixture
+def check_user_prompt_closed_without_exception(session, create_dialog):
+    def check_user_prompt_closed_without_exception(dialog_type, retval):
+        session.url = inline("<p id=1>")
+        expected_url = session.url
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = get_current_url(session)
+        assert_success(response, expected_url)
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+    return check_user_prompt_closed_without_exception
+
+
+@pytest.fixture
+def check_user_prompt_closed_with_exception(session, create_dialog):
+    def check_user_prompt_closed_with_exception(dialog_type, retval):
+        session.url = inline("<p id=1>")
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = get_current_url(session)
+        assert_error(response, "unexpected alert open")
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+    return check_user_prompt_closed_with_exception
+
+
+@pytest.fixture
+def check_user_prompt_not_closed_but_exception(session, create_dialog):
+    def check_user_prompt_not_closed_but_exception(dialog_type):
+        session.url = inline("<p id=1>")
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = get_current_url(session)
+        assert_error(response, "unexpected alert open")
+
+        assert session.alert.text == dialog_type
+        session.alert.dismiss()
+
+    return check_user_prompt_not_closed_but_exception
+
+
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", True),
     ("prompt", ""),
 ])
-def test_handle_prompt_accept(session, create_dialog, dialog_type, retval):
-    session.url = inline("<p id=1>")
-    expected_url = session.url
+def test_accept(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
+
 
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = get_current_url(session)
-    assert_success(response, expected_url)
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", True),
+    ("prompt", ""),
+])
+def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_accept_and_notify():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
 
-def test_handle_prompt_dismiss():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_dismiss_and_notify():
-    """TODO"""
-
-
-def test_handle_prompt_ignore():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
+    check_user_prompt_not_closed_but_exception(dialog_type)
 
 
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_default(session, create_dialog, dialog_type, retval):
-    session.url = inline("<p id=1>")
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = get_current_url(session)
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
--- a/testing/web-platform/tests/webdriver/tests/get_element_property/user_prompts.py
+++ b/testing/web-platform/tests/webdriver/tests/get_element_property/user_prompts.py
@@ -1,75 +1,116 @@
+# META: timeout=long
+
 import pytest
 
 from tests.support.asserts import assert_error, assert_success, assert_dialog_handled
 from tests.support.inline import inline
 
 
-def get_property(session, element_id, name):
+def get_element_property(session, element_id, name):
     return session.transport.send(
         "GET", "session/{session_id}/element/{element_id}/property/{name}".format(
             session_id=session.session_id, element_id=element_id, name=name))
 
 
+@pytest.fixture
+def check_user_prompt_closed_without_exception(session, create_dialog):
+    def check_user_prompt_closed_without_exception(dialog_type, retval):
+        session.url = inline("<input id=foo>")
+        element = session.find.css("#foo", all=False)
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = get_element_property(session, element.id, "id")
+        assert_success(response, "foo")
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+    return check_user_prompt_closed_without_exception
+
+
+@pytest.fixture
+def check_user_prompt_closed_with_exception(session, create_dialog):
+    def check_user_prompt_closed_with_exception(dialog_type, retval):
+        session.url = inline("<input id=foo>")
+        element = session.find.css("#foo", all=False)
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = get_element_property(session, element.id, "id")
+        assert_error(response, "unexpected alert open")
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+    return check_user_prompt_closed_with_exception
+
+
+@pytest.fixture
+def check_user_prompt_not_closed_but_exception(session, create_dialog):
+    def check_user_prompt_not_closed_but_exception(dialog_type):
+        session.url = inline("<input id=foo>")
+        element = session.find.css("#foo", all=False)
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = get_element_property(session, element.id, "id")
+        assert_error(response, "unexpected alert open")
+
+        assert session.alert.text == dialog_type
+        session.alert.dismiss()
+
+    return check_user_prompt_not_closed_but_exception
+
+
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", True),
     ("prompt", ""),
 ])
-def test_handle_prompt_accept(session, create_dialog, dialog_type, retval):
-    session.url = inline("<input id=foo>")
-    element = session.find.css("#foo", all=False)
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = get_property(session, element.id, "id")
-    assert_success(response, "foo")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+def test_accept(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
 
-def test_handle_prompt_accept_and_notify():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", True),
+    ("prompt", ""),
+])
+def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
 @pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_dismiss(session, create_dialog, dialog_type, retval):
-    session.url = inline("<input id=foo>")
-    element = session.find.css("#foo", all=False)
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = get_property(session, element.id, "id")
-    assert_success(response, "foo")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+def test_dismiss(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
 
-def test_handle_prompt_dismiss_and_notify():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_ignore():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
+    check_user_prompt_not_closed_but_exception(dialog_type)
 
 
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_default(session, create_dialog, dialog_type, retval):
-    session.url = inline("<input id=foo>")
-    element = session.find.css("#foo", all=False)
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = get_property(session, element.id, "id")
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
--- a/testing/web-platform/tests/webdriver/tests/get_element_tag_name/user_prompts.py
+++ b/testing/web-platform/tests/webdriver/tests/get_element_tag_name/user_prompts.py
@@ -1,74 +1,115 @@
+# META: timeout=long
+
 import pytest
 
 from tests.support.asserts import assert_error, assert_success, assert_dialog_handled
 from tests.support.inline import inline
 
 
-def get_tag_name(session, element_id):
+def get_element_tag_name(session, element_id):
     return session.transport.send("GET", "session/{session_id}/element/{element_id}/name".format(
         session_id=session.session_id, element_id=element_id))
 
 
+@pytest.fixture
+def check_user_prompt_closed_without_exception(session, create_dialog):
+    def check_user_prompt_closed_without_exception(dialog_type, retval):
+        session.url = inline("<input id=foo>")
+        element = session.find.css("#foo", all=False)
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = get_element_tag_name(session, element.id)
+        assert_success(response, "input")
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+    return check_user_prompt_closed_without_exception
+
+
+@pytest.fixture
+def check_user_prompt_closed_with_exception(session, create_dialog):
+    def check_user_prompt_closed_with_exception(dialog_type, retval):
+        session.url = inline("<input id=foo>")
+        element = session.find.css("#foo", all=False)
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = get_element_tag_name(session, element.id)
+        assert_error(response, "unexpected alert open")
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+    return check_user_prompt_closed_with_exception
+
+
+@pytest.fixture
+def check_user_prompt_not_closed_but_exception(session, create_dialog):
+    def check_user_prompt_not_closed_but_exception(dialog_type):
+        session.url = inline("<input id=foo>")
+        element = session.find.css("#foo", all=False)
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = get_element_tag_name(session, element.id)
+        assert_error(response, "unexpected alert open")
+
+        assert session.alert.text == dialog_type
+        session.alert.dismiss()
+
+    return check_user_prompt_not_closed_but_exception
+
+
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", True),
     ("prompt", ""),
 ])
-def test_handle_prompt_accept(session, create_dialog, dialog_type, retval):
-    session.url = inline("<input id=foo>")
-    element = session.find.css("#foo", all=False)
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = get_tag_name(session, element.id)
-    assert_success(response, "input")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+def test_accept(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
 
-def test_handle_prompt_accept_and_notify():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", True),
+    ("prompt", ""),
+])
+def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
 @pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_dismiss(session, create_dialog, dialog_type, retval):
-    session.url = inline("<input id=foo>")
-    element = session.find.css("#foo", all=False)
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = get_tag_name(session, element.id)
-    assert_success(response, "input")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+def test_dismiss(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
 
-def test_handle_prompt_dismiss_and_notify():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_ignore():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
+    check_user_prompt_not_closed_but_exception(dialog_type)
 
 
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_default(session, create_dialog, dialog_type, retval):
-    session.url = inline("<input id=foo>")
-    element = session.find.css("#foo", all=False)
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = get_tag_name(session, element.id)
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
--- a/testing/web-platform/tests/webdriver/tests/get_title/user_prompts.py
+++ b/testing/web-platform/tests/webdriver/tests/get_title/user_prompts.py
@@ -1,81 +1,121 @@
+# META: timeout=long
+
 import pytest
 
 from tests.support.asserts import assert_dialog_handled, assert_error, assert_success
 from tests.support.inline import inline
 
 
 def get_title(session):
     return session.transport.send(
         "GET", "session/{session_id}/title".format(**vars(session)))
 
 
+@pytest.fixture
+def check_user_prompt_closed_without_exception(session, create_dialog):
+    def check_user_prompt_closed_without_exception(dialog_type, retval):
+        session.url = inline("<title>Foo</title>")
+        expected_title = session.title
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = get_title(session)
+        assert_success(response, expected_title)
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+    return check_user_prompt_closed_without_exception
+
+
+@pytest.fixture
+def check_user_prompt_closed_with_exception(session, create_dialog):
+    def check_user_prompt_closed_with_exception(dialog_type, retval):
+        session.url = inline("<title>Foo</title>")
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = get_title(session)
+        assert_error(response, "unexpected alert open")
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+    return check_user_prompt_closed_with_exception
+
+
+@pytest.fixture
+def check_user_prompt_not_closed_but_exception(session, create_dialog):
+    def check_user_prompt_not_closed_but_exception(dialog_type):
+        session.url = inline("<title>Foo</title>")
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = get_title(session)
+        assert_error(response, "unexpected alert open")
+
+        assert session.alert.text == dialog_type
+        session.alert.dismiss()
+
+    return check_user_prompt_not_closed_but_exception
+
+
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", True),
     ("prompt", ""),
 ])
-def test_handle_prompt_accept(session, create_dialog, dialog_type, retval):
-    session.url = inline("<title>Foo</title>")
-    expected_title = session.title
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = get_title(session)
-    assert_success(response, expected_title)
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+def test_accept(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
 
-def test_handle_prompt_accept_and_notify():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", True),
+    ("prompt", ""),
+])
+def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
 @pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_dismiss(session, create_dialog, dialog_type, retval):
-    session.url = inline("<title>Foo</title>")
-    expected_title = session.title
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = get_title(session)
-    assert_success(response, expected_title)
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+def test_dismiss(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
 
-def test_handle_prompt_dismiss_and_notify():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_ignore():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
+    check_user_prompt_not_closed_but_exception(dialog_type)
 
 
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_default(session, create_dialog, dialog_type, retval):
-    session.url = inline("<title>Foo</title>")
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = get_title(session)
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
 # The behavior of the `window.print` function is platform-dependent and may not
 # trigger the creation of a dialog at all. Therefore, this test should only be
 # run in contexts that support the dialog (a condition that may not be
 # determined automatically).
 # def test_title_with_non_simple_dialog(session):
 #    document = "<title>With non-simple dialog</title><h2>Hello</h2>"
--- a/testing/web-platform/tests/webdriver/tests/get_window_rect/user_prompts.py
+++ b/testing/web-platform/tests/webdriver/tests/get_window_rect/user_prompts.py
@@ -1,59 +1,111 @@
+# META: timeout=long
+
 import pytest
 
 from tests.support.asserts import assert_dialog_handled, assert_error, assert_success
 
 
 def get_window_rect(session):
     return session.transport.send(
         "GET", "session/{session_id}/window/rect".format(**vars(session)))
 
 
+@pytest.fixture
+def check_user_prompt_closed_without_exception(session, create_dialog):
+    def check_user_prompt_closed_without_exception(dialog_type, retval):
+        original_rect = session.window.rect
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = get_window_rect(session)
+        assert_success(response, original_rect)
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+    return check_user_prompt_closed_without_exception
+
+
+@pytest.fixture
+def check_user_prompt_closed_with_exception(session, create_dialog):
+    def check_user_prompt_closed_with_exception(dialog_type, retval):
+        original_rect = session.window.rect
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = get_window_rect(session)
+        assert_error(response, "unexpected alert open")
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert session.window.rect == original_rect
+
+    return check_user_prompt_closed_with_exception
+
+
+@pytest.fixture
+def check_user_prompt_not_closed_but_exception(session, create_dialog):
+    def check_user_prompt_not_closed_but_exception(dialog_type):
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = get_window_rect(session)
+        assert_error(response, "unexpected alert open")
+
+        assert session.alert.text == dialog_type
+        session.alert.dismiss()
+
+    return check_user_prompt_not_closed_but_exception
+
+
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", True),
     ("prompt", ""),
 ])
-def test_handle_prompt_accept(session, create_dialog, dialog_type, retval):
-    original_rect = session.window.rect
+def test_accept(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
-    create_dialog(dialog_type, text=dialog_type)
 
-    response = get_window_rect(session)
-    assert_success(response, original_rect)
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", True),
+    ("prompt", ""),
+])
+def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_accept_and_notify():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
 
-def test_handle_prompt_dismiss():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_dismiss_and_notify():
-    """TODO"""
-
-
-def test_handle_prompt_ignore():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
+    check_user_prompt_not_closed_but_exception(dialog_type)
 
 
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_default(session, create_dialog, dialog_type, retval):
-    original_rect = session.window.rect
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = get_window_rect(session)
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert session.window.rect == original_rect
+def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
--- a/testing/web-platform/tests/webdriver/tests/is_element_selected/user_prompts.py
+++ b/testing/web-platform/tests/webdriver/tests/is_element_selected/user_prompts.py
@@ -1,77 +1,118 @@
+# META: timeout=long
+
 import pytest
 
 from tests.support.asserts import assert_error, assert_dialog_handled, assert_success
 from tests.support.inline import inline
 
 
 def is_element_selected(session, element_id):
     return session.transport.send(
         "GET", "session/{session_id}/element/{element_id}/selected".format(
             session_id=session.session_id,
             element_id=element_id))
 
 
+@pytest.fixture
+def check_user_prompt_closed_without_exception(session, create_dialog):
+    def check_user_prompt_closed_without_exception(dialog_type, retval):
+        session.url = inline("<input id=foo type=checkbox checked>")
+        element = session.find.css("#foo", all=False)
+        element.send_keys("foo")
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = is_element_selected(session, element.id)
+        assert_success(response, True)
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+    return check_user_prompt_closed_without_exception
+
+
+@pytest.fixture
+def check_user_prompt_closed_with_exception(session, create_dialog):
+    def check_user_prompt_closed_with_exception(dialog_type, retval):
+        session.url = inline("<input id=foo type=checkbox checked>")
+        element = session.find.css("#foo", all=False)
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = is_element_selected(session, element.id)
+        assert_error(response, "unexpected alert open")
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+    return check_user_prompt_closed_with_exception
+
+
+@pytest.fixture
+def check_user_prompt_not_closed_but_exception(session, create_dialog):
+    def check_user_prompt_not_closed_but_exception(dialog_type):
+        session.url = inline("<input id=foo type=checkbox checked>")
+        element = session.find.css("#foo", all=False)
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = is_element_selected(session, element.id)
+        assert_error(response, "unexpected alert open")
+
+        assert session.alert.text == dialog_type
+        session.alert.dismiss()
+
+    return check_user_prompt_not_closed_but_exception
+
+
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", True),
     ("prompt", ""),
 ])
-def test_handle_prompt_accept(session, create_dialog, dialog_type, retval):
-    session.url = inline("<input id=foo type=checkbox checked>")
-    element = session.find.css("#foo", all=False)
-    element.send_keys("foo")
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = is_element_selected(session, element.id)
-    assert_success(response, True)
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+def test_accept(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
 
-def test_handle_prompt_accept_and_notify():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", True),
+    ("prompt", ""),
+])
+def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
 @pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_dismiss(session, create_dialog, dialog_type, retval):
-    session.url = inline("<input id=foo type=checkbox checked>")
-    element = session.find.css("#foo", all=False)
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = is_element_selected(session, element.id)
-    assert_success(response, True)
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+def test_dismiss(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
 
-def test_handle_prompt_dismiss_and_notify():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_ignore():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
+    check_user_prompt_not_closed_but_exception(dialog_type)
 
 
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_default(session, create_dialog, dialog_type, retval):
-    session.url = inline("<input id=foo type=checkbox checked>")
-    element = session.find.css("#foo", all=False)
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = is_element_selected(session, element.id)
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
--- a/testing/web-platform/tests/webdriver/tests/maximize_window/user_prompts.py
+++ b/testing/web-platform/tests/webdriver/tests/maximize_window/user_prompts.py
@@ -1,61 +1,117 @@
+# META: timeout=long
+
 import pytest
 
 from tests.support.asserts import assert_dialog_handled, assert_error, assert_success
 
 
 def maximize(session):
     return session.transport.send(
         "POST", "session/{session_id}/window/maximize".format(**vars(session)))
 
 
+@pytest.fixture
+def check_user_prompt_closed_without_exception(session, create_dialog):
+    def check_user_prompt_closed_without_exception(dialog_type, retval):
+        original_size = session.window.size
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = maximize(session)
+        assert_success(response)
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert session.window.size != original_size
+
+    return check_user_prompt_closed_without_exception
+
+
+@pytest.fixture
+def check_user_prompt_closed_with_exception(session, create_dialog):
+    def check_user_prompt_closed_with_exception(dialog_type, retval):
+        original_size = session.window.size
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = maximize(session)
+        assert_error(response, "unexpected alert open")
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert session.window.size == original_size
+
+    return check_user_prompt_closed_with_exception
+
+
+@pytest.fixture
+def check_user_prompt_not_closed_but_exception(session, create_dialog):
+    def check_user_prompt_not_closed_but_exception(dialog_type):
+        original_size = session.window.size
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = maximize(session)
+        assert_error(response, "unexpected alert open")
+
+        assert session.alert.text == dialog_type
+        session.alert.dismiss()
+
+        assert session.window.size == original_size
+
+    return check_user_prompt_not_closed_but_exception
+
+
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", True),
     ("prompt", ""),
 ])
-def test_handle_prompt_accept(session, create_dialog, dialog_type, retval):
-    original_size = session.window.size
+def test_accept(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
-    create_dialog(dialog_type, text=dialog_type)
 
-    response = maximize(session)
-    assert_success(response)
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert session.window.size != original_size
+@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", True),
+    ("prompt", ""),
+])
+def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_accept_and_notify():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
 
-def test_handle_prompt_dismiss():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_dismiss_and_notify():
-    """TODO"""
-
-
-def test_handle_prompt_ignore():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
+    check_user_prompt_not_closed_but_exception(dialog_type)
 
 
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_default(session, create_dialog, dialog_type, retval):
-    original_size = session.window.size
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = maximize(session)
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert session.window.size == original_size
+def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
--- a/testing/web-platform/tests/webdriver/tests/minimize_window/user_prompts.py
+++ b/testing/web-platform/tests/webdriver/tests/minimize_window/user_prompts.py
@@ -1,65 +1,121 @@
+# META: timeout=long
+
 import pytest
 
 from tests.support.asserts import assert_dialog_handled, assert_error, assert_success
 
 
 def minimize(session):
     return session.transport.send(
         "POST", "session/{session_id}/window/minimize".format(**vars(session)))
 
 
 def is_minimized(session):
     return session.execute_script("return document.hidden")
 
 
+@pytest.fixture
+def check_user_prompt_closed_without_exception(session, create_dialog):
+    def check_user_prompt_closed_without_exception(dialog_type, retval):
+        assert not is_minimized(session)
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = minimize(session)
+        assert_success(response)
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert is_minimized(session)
+
+    return check_user_prompt_closed_without_exception
+
+
+@pytest.fixture
+def check_user_prompt_closed_with_exception(session, create_dialog):
+    def check_user_prompt_closed_with_exception(dialog_type, retval):
+        assert not is_minimized(session)
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = minimize(session)
+        assert_error(response, "unexpected alert open")
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert not is_minimized(session)
+
+    return check_user_prompt_closed_with_exception
+
+
+@pytest.fixture
+def check_user_prompt_not_closed_but_exception(session, create_dialog):
+    def check_user_prompt_not_closed_but_exception(dialog_type):
+        assert not is_minimized(session)
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = minimize(session)
+        assert_error(response, "unexpected alert open")
+
+        assert session.alert.text == dialog_type
+        session.alert.dismiss()
+
+        assert not is_minimized(session)
+
+    return check_user_prompt_not_closed_but_exception
+
+
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", True),
     ("prompt", ""),
 ])
-def test_handle_prompt_accept(session, create_dialog, dialog_type, retval):
-    assert not is_minimized(session)
+def test_accept(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
-    create_dialog(dialog_type, text=dialog_type)
 
-    response = minimize(session)
-    assert_success(response)
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert is_minimized(session)
+@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", True),
+    ("prompt", ""),
+])
+def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_accept_and_notify():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
 
-def test_handle_prompt_dismiss():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_dismiss_and_notify():
-    """TODO"""
-
-
-def test_handle_prompt_ignore():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
+    check_user_prompt_not_closed_but_exception(dialog_type)
 
 
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_default(session, create_dialog, dialog_type, retval):
-    assert not is_minimized(session)
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = minimize(session)
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert not is_minimized(session)
+def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
--- a/testing/web-platform/tests/webdriver/tests/refresh/user_prompts.py
+++ b/testing/web-platform/tests/webdriver/tests/refresh/user_prompts.py
@@ -8,105 +8,111 @@ from tests.support.inline import inline
 from tests.support.asserts import assert_dialog_handled, assert_error, assert_success
 
 
 def refresh(session):
     return session.transport.send(
         "POST", "session/{session_id}/refresh".format(**vars(session)))
 
 
+@pytest.fixture
+def check_user_prompt_closed_without_exception(session, create_dialog):
+    def check_user_prompt_closed_without_exception(dialog_type, retval):
+        session.url = inline("<div id=foo>")
+        element = session.find.css("#foo", all=False)
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = refresh(session)
+        assert_success(response)
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        with pytest.raises(StaleElementReferenceException):
+            element.property("id")
+
+    return check_user_prompt_closed_without_exception
+
+
+@pytest.fixture
+def check_user_prompt_closed_with_exception(session, create_dialog):
+    def check_user_prompt_closed_with_exception(dialog_type, retval):
+        session.url = inline("<div id=foo>")
+        element = session.find.css("#foo", all=False)
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = refresh(session)
+        assert_error(response, "unexpected alert open")
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert element.property("id") == "foo"
+
+    return check_user_prompt_closed_with_exception
+
+
+@pytest.fixture
+def check_user_prompt_not_closed_but_exception(session, create_dialog):
+    def check_user_prompt_not_closed_but_exception(dialog_type):
+        session.url = inline("<div id=foo>")
+        element = session.find.css("#foo", all=False)
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = refresh(session)
+        assert_error(response, "unexpected alert open")
+
+        assert session.alert.text == dialog_type
+        session.alert.dismiss()
+
+        assert element.property("id") == "foo"
+
+    return check_user_prompt_not_closed_but_exception
+
+
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
 @pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
-def test_handle_prompt_accept(session, create_dialog, dialog_type):
-    session.url = inline("<div id=foo>")
-    element = session.find.css("#foo", all=False)
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = refresh(session)
-    assert_success(response)
-
+def test_accept(check_user_prompt_closed_without_exception, dialog_type):
     # retval not testable for confirm and prompt because window has been reloaded
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=None)
-
-    with pytest.raises(StaleElementReferenceException):
-        element.property("id")
+    check_user_prompt_closed_without_exception(dialog_type, None)
 
 
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", True),
     ("prompt", ""),
 ])
-def test_handle_prompt_accept_and_notify(session, create_dialog, dialog_type, retval):
-    session.url = inline("<div id=foo>")
-    element = session.find.css("#foo", all=False)
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = refresh(session)
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert element.property("id") == "foo"
+def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
 @pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
 @pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
-def test_handle_prompt_dismiss(session, create_dialog, dialog_type):
-    session.url = inline("<div id=foo>")
-    element = session.find.css("#foo", all=False)
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = refresh(session)
-    assert_success(response)
-
+def test_dismiss(check_user_prompt_closed_without_exception, dialog_type):
     # retval not testable for confirm and prompt because window has been reloaded
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=None)
-
-    with pytest.raises(StaleElementReferenceException):
-        element.property("id")
+    check_user_prompt_closed_without_exception(dialog_type, None)
 
 
 @pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_dissmiss_and_notify(session, create_dialog, dialog_type, retval):
-    session.url = inline("<div id=foo>")
-    element = session.find.css("#foo", all=False)
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = refresh(session)
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert element.property("id") == "foo"
+def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_ignore():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
+    check_user_prompt_not_closed_but_exception(dialog_type)
 
 
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_default(session, create_dialog, dialog_type, retval):
-    session.url = inline("<div id=foo>")
-    element = session.find.css("#foo", all=False)
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = refresh(session)
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert element.property("id") == "foo"
+def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
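The refresh tests above now delegate to pytest fixtures that return closures, so each unhandledPromptBehavior capability needs only a one-line test body. Below is a minimal, self-contained sketch of that pattern, assuming only that pytest is installed; the names make_check and fake_command are illustrative stand-ins, not part of the wdspec harness.

    import pytest


    def fake_command(dialog_type):
        # Stand-in for a WebDriver command; the real tests send an HTTP
        # request to the remote end and inspect the response.
        return {"alert": None, "confirm": True, "prompt": ""}[dialog_type]


    @pytest.fixture
    def make_check():
        # Returning an inner function lets parametrized tests share the same
        # setup and assertion code while passing different arguments per case.
        def check(dialog_type, expected_retval):
            assert fake_command(dialog_type) == expected_retval
        return check


    @pytest.mark.parametrize("dialog_type, retval", [
        ("alert", None),
        ("confirm", True),
        ("prompt", ""),
    ])
    def test_accept(make_check, dialog_type, retval):
        make_check(dialog_type, retval)
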
--- a/testing/web-platform/tests/webdriver/tests/set_window_rect/user_prompts.py
+++ b/testing/web-platform/tests/webdriver/tests/set_window_rect/user_prompts.py
@@ -1,64 +1,121 @@
+# META: timeout=long
+
 import pytest
 
 from tests.support.asserts import assert_dialog_handled, assert_error, assert_success
 
 
 def set_window_rect(session, rect):
     return session.transport.send(
         "POST", "session/{session_id}/window/rect".format(**vars(session)),
         rect)
 
 
+@pytest.fixture
+def check_user_prompt_closed_without_exception(session, create_dialog):
+    def check_user_prompt_closed_without_exception(dialog_type, retval):
+        original_rect = session.window.rect
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = set_window_rect(session, {
+            "x": original_rect["x"] + 10, "y": original_rect["y"] + 10})
+        assert_success(response)
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert session.window.rect != original_rect
+
+    return check_user_prompt_closed_without_exception
+
+
+@pytest.fixture
+def check_user_prompt_closed_with_exception(session, create_dialog):
+    def check_user_prompt_closed_with_exception(dialog_type, retval):
+        original_rect = session.window.rect
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = set_window_rect(session, {
+            "x": original_rect["x"] + 10, "y": original_rect["y"] + 10})
+        assert_error(response, "unexpected alert open")
+
+        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
+
+        assert session.window.rect == original_rect
+
+    return check_user_prompt_closed_with_exception
+
+
+@pytest.fixture
+def check_user_prompt_not_closed_but_exception(session, create_dialog):
+    def check_user_prompt_not_closed_but_exception(dialog_type):
+        original_rect = session.window.rect
+
+        create_dialog(dialog_type, text=dialog_type)
+
+        response = set_window_rect(session, {
+            "x": original_rect["x"] + 10, "y": original_rect["y"] + 10})
+        assert_error(response, "unexpected alert open")
+
+        assert session.alert.text == dialog_type
+        session.alert.dismiss()
+
+        assert session.window.rect == original_rect
+
+    return check_user_prompt_not_closed_but_exception
+
+
 @pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", True),
     ("prompt", ""),
 ])
-def test_handle_prompt_accept(session, create_dialog, dialog_type, retval):
-    original_rect = session.window.rect
+def test_accept(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
-    create_dialog(dialog_type, text=dialog_type)
 
-    response = set_window_rect(session, {
-        "x": original_rect["x"] + 10, "y": original_rect["y"] + 10})
-    assert_success(response)
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert session.window.rect != original_rect
+@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", True),
+    ("prompt", ""),
+])
+def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_accept_and_notify():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss(check_user_prompt_closed_without_exception, dialog_type, retval):
+    check_user_prompt_closed_without_exception(dialog_type, retval)
 
 
-def test_handle_prompt_dismiss():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
+@pytest.mark.parametrize("dialog_type, retval", [
+    ("alert", None),
+    ("confirm", False),
+    ("prompt", None),
+])
+def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
 
 
-def test_handle_prompt_dismiss_and_notify():
-    """TODO"""
-
-
-def test_handle_prompt_ignore():
-    """TODO"""
+@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
+@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
+def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
+    check_user_prompt_not_closed_but_exception(dialog_type)
 
 
 @pytest.mark.parametrize("dialog_type, retval", [
     ("alert", None),
     ("confirm", False),
     ("prompt", None),
 ])
-def test_handle_prompt_default(session, create_dialog, dialog_type, retval):
-    original_rect = session.window.rect
-
-    create_dialog(dialog_type, text=dialog_type)
-
-    response = set_window_rect(session, {
-        "x": original_rect["x"] + 10, "y": original_rect["y"] + 10})
-    assert_error(response, "unexpected alert open")
-
-    assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
-
-    assert session.window.rect == original_rect
+def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
+    check_user_prompt_closed_with_exception(dialog_type, retval)
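Taken together, the set_window_rect tests above encode one row per unhandledPromptBehavior value: whether the prompt ends up closed and whether the command reports "unexpected alert open". The table below is an illustrative summary derived from these tests, not an API of the harness.

    # Behavior matrix as asserted by the tests in this file (illustrative only).
    EXPECTED = {
        "accept":             ("closed",    "success"),
        "accept and notify":  ("closed",    "unexpected alert open"),
        "dismiss":            ("closed",    "success"),
        "dismiss and notify": ("closed",    "unexpected alert open"),
        "ignore":             ("left open", "unexpected alert open"),
        "default (unset)":    ("closed",    "unexpected alert open"),
    }

    for behavior, (prompt_state, result) in EXPECTED.items():
        print("{:20s} prompt {:10s} command: {}".format(behavior, prompt_state, result))
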
--- a/testing/web-platform/tests/webdriver/tests/support/asserts.py
+++ b/testing/web-platform/tests/webdriver/tests/support/asserts.py
@@ -68,17 +68,17 @@ def assert_success(response, value=None)
 
 def assert_dialog_handled(session, expected_text, expected_retval):
     # If there were any existing dialogs prior to the creation of this
     # fixture's dialog, then the "Get Alert Text" command will return
     # successfully. In that case, the text must be different than that
     # of this fixture's dialog.
     try:
         assert session.alert.text != expected_text, (
-            "User prompt with text '%s' was not handled." % expected_text)
+            "User prompt with text '{}' was not handled.".format(expected_text))
 
     except NoSuchAlertException:
         # If dialog has been closed and no other one is open, check its return value
         prompt_retval = session.execute_script(" return window.dialog_return_value;")
         assert prompt_retval == expected_retval
 
 
 def assert_files_uploaded(session, element, files):
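
The only change to assert_dialog_handled is the switch from %-style string interpolation to str.format. A quick standalone check that both spellings produce the same message (the sample text is illustrative only):

    expected_text = "confirm"
    old_style = "User prompt with text '%s' was not handled." % expected_text
    new_style = "User prompt with text '{}' was not handled.".format(expected_text)
    assert old_style == new_style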