Bug 1482083 [wpt PR 12377] - Replace document.origin with self.origin in cookie-helper.sub.js, a=testonly
author: Philip Jägenstedt <philip@foolip.org>
date: Wed, 15 Aug 2018 01:00:36 +0000
changeset 486808 d27af016f4fc054d00c7cd424d7f6392362b62e8
parent 486807 1ee204d546351e6558e57d091f36c57cecfd1ef1
child 486809 0af3cd1d119b4aca7292fe54ed0985fa9494bb58
push id: 9719
push user: ffxbld-merge
push date: Fri, 24 Aug 2018 17:49:46 +0000
treeherder: mozilla-beta@719ec98fba77
reviewers: testonly
bugs: 1482083, 12377
milestone: 63.0a1
Bug 1482083 [wpt PR 12377] - Replace document.origin with self.origin in cookie-helper.sub.js, a=testonly

Automatic update from web-platform-tests:

Replace document.origin with self.origin in cookie-helper.sub.js (#12377)

In anticipation of https://github.com/whatwg/dom/issues/410

wpt-commits: 0e1ac363581d4bf0851a00a5563619bfef622fe4
wpt-pr: 12377
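For context, a minimal sketch of the substitution (illustrative, not part of the changeset): `self.origin` is exposed on every global scope, so the helper keeps working if it is ever loaded in a worker, while `document.origin` exists only on documents and is slated for removal per the linked DOM issue.

```
// Illustrative sketch only, not part of this changeset.
// self.origin is available in both window and worker globals:
console.log(self.origin);   // e.g. "https://example.com"

// document.origin returned the same serialization, but it existed only on
// Document and is being removed (whatwg/dom#410), so shared test helpers
// prefer self.origin.
```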
testing/web-platform/meta/MANIFEST.json
testing/web-platform/tests/cookies/resources/cookie-helper.sub.js
testing/web-platform/tests/docs/_writing-tests/testharness-api.md
testing/web-platform/tests/lint.whitelist
testing/web-platform/tests/resources/test/conftest.py
testing/web-platform/tests/resources/test/tests/functional/add_cleanup_async.html
testing/web-platform/tests/resources/test/tests/functional/add_cleanup_async_bad_return.html
testing/web-platform/tests/resources/test/tests/functional/add_cleanup_async_rejection.html
testing/web-platform/tests/resources/test/tests/functional/add_cleanup_async_rejection_after_load.html
testing/web-platform/tests/resources/test/tests/functional/add_cleanup_async_timeout.html
testing/web-platform/tests/resources/test/tests/functional/add_cleanup_bad_return.html
testing/web-platform/tests/resources/test/tests/functional/add_cleanup_count.html
testing/web-platform/tests/resources/test/tests/functional/add_cleanup_err.html
testing/web-platform/tests/resources/test/tests/functional/add_cleanup_err_multi.html
testing/web-platform/tests/resources/test/tests/functional/add_cleanup_sync_queue.html
testing/web-platform/tests/resources/test/tests/functional/iframe-consolidate-errors.html
testing/web-platform/tests/resources/test/tests/functional/promise-with-sync.html
testing/web-platform/tests/resources/test/tests/functional/queue.html
testing/web-platform/tests/resources/test/tests/functional/task-scheduling-promise-test.html
testing/web-platform/tests/resources/test/tests/functional/task-scheduling-test.html
testing/web-platform/tests/resources/test/tests/functional/worker-dedicated.html
testing/web-platform/tests/resources/test/tests/functional/worker-dedicated.sub.html
testing/web-platform/tests/resources/test/tests/unit/exceptional-cases.html
testing/web-platform/tests/resources/testharness.js
--- a/testing/web-platform/meta/MANIFEST.json
+++ b/testing/web-platform/meta/MANIFEST.json
@@ -294953,31 +294953,66 @@
      {}
     ]
    ],
    "resources/test/tests/functional/add_cleanup.html": [
     [
      {}
     ]
    ],
+   "resources/test/tests/functional/add_cleanup_async.html": [
+    [
+     {}
+    ]
+   ],
+   "resources/test/tests/functional/add_cleanup_async_bad_return.html": [
+    [
+     {}
+    ]
+   ],
+   "resources/test/tests/functional/add_cleanup_async_rejection.html": [
+    [
+     {}
+    ]
+   ],
+   "resources/test/tests/functional/add_cleanup_async_rejection_after_load.html": [
+    [
+     {}
+    ]
+   ],
+   "resources/test/tests/functional/add_cleanup_async_timeout.html": [
+    [
+     {}
+    ]
+   ],
+   "resources/test/tests/functional/add_cleanup_bad_return.html": [
+    [
+     {}
+    ]
+   ],
    "resources/test/tests/functional/add_cleanup_count.html": [
     [
      {}
     ]
    ],
    "resources/test/tests/functional/add_cleanup_err.html": [
     [
      {}
     ]
    ],
    "resources/test/tests/functional/add_cleanup_err_multi.html": [
     [
      {}
     ]
    ],
+   "resources/test/tests/functional/add_cleanup_sync_queue.html": [
+    [
+     {}
+    ]
+   ],
    "resources/test/tests/functional/api-tests-1.html": [
     [
      {}
     ]
    ],
    "resources/test/tests/functional/api-tests-2.html": [
     [
      {}
@@ -295063,21 +295098,31 @@
      {}
     ]
    ],
    "resources/test/tests/functional/promise-async.html": [
     [
      {}
     ]
    ],
+   "resources/test/tests/functional/promise-with-sync.html": [
+    [
+     {}
+    ]
+   ],
    "resources/test/tests/functional/promise.html": [
     [
      {}
     ]
    ],
+   "resources/test/tests/functional/queue.html": [
+    [
+     {}
+    ]
+   ],
    "resources/test/tests/functional/single-page-test-fail.html": [
     [
      {}
     ]
    ],
    "resources/test/tests/functional/single-page-test-no-assertions.html": [
     [
      {}
@@ -295088,32 +295133,42 @@
      {}
     ]
    ],
    "resources/test/tests/functional/single-page-test-pass.html": [
     [
      {}
     ]
    ],
+   "resources/test/tests/functional/task-scheduling-promise-test.html": [
+    [
+     {}
+    ]
+   ],
+   "resources/test/tests/functional/task-scheduling-test.html": [
+    [
+     {}
+    ]
+   ],
    "resources/test/tests/functional/uncaught-exception-handle.html": [
     [
      {}
     ]
    ],
    "resources/test/tests/functional/uncaught-exception-ignore.html": [
     [
      {}
     ]
    ],
    "resources/test/tests/functional/worker-dedicated-uncaught-single.html": [
     [
      {}
     ]
    ],
-   "resources/test/tests/functional/worker-dedicated.html": [
+   "resources/test/tests/functional/worker-dedicated.sub.html": [
     [
      {}
     ]
    ],
    "resources/test/tests/functional/worker-error.js": [
     [
      {}
     ]
@@ -295198,16 +295253,21 @@
      {}
     ]
    ],
    "resources/test/tests/unit/basic.html": [
     [
      {}
     ]
    ],
+   "resources/test/tests/unit/exceptional-cases.html": [
+    [
+     {}
+    ]
+   ],
    "resources/test/tox.ini": [
     [
      {}
     ]
    ],
    "resources/test/variants.js": [
     [
      {}
@@ -453631,17 +453691,17 @@
    "4b413e9ce3ab5accf7b51844a8d963b810d751dd",
    "testharness"
   ],
   "cookies/prefix/document-cookie.non-secure.html": [
    "bc6832b15ea88463d0b0fdb5cea4b1987e6a6c0d",
    "testharness"
   ],
   "cookies/resources/cookie-helper.sub.js": [
-   "852fbb6bb64726bec094a2b3ec5cf17d289116b0",
+   "428cab042e4fd0ac3f67180587f4f42e42643049",
    "support"
   ],
   "cookies/resources/drop.py": [
    "7491dad2fb52a0b73104746e98dd0ca6de685cec",
    "support"
   ],
   "cookies/resources/dropSameSite.py": [
    "803dbeb82cdaffa415b47dbc5560617d3c71f0bc",
@@ -578839,17 +578899,17 @@
    "7bf9e9aa9e6c6c7a8c057c9dbe4151194ca3edd0",
    "support"
   ],
   "docs/_writing-tests/testdriver.md": [
    "5cbe0bc377b4dafde60bec06c77aba83baebe9db",
    "support"
   ],
   "docs/_writing-tests/testharness-api.md": [
-   "92a9536b69030fd8bb930f6c325a4e2bf4a2eaf7",
+   "bb5524532915a58e4fab3c3bb89a41bbe2a46b4a",
    "support"
   ],
   "docs/_writing-tests/testharness.md": [
    "e91302f1d214fb7fd05a9dce2676569321fc2206",
    "support"
   ],
   "docs/_writing-tests/visual.md": [
    "2d46fbe10dd1be3a6e16dcc75c8b2f8d2b695d82",
@@ -605711,17 +605771,17 @@
    "403d01060821820b9d3a772b28a826698c8796e6",
    "support"
   ],
   "lifecycle/resources/window.html": [
    "69fdbc0986633793d501a4bddfee9e88f76e1348",
    "support"
   ],
   "lint.whitelist": [
-   "de9e36cf12ff4b5b3158f2f383aae70753043d2f",
+   "bd9bd46c52250849267c8a9fa6c8f72129f20140",
    "support"
   ],
   "longtask-timing/META.yml": [
    "91c07f9fd3f3097367f2ad87a2ebb0d98b11d4e2",
    "support"
   ],
   "longtask-timing/idlharness.window.js": [
    "96d939f04e7fbba348bf1edccf64c17c7af0535d",
@@ -625639,41 +625699,69 @@
    "9d7fa76a7d65f63ce8a3844920388ffb39b83bc3",
    "support"
   ],
   "resources/test/README.md": [
    "b756b91797f71ca3b6c58b31d112b8bdab112fe6",
    "support"
   ],
   "resources/test/conftest.py": [
-   "801c97a1f9299805abe8f25726a2d4feb374f7d0",
+   "8765bf835dfc241d5cbd15e9697de465d4289bfb",
    "support"
   ],
   "resources/test/harness.html": [
    "5ee0f285e83bc7c110db61d3554f2f186c204edf",
    "support"
   ],
   "resources/test/idl-helper.js": [
    "2b73527ff2becedfa7d191d7d82e76c1c378ff25",
    "support"
   ],
   "resources/test/tests/functional/add_cleanup.html": [
    "31fe19c4bceb96a1a6904706a7369d7c10e1ed12",
    "support"
   ],
+  "resources/test/tests/functional/add_cleanup_async.html": [
+   "9d0ade4150a25ef60bde2b09881398c226cea703",
+   "support"
+  ],
+  "resources/test/tests/functional/add_cleanup_async_bad_return.html": [
+   "0b45362f1643bae215b22137eb7fc2f586993f65",
+   "support"
+  ],
+  "resources/test/tests/functional/add_cleanup_async_rejection.html": [
+   "0528b4254f671b0c629fb1df7b7e81aed2b41af5",
+   "support"
+  ],
+  "resources/test/tests/functional/add_cleanup_async_rejection_after_load.html": [
+   "bd8fb379c787457675a41c141c2cddf886450682",
+   "support"
+  ],
+  "resources/test/tests/functional/add_cleanup_async_timeout.html": [
+   "5cb04d5a8ba7f895568dddbbf12987cd0ff3a83e",
+   "support"
+  ],
+  "resources/test/tests/functional/add_cleanup_bad_return.html": [
+   "1f1c6fbf44b5d505d172e213b93261bd20c45fb6",
+   "support"
+  ],
   "resources/test/tests/functional/add_cleanup_count.html": [
-   "03f6f11e3294a7940638d59f914819f1e7293a2b",
+   "38fd8fd7a10e9e52e7738902dd0751dccc581e79",
    "support"
   ],
   "resources/test/tests/functional/add_cleanup_err.html": [
-   "d9fd1375e9bf738a9eaf98eb84e15a20aa141a79",
+   "9997281242a613ea14e6e36b4151129d4e058d7e",
    "support"
   ],
   "resources/test/tests/functional/add_cleanup_err_multi.html": [
-   "7891c12d77a28493501951e9cbb3bece2ddda39b",
+   "a489b96659066fb5db98c3146f3ae90b53cc53f5",
+   "support"
+  ],
+  "resources/test/tests/functional/add_cleanup_sync_queue.html": [
+   "1e058f150136cf4ab9a3e5e1b4c16a28685bd94f",
    "support"
   ],
   "resources/test/tests/functional/api-tests-1.html": [
    "a9d92b12f81fd2ec7e70ded78f4697cfba6fc12e",
    "support"
   ],
   "resources/test/tests/functional/api-tests-2.html": [
    "08a2b893c244dae40518749ac4b6bdccaa284a03",
@@ -625723,17 +625811,17 @@
    "c327a8b001d27e542b6b5a858a0400a042b3d3f7",
    "support"
   ],
   "resources/test/tests/functional/iframe-callback.html": [
    "403628fa91f431e5908bcf449c4fd26855e7d00d",
    "support"
   ],
   "resources/test/tests/functional/iframe-consolidate-errors.html": [
-   "9ba6e179faffc44f6caea124654a9e421289c3be",
+   "e382c6e6fd96bc40ea057a941f13816c947fa3eb",
    "support"
   ],
   "resources/test/tests/functional/iframe-consolidate-tests.html": [
    "ccc9eb765c4f31be9eafe99d139a840a0173d296",
    "support"
   ],
   "resources/test/tests/functional/iframe-msg.html": [
    "cd2c26c278ec36a07f7d0da051d6cb44bf33c5f0",
@@ -625742,50 +625830,66 @@
   "resources/test/tests/functional/order.html": [
    "ec9978d898082193a11b646bc23e96f029f5ebe6",
    "support"
   ],
   "resources/test/tests/functional/promise-async.html": [
    "d4c62794c4f77abf460cd484fd548a59e1ed16e3",
    "support"
   ],
+  "resources/test/tests/functional/promise-with-sync.html": [
+   "234f5476e9cdaf8c388cdaaa2e6464bc9120fe3d",
+   "support"
+  ],
   "resources/test/tests/functional/promise.html": [
    "bdf6dc3ec2af07a9799243cbc7b15da939961363",
    "support"
   ],
+  "resources/test/tests/functional/queue.html": [
+   "4ea32a2bc8ee64b5841596f240291ec7fa514274",
+   "support"
+  ],
   "resources/test/tests/functional/single-page-test-fail.html": [
    "5826a2ef15c00d817197333de1f444cf1ac51e8b",
    "support"
   ],
   "resources/test/tests/functional/single-page-test-no-assertions.html": [
    "f3140dd492c0c6442d03bc3fe063029892c45dcd",
    "support"
   ],
   "resources/test/tests/functional/single-page-test-no-body.html": [
    "82f3e18a44cdc78cabd6b55406449d18eae59f20",
    "support"
   ],
   "resources/test/tests/functional/single-page-test-pass.html": [
    "9d5f776d541454cdcff985bb2ad050036d358b81",
    "support"
   ],
+  "resources/test/tests/functional/task-scheduling-promise-test.html": [
+   "fb4cc2dd27d52573c4113aa1a6f8d833ce80c9be",
+   "support"
+  ],
+  "resources/test/tests/functional/task-scheduling-test.html": [
+   "134bdb2ea8d629afaac79b0fe84b3ae570445b17",
+   "support"
+  ],
   "resources/test/tests/functional/uncaught-exception-handle.html": [
    "4c960186e0d29885aebeb379181ed181ccc26d1d",
    "support"
   ],
   "resources/test/tests/functional/uncaught-exception-ignore.html": [
    "b6d30eab0f0ca1fa4024b8576e7cbfb8a4c7bad3",
    "support"
   ],
   "resources/test/tests/functional/worker-dedicated-uncaught-single.html": [
    "760151832e81f8ef61d510b252d0cd1d7d843495",
    "support"
   ],
-  "resources/test/tests/functional/worker-dedicated.html": [
-   "a790a1520ceed96f254b1f5415d0415f7bf3a456",
+  "resources/test/tests/functional/worker-dedicated.sub.html": [
+   "586326d876119da03413a6473b042885abee7741",
    "support"
   ],
   "resources/test/tests/functional/worker-error.js": [
    "7b89602f04b5104871a4da08c13856f74558111f",
    "support"
   ],
   "resources/test/tests/functional/worker-service.html": [
    "c271e6599c0e264ea2da31eeb3d5acec78e0e105",
@@ -625850,16 +625954,20 @@
   "resources/test/tests/unit/META.yml": [
    "cb9e3f87620a634a3e9e92ac788a62751b061aa5",
    "support"
   ],
   "resources/test/tests/unit/basic.html": [
    "7eef4a8fa5b50547bce915170a9b3e1e0312adf4",
    "support"
   ],
+  "resources/test/tests/unit/exceptional-cases.html": [
+   "df9e1239a2ec48dd8b489fb7001a5295e334f963",
+   "support"
+  ],
   "resources/test/tox.ini": [
    "d3a30f870a1572d4423ae99f64c67d63afa345da",
    "support"
   ],
   "resources/test/variants.js": [
    "611d27803447a174bd68b27515d468af0849a13c",
    "support"
   ],
@@ -625883,17 +625991,17 @@
    "5e8f640c6659d176eaca4c71cc1798b7285540b7",
    "support"
   ],
   "resources/testharness.css.headers": [
    "e828b629858d07afd989b80894986315bac16cc7",
    "support"
   ],
   "resources/testharness.js": [
-   "0ea7a2a7f48424708ef661e2dbcecdb8b916c81a",
+   "f0c24635017dad6275c99dc149ab1739470eeb36",
    "support"
   ],
   "resources/testharness.js.headers": [
    "5e8f640c6659d176eaca4c71cc1798b7285540b7",
    "support"
   ],
   "resources/testharnessreport.js": [
    "e5cb40fe0ef652be407d4c48b1c59391864cec7b",
--- a/testing/web-platform/tests/cookies/resources/cookie-helper.sub.js
+++ b/testing/web-platform/tests/cookies/resources/cookie-helper.sub.js
@@ -43,29 +43,29 @@ function assert_dom_cookie(name, value, 
   assert_equals(re.test(document.cookie), present, "`" + name + "=" + value + "` in `document.cookie`");
 }
 
 function assert_cookie(origin, obj, name, value, present) {
   assert_equals(obj[name], present ? value : undefined, "`" + name + "=" + value + "` in request to `" + origin + "`.");
 }
 
 // Remove the cookie named |name| from |origin|, then set it on |origin| anew.
-// If |origin| matches `document.origin`, also assert (via `document.cookie`) that
+// If |origin| matches `self.origin`, also assert (via `document.cookie`) that
 // the cookie was correctly removed and reset.
 function create_cookie(origin, name, value, extras) {
   alert("Create_cookie: " + origin + "/cookies/resources/drop.py?name=" + name);
   return credFetch(origin + "/cookies/resources/drop.py?name=" + name)
     .then(_ => {
-      if (origin == document.origin)
+      if (origin == self.origin)
         assert_dom_cookie(name, value, false);
     })
     .then(_ => {
       return credFetch(origin + "/cookies/resources/set.py?" + name + "=" + value + ";path=/;" + extras)
         .then(_ => {
-          if (origin == document.origin)
+          if (origin == self.origin)
             assert_dom_cookie(name, value, true);
         });
     });
 }
 
 //
 // Prefix-specific test helpers
 //
@@ -91,17 +91,17 @@ function set_prefixed_cookie_via_http_te
       return credFetch(options.origin + "/cookies/resources/set.py?" + name + "=" + value + ";" + options.params)
         .then(_ => credFetch(options.origin + "/cookies/resources/list.py"))
         .then(r => r.json())
         .then(cookies => assert_equals(cookies[name], options.shouldExistViaHTTP ? value : undefined));
     };
 
     var name = options.prefix + "prefixtestcookie";
     if (!options.origin) {
-      options.origin = document.origin;
+      options.origin = self.origin;
       erase_cookie_from_js(name);
       return postDelete;
     } else {
       return credFetch(options.origin + "/cookies/resources/drop.py?name=" + name)
         .then(_ => postDelete());
     }
   }, options.title);
 }
@@ -111,31 +111,31 @@ function set_prefixed_cookie_via_http_te
 //
 
 window.SameSiteStatus = {
   CROSS_SITE: "cross-site",
   LAX: "lax",
   STRICT: "strict"
 };
 
-// Reset SameSite test cookies on |origin|. If |origin| matches `document.origin`, assert
+// Reset SameSite test cookies on |origin|. If |origin| matches `self.origin`, assert
 // (via `document.cookie`) that they were properly removed and reset.
 function resetSameSiteCookies(origin, value) {
   return credFetch(origin + "/cookies/resources/dropSameSite.py")
     .then(_ => {
-      if (origin == document.origin) {
+      if (origin == self.origin) {
         assert_dom_cookie("samesite_strict", value, false);
         assert_dom_cookie("samesite_lax", value, false);
         assert_dom_cookie("samesite_none", value, false);
       }
     })
     .then(_ => {
       return credFetch(origin + "/cookies/resources/setSameSite.py?" + value)
         .then(_ => {
-          if (origin == document.origin) {
+          if (origin == self.origin) {
             assert_dom_cookie("samesite_strict", value, true);
             assert_dom_cookie("samesite_lax", value, true);
             assert_dom_cookie("samesite_none", value, true);
           }
         })
     })
 }
 
@@ -159,22 +159,22 @@ function verifySameSiteCookieState(expec
 // LeaveSecureCookiesAlone-specific test helpers:
 //
 
 window.SecureStatus = {
   INSECURE_COOKIE_ONLY: "1",
   BOTH_COOKIES: "2",
 };
 
-//Reset SameSite test cookies on |origin|. If |origin| matches `document.origin`, assert
+//Reset Secure test cookies on |origin|. If |origin| matches `self.origin`, assert
 //(via `document.cookie`) that they were properly removed and reset.
 function resetSecureCookies(origin, value) {
 return credFetch(origin + "/cookies/resources/dropSecure.py")
  .then(_ => {
-   if (origin == document.origin) {
+   if (origin == self.origin) {
      assert_dom_cookie("alone_secure", value, false);
      assert_dom_cookie("alone_insecure", value, false);
    }
  })
  .then(_ => {
      return credFetch(origin + "/cookie/resources/setSecure.py?" + value)
  })
 }
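For orientation, a hedged sketch of how a test might drive the helpers above; `resetSameSiteCookies`, `credFetch`, and the `list.py` endpoint are the real names from this file, while the test body and its assertions are illustrative only:

```
promise_test(function(t) {
  var value = "" + Math.random();
  return resetSameSiteCookies(self.origin, value)
    .then(_ => credFetch(self.origin + "/cookies/resources/list.py"))
    .then(r => r.json())
    .then(cookies => {
      // A same-origin, credentialed fetch should carry all three cookies.
      assert_equals(cookies["samesite_strict"], value);
      assert_equals(cookies["samesite_lax"], value);
      assert_equals(cookies["samesite_none"], value);
    });
}, "same-origin request carries SameSite cookies (illustrative)");
```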
--- a/testing/web-platform/tests/docs/_writing-tests/testharness-api.md
+++ b/testing/web-platform/tests/docs/_writing-tests/testharness-api.md
@@ -313,16 +313,26 @@ the test result is known. For example:
     var element = document.createElement("div");
     element.setAttribute("id", "null");
     document.body.appendChild(element);
     this.add_cleanup(function() { document.body.removeChild(element) });
     assert_equals(document.getElementById(null), element);
   }, "Calling document.getElementById with a null argument.");
 ```
 
+If the test was created using the `promise_test` API, then cleanup functions
+may optionally return a "thenable" value (i.e. an object which defines a `then`
+method). `testharness.js` will assume that such values conform to [the
+ECMAScript standard for
+Promises](https://tc39.github.io/ecma262/#sec-promise-objects) and delay the
+completion of the test until all "thenables" provided in this way have settled.
+All callbacks will be invoked synchronously; tests that require more complex
+cleanup behavior should manage execution order explicitly. If any of the
+eventual values are rejected, the test runner will report an error.
+
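A hedged example of the behavior described above; `openDB` and `db.close()` are hypothetical stand-ins for any asynchronous resource, while `promise_test`, `add_cleanup`, and the thenable contract are the real API:

```
promise_test(function(t) {
  return openDB("test-db").then(function(db) {  // openDB: hypothetical helper
    t.add_cleanup(function() {
      // close() returns a Promise; the harness delays completion of this
      // test until it settles, and reports an error if it rejects.
      return db.close();
    });
    assert_true(db.isOpen);
  });
}, "asynchronous cleanup is awaited before the next test starts");
```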
 ## Timeouts in Tests ##
 
 In general the use of timeouts in tests is discouraged because this is
 an observed source of instability in real tests when run on CI
 infrastructure. In particular if a test should fail when something
 doesn't happen, it is good practice to simply let the test run to the
 full timeout rather than trying to guess an appropriate shorter
 timeout to use.
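A hedged illustration of that guidance; `target` and the event name are made up, while `async_test` and `step_func_done` are real testharness.js APIs:

```
// This test should fail if "done-event" never fires. Rather than guessing a
// short setTimeout after which to declare failure (racy on slow CI), simply
// wait: if the event never fires, the harness's own timeout fails the test.
async_test(function(t) {
  target.addEventListener("done-event", t.step_func_done(), { once: true });
}, "waits for the event; the harness timeout is the failure signal");
```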
--- a/testing/web-platform/tests/lint.whitelist
+++ b/testing/web-platform/tests/lint.whitelist
@@ -288,18 +288,22 @@ SET TIMEOUT: css/css-fonts/font-display/
 SET TIMEOUT: css/css-fonts/font-display/font-display-change.html
 SET TIMEOUT: css/css-fonts/font-display/font-display-change-ref.html
 SET TIMEOUT: css/css-fonts/font-display/font-display-preload.html
 SET TIMEOUT: html/browsers/windows/auxiliary-browsing-contexts/resources/close-opener.html
 SET TIMEOUT: html/dom/documents/dom-tree-accessors/Document.currentScript.html
 SET TIMEOUT: html/webappapis/timers/*
 SET TIMEOUT: resources/chromium/*
 SET TIMEOUT: resources/test/tests/functional/add_cleanup.html
+SET TIMEOUT: resources/test/tests/functional/add_cleanup_async.html
+SET TIMEOUT: resources/test/tests/functional/add_cleanup_async_rejection.html
+SET TIMEOUT: resources/test/tests/functional/add_cleanup_async_rejection_after_load.html
 SET TIMEOUT: resources/test/tests/functional/api-tests-1.html
 SET TIMEOUT: resources/test/tests/functional/worker.js
+SET TIMEOUT: resources/test/tests/unit/exceptional-cases.html
 SET TIMEOUT: resources/testharness.js
 
 # setTimeout use in reftests
 SET TIMEOUT: acid/acid3/test.html
 
 # Travis
 WEB-PLATFORM.TEST: .travis.yml
 
--- a/testing/web-platform/tests/resources/test/conftest.py
+++ b/testing/web-platform/tests/resources/test/conftest.py
@@ -1,19 +1,20 @@
 import io
 import json
 import os
+import ssl
+import urllib2
 
 import html5lib
 import pytest
 from selenium import webdriver
 
 from wptserver import WPTServer
 
-ENC = 'utf8'
 HERE = os.path.dirname(os.path.abspath(__file__))
 WPT_ROOT = os.path.normpath(os.path.join(HERE, '..', '..'))
 HARNESS = os.path.join(HERE, 'harness.html')
 TEST_TYPES = ('functional', 'unit')
 
 def pytest_addoption(parser):
     parser.addoption("--binary", action="store", default=None, help="path to browser binary")
 
@@ -25,41 +26,57 @@ def pytest_collect_file(path, parent):
     test_type = os.path.relpath(str(path), HERE).split(os.path.sep)[1]
 
     return HTMLItem(str(path), test_type, parent)
 
 def pytest_configure(config):
     config.driver = webdriver.Firefox(firefox_binary=config.getoption("--binary"))
     config.server = WPTServer(WPT_ROOT)
     config.server.start()
+    # Although the name of the `_create_unverified_context` method suggests
+    # that it is not intended for external consumption, the standard library's
+    # documentation explicitly endorses its use:
+    #
+    # > To revert to the previous, unverified, behavior
+    # > ssl._create_unverified_context() can be passed to the context
+    # > parameter.
+    #
+    # https://docs.python.org/2/library/httplib.html#httplib.HTTPSConnection
+    config.ssl_context = ssl._create_unverified_context()
     config.add_cleanup(config.server.stop)
     config.add_cleanup(config.driver.quit)
 
 def resolve_uri(context, uri):
     if uri.startswith('/'):
         base = WPT_ROOT
         path = uri[1:]
     else:
         base = os.path.dirname(context)
         path = uri
 
     return os.path.exists(os.path.join(base, path))
 
 class HTMLItem(pytest.Item, pytest.Collector):
     def __init__(self, filename, test_type, parent):
-        self.filename = filename
+        self.url = parent.session.config.server.url(filename)
         self.type = test_type
         self.variants = []
+        # Some tests rely on the WPT server's substitution functionality,
+        # so tests must be retrieved from the server rather than read from the
+        # file system directly.
+        handle = urllib2.urlopen(self.url,
+                                 context=parent.session.config.ssl_context)
+        try:
+            markup = handle.read()
+        finally:
+            handle.close()
 
         if test_type not in TEST_TYPES:
             raise ValueError('Unrecognized test type: "%s"' % test_type)
 
-        with io.open(filename, encoding=ENC) as f:
-            markup = f.read()
-
         parsed = html5lib.parse(markup, namespaceHTMLElements=False)
         name = None
         includes_variants_script = False
         self.expected = None
 
         for element in parsed.getiterator():
             if not name and element.tag == 'title':
                 name = element.text
@@ -89,17 +106,17 @@ class HTMLItem(pytest.Item, pytest.Colle
                 raise ValueError('No test variants specified in file %s' % filename)
         elif self.type == 'unit' and self.expected:
             raise ValueError('Unit tests must not specify expected report data')
 
         super(HTMLItem, self).__init__(name, parent)
 
 
     def reportinfo(self):
-        return self.fspath, None, self.filename
+        return self.fspath, None, self.url
 
     def repr_failure(self, excinfo):
         return pytest.Collector.repr_failure(self, excinfo)
 
     def runtest(self):
         if self.type == 'unit':
             self._run_unit_test()
         elif self.type == 'functional':
@@ -108,17 +125,19 @@ class HTMLItem(pytest.Item, pytest.Colle
             raise NotImplementedError
 
     def _run_unit_test(self):
         driver = self.session.config.driver
         server = self.session.config.server
 
         driver.get(server.url(HARNESS))
 
-        actual = driver.execute_async_script('runTest("%s", "foo", arguments[0])' % server.url(str(self.filename)))
+        actual = driver.execute_async_script(
+            'runTest("%s", "foo", arguments[0])' % self.url
+        )
 
         summarized = self._summarize(actual)
 
         assert summarized[u'summarized_status'][u'status_string'] == u'OK', summarized[u'summarized_status'][u'message']
         for test in summarized[u'summarized_tests']:
             msg = "%s\n%s" % (test[u'name'], test[u'message'])
             assert test[u'status_string'] == u'PASS', msg
 
@@ -127,17 +146,17 @@ class HTMLItem(pytest.Item, pytest.Colle
             self._run_functional_test_variant(variant)
 
     def _run_functional_test_variant(self, variant):
         driver = self.session.config.driver
         server = self.session.config.server
 
         driver.get(server.url(HARNESS))
 
-        test_url = server.url(str(self.filename) + variant)
+        test_url = self.url + variant
         actual = driver.execute_async_script('runTest("%s", "foo", arguments[0])' % test_url)
 
         # Test object ordering is not guaranteed. This weak assertion verifies
         # that the indices are unique and sequential
         indices = [test_obj.get('index') for test_obj in actual['tests']]
         self._assert_sequence(indices)
 
         summarized = self._summarize(actual)
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/resources/test/tests/functional/add_cleanup_async.html
@@ -0,0 +1,87 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+<meta name="variant" content="?keep-promise">
+<title>Test#add_cleanup with Promise-returning functions</title>
+<script src="../../variants.js"></script>
+<script src="../../../testharness.js"></script>
+<script src="../../../testharnessreport.js"></script>
+</head>
+<body>
+<div id="log"></div>
+
+<script>
+"use strict";
+var completeCount = 0;
+var counts = {
+    afterTick: null,
+    afterFirst: null
+};
+
+add_result_callback(function(result_t) {
+  completeCount += 1;
+});
+
+promise_test(function(t) {
+  t.add_cleanup(function() {
+    return new Promise(function(resolve) {
+      setTimeout(function() {
+        counts.afterTick = completeCount;
+        resolve();
+      }, 0);
+    });
+  });
+  t.add_cleanup(function() {
+    return new Promise(function(resolve) {
+
+      setTimeout(function() {
+        counts.afterFirst = completeCount;
+        resolve();
+      }, 0);
+    });
+  });
+
+  return Promise.resolve();
+}, 'promise_test with asynchronous cleanup');
+
+promise_test(function() {
+  assert_equals(
+    counts.afterTick,
+    0,
+    "test is not asynchronously considered 'complete'"
+  );
+  assert_equals(
+    counts.afterFirst,
+    0,
+    "test is not considered 'complete' following fulfillment of first promise"
+  );
+  assert_equals(completeCount, 1);
+
+  return Promise.resolve();
+}, "synchronously-defined promise_test");
+</script>
+<script type="text/json" id="expected">
+{
+  "summarized_status": {
+    "status_string": "OK",
+    "message": null
+  },
+  "summarized_tests": [
+    {
+      "status_string": "PASS",
+      "name": "promise_test with asynchronous cleanup",
+      "message": null,
+      "properties": {}
+    },
+    {
+      "status_string": "PASS",
+      "name": "synchronously-defined promise_test",
+      "message": null,
+      "properties": {}
+    }
+  ],
+  "type": "complete"
+}
+</script>
+</body>
+</html>
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/resources/test/tests/functional/add_cleanup_async_bad_return.html
@@ -0,0 +1,52 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+<meta name="variant" content="?keep-promise">
+<title>Test#add_cleanup with non-thenable-returning function</title>
+<script src="../../variants.js"></script>
+<script src="../../../testharness.js"></script>
+<script src="../../../testharnessreport.js"></script>
+</head>
+<body>
+<div id="log"></div>
+
+<script>
+"use strict";
+
+promise_test(function(t) {
+  t.add_cleanup(function() {});
+  t.add_cleanup(function() {
+    return { then: 9 };
+  });
+  t.add_cleanup(function() { return Promise.resolve(); });
+
+  return Promise.resolve();
+}, "promise_test that returns a non-thenable object in one \"cleanup\" callback");
+
+promise_test(function() {}, "The test runner is in an unpredictable state ('NOT RUN')");
+</script>
+<script type="text/json" id="expected">
+{
+  "summarized_status": {
+    "status_string": "ERROR",
+    "message": "Test named 'promise_test that returns a non-thenable object in one \"cleanup\" callback' specified 3 'cleanup' functions, and 1 returned a non-thenable value."
+  },
+  "summarized_tests": [
+    {
+      "status_string": "NOTRUN",
+      "name": "The test runner is in an unpredictable state ('NOT RUN')",
+      "message": null,
+      "properties": {}
+    },
+    {
+      "status_string": "PASS",
+      "name": "promise_test that returns a non-thenable object in one \"cleanup\" callback",
+      "message": null,
+      "properties": {}
+    }
+  ],
+  "type": "complete"
+}
+</script>
+</body>
+</html>
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/resources/test/tests/functional/add_cleanup_async_rejection.html
@@ -0,0 +1,96 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+<meta name="variant" content="?keep-promise">
+<title>Test#add_cleanup with Promise-returning functions (rejection handling)</title>
+<script src="../../variants.js"></script>
+<script src="../../../testharness.js"></script>
+<script src="../../../testharnessreport.js"></script>
+</head>
+<body>
+<div id="log"></div>
+
+<script>
+"use strict";
+var resolve, reject;
+var completeCount = 0;
+add_result_callback(function(result_t) {
+  completeCount += 1;
+});
+promise_test(function(t) {
+  t.add_cleanup(function() {
+    return new Promise(function(_, _reject) { reject = _reject; });
+  });
+  t.add_cleanup(function() {
+    return new Promise(function(_resolve) { resolve = _resolve; });
+  });
+
+  // The following cleanup function defines empty tests so that the reported
+  // data demonstrates the intended run-time behavior without relying on the
+  // test harness's handling of errors during test cleanup (which is tested
+  // elsewhere).
+  t.add_cleanup(function() {
+    if (completeCount === 0) {
+      promise_test(
+        function() {},
+        "test is not asynchronously considered 'complete' ('NOT RUN')"
+      );
+    }
+
+    reject();
+
+    setTimeout(function() {
+      if (completeCount === 0) {
+        promise_test(
+          function() {},
+          "test is not considered 'complete' following rejection of first " +
+          "promise ('NOT RUN')"
+        );
+      }
+
+      resolve();
+    }, 0);
+  });
+
+  return Promise.resolve();
+}, "promise_test with asynchronous cleanup including rejection");
+
+promise_test(function() {}, "synchronously-defined test ('NOT RUN')");
+</script>
+<script type="text/json" id="expected">
+{
+  "summarized_status": {
+    "status_string": "ERROR",
+    "message": "Test named 'promise_test with asynchronous cleanup including rejection' specified 3 'cleanup' functions, and 1 failed."
+  },
+  "summarized_tests": [
+    {
+      "status_string": "PASS",
+      "name": "promise_test with asynchronous cleanup including rejection",
+      "message": null,
+      "properties": {}
+    },
+    {
+      "status_string": "NOTRUN",
+      "name": "synchronously-defined test ('NOT RUN')",
+      "message": null,
+      "properties": {}
+    },
+    {
+      "status_string": "NOTRUN",
+      "name": "test is not asynchronously considered 'complete' ('NOT RUN')",
+      "message": null,
+      "properties": {}
+    },
+    {
+      "status_string": "NOTRUN",
+      "name": "test is not considered 'complete' following rejection of first promise ('NOT RUN')",
+      "message": null,
+      "properties": {}
+    }
+  ],
+  "type": "complete"
+}
+</script>
+</body>
+</html>
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/resources/test/tests/functional/add_cleanup_async_rejection_after_load.html
@@ -0,0 +1,54 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+<meta name="variant" content="?keep-promise">
+<title>Test#add_cleanup with Promise-returning functions (rejection handling following "load" event)</title>
+<script src="../../variants.js"></script>
+</head>
+<body>
+<h1>Promise Tests</h1>
+<p>This test demonstrates the use of <tt>promise_test</tt>. Assumes ECMAScript 6
+Promise support. Some failures are expected.</p>
+<div id="log"></div>
+<script src="../../../testharness.js"></script>
+<script src="../../../testharnessreport.js"></script>
+<script>
+promise_test(function(t) {
+  t.add_cleanup(function() {
+    return Promise.reject(new Error("foo"));
+  });
+
+  return new Promise((resolve) => {
+    document.addEventListener("DOMContentLoaded", function() {
+      setTimeout(resolve, 0)
+    });
+  });
+}, "Test with failing cleanup that completes after DOMContentLoaded event");
+
+promise_test(function(t) {
+  return Promise.resolve();
+}, "Test that should not be run due to invalid harness state ('NOT RUN')");
+</script>
+<script type="text/json" id="expected">
+{
+  "summarized_status": {
+    "status_string": "ERROR",
+    "message": "Test named 'Test with failing cleanup that completes after DOMContentLoaded event' specified 1 'cleanup' function, and 1 failed."
+  },
+  "summarized_tests": [
+    {
+      "status_string": "NOTRUN",
+      "name": "Test that should not be run due to invalid harness state ('NOT RUN')",
+      "message": null,
+      "properties": {}
+    },
+    {
+      "status_string": "PASS",
+      "name": "Test with failing cleanup that completes after DOMContentLoaded event",
+      "message": null,
+      "properties": {}
+    }
+  ],
+  "type": "complete"
+}
+</script>
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/resources/test/tests/functional/add_cleanup_async_timeout.html
@@ -0,0 +1,59 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+<meta name="variant" content="?keep-promise">
+<title>Test#add_cleanup with Promise-returning functions (timeout handling)</title>
+<script src="../../variants.js"></script>
+<script src="../../../testharness.js"></script>
+<script src="../../../testharnessreport.js"></script>
+</head>
+<body>
+<div id="log"></div>
+<script>
+"use strict";
+
+promise_test(function(t) {
+  t.add_cleanup(function() {
+    return Promise.resolve();
+  });
+
+  t.add_cleanup(function() {
+    return new Promise(function() {});
+  });
+
+  t.add_cleanup(function() {});
+
+  t.add_cleanup(function() {
+    return new Promise(function() {});
+  });
+
+  return Promise.resolve();
+}, "promise_test with asynchronous cleanup");
+
+promise_test(function() {}, "promise_test following timed out cleanup ('NOT RUN')");
+</script>
+<script type="text/json" id="expected">
+{
+  "summarized_status": {
+    "status_string": "ERROR",
+    "message": "Timeout while running cleanup for test named \"promise_test with asynchronous cleanup\"."
+  },
+  "summarized_tests": [
+    {
+      "status_string": "NOTRUN",
+      "name": "promise_test following timed out cleanup ('NOT RUN')",
+      "message": null,
+      "properties": {}
+    },
+    {
+      "status_string": "PASS",
+      "name": "promise_test with asynchronous cleanup",
+      "message": null,
+      "properties": {}
+    }
+  ],
+  "type": "complete"
+}
+</script>
+</body>
+</html>
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/resources/test/tests/functional/add_cleanup_bad_return.html
@@ -0,0 +1,64 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+<meta name="variant" content="">
+<meta name="variant" content="?keep-promise">
+<title>Test#add_cleanup with value-returning function</title>
+<script src="../../variants.js"></script>
+<script src="../../../testharness.js"></script>
+<script src="../../../testharnessreport.js"></script>
+</head>
+<body>
+<div id="log"></div>
+<script>
+"use strict";
+
+test(function(t) {
+  t.add_cleanup(function() {});
+  t.add_cleanup(function() { return null; });
+  t.add_cleanup(function() {
+    test(
+      function() {},
+      "The test runner is in an unpredictable state #1 ('NOT RUN')"
+    );
+
+    throw new Error();
+  });
+  t.add_cleanup(function() { return 4; });
+  t.add_cleanup(function() { return { then: function() {} }; });
+  t.add_cleanup(function() {});
+}, "Test that returns a value in three \"cleanup\" functions");
+
+test(function() {}, "The test runner is in an unpredictable state #2 ('NOT RUN')");
+</script>
+<script type="text/json" id="expected">
+{
+  "summarized_status": {
+    "status_string": "ERROR",
+    "message": "Test named 'Test that returns a value in three \"cleanup\" functions' specified 6 'cleanup' functions, and 1 failed, and 3 returned a non-undefined value."
+  },
+  "summarized_tests": [
+    {
+      "status_string": "PASS",
+      "name": "Test that returns a value in three \"cleanup\" functions",
+      "properties": {},
+      "message": null
+    },
+    {
+      "status_string": "NOTRUN",
+      "name": "The test runner is in an unpredictable state #1 ('NOT RUN')",
+      "message": null,
+      "properties": {}
+    },
+    {
+      "status_string": "NOTRUN",
+      "name": "The test runner is in an unpredictable state #2 ('NOT RUN')",
+      "message": null,
+      "properties": {}
+    }
+  ],
+  "type": "complete"
+}
+</script>
+</body>
+</html>
--- a/testing/web-platform/tests/resources/test/tests/functional/add_cleanup_count.html
+++ b/testing/web-platform/tests/resources/test/tests/functional/add_cleanup_count.html
@@ -1,16 +1,16 @@
 <!DOCTYPE HTML>
 <html>
 <head>
 <meta name="variant" content="?keep-promise">
 <title>Test#add_cleanup reported count</title>
 <script src="../../variants.js"></script>
-<script src="/resources/testharness.js"></script>
-<script src="/resources/testharnessreport.js"></script>
+<script src="../../../testharness.js"></script>
+<script src="../../../testharnessreport.js"></script>
 </head>
 <body>
 <div id="log"></div>
 
 <script>
 promise_test(function(t) {
     t.add_cleanup(function() {});
     t.add_cleanup(function() {});
--- a/testing/web-platform/tests/resources/test/tests/functional/add_cleanup_err.html
+++ b/testing/web-platform/tests/resources/test/tests/functional/add_cleanup_err.html
@@ -1,17 +1,17 @@
 <!DOCTYPE HTML>
 <html>
 <head>
 <meta name="variant" content="">
 <meta name="variant" content="?keep-promise">
 <title>Test#add_cleanup: error</title>
 <script src="../../variants.js"></script>
-<script src="/resources/testharness.js"></script>
-<script src="/resources/testharnessreport.js"></script>
+<script src="../../../testharness.js"></script>
+<script src="../../../testharnessreport.js"></script>
 </head>
 <body>
 <div id="log"></div>
 <script>
 "use strict";
 
 test(function(t) {
   t.add_cleanup(function() {
--- a/testing/web-platform/tests/resources/test/tests/functional/add_cleanup_err_multi.html
+++ b/testing/web-platform/tests/resources/test/tests/functional/add_cleanup_err_multi.html
@@ -1,17 +1,17 @@
 <!DOCTYPE HTML>
 <html>
 <head>
 <meta name="variant" content="">
 <meta name="variant" content="?keep-promise">
 <title>Test#add_cleanup: multiple functions with one in error</title>
 <script src="../../variants.js"></script>
-<script src="/resources/testharness.js"></script>
-<script src="/resources/testharnessreport.js"></script>
+<script src="../../../testharness.js"></script>
+<script src="../../../testharnessreport.js"></script>
 </head>
 <body>
 <div id="log"></div>
 
 <script>
 "use strict";
 
 test(function(t) {
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/resources/test/tests/functional/add_cleanup_sync_queue.html
@@ -0,0 +1,57 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+<meta name="variant" content="?keep-promise">
+<title>Test#add_cleanup: queuing tests</title>
+<script src="../../variants.js"></script>
+<script src="../../../testharness.js"></script>
+<script src="../../../testharnessreport.js"></script>
+</head>
+<body>
+<div id="log"></div>
+
+<script>
+"use strict";
+var firstCleaned = false;
+
+promise_test(function(t) {
+  promise_test(function() {
+    assert_true(
+      firstCleaned, "should not execute until first test is complete"
+    );
+
+    return Promise.resolve();
+  }, "test defined when no tests are queued, but one test is executing");
+
+  t.add_cleanup(function() {
+    firstCleaned = true;
+  });
+
+  return Promise.resolve();
+}, "Test with a 'cleanup' function");
+</script>
+<script type="text/json" id="expected">
+{
+  "summarized_status": {
+    "message": null,
+    "status_string": "OK"
+  },
+  "summarized_tests": [
+    {
+      "message": null,
+      "name": "Test with a 'cleanup' function",
+      "status_string": "PASS",
+      "properties": {}
+    },
+    {
+      "message": null,
+      "name": "test defined when no tests are queued, but one test is executing",
+      "status_string": "PASS",
+      "properties": {}
+    }
+  ],
+  "type": "complete"
+}
+</script>
+</body>
+</html>
--- a/testing/web-platform/tests/resources/test/tests/functional/iframe-consolidate-errors.html
+++ b/testing/web-platform/tests/resources/test/tests/functional/iframe-consolidate-errors.html
@@ -33,15 +33,21 @@ child context.</p>
     "message": "Error in remote: Error: Example Error"
   },
   "summarized_tests": [
     {
       "status_string": "PASS",
       "name": "Test executing in parent context",
       "properties": {},
       "message": null
+    },
+    {
+      "status_string": "NOTRUN",
+      "name": "This should show a harness status of 'Error' and a test status of 'Not Run'",
+      "properties": {},
+      "message": null
     }
   ],
   "type": "complete"
 }
 </script>
 </body>
 </html>
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/resources/test/tests/functional/promise-with-sync.html
@@ -0,0 +1,81 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+<meta name="variant" content="?keep-promise">
+<title>Promise Tests and Synchronous Tests</title>
+<script src="../../variants.js"></script>
+</head>
+<body>
+<h1>Promise Tests</h1>
+<p>This test demonstrates the use of <tt>promise_test</tt> alongside synchronous tests.</p>
+<div id="log"></div>
+<script src="../../../testharness.js"></script>
+<script src="../../../testharnessreport.js"></script>
+<script>
+"use strict";
+var sequence = [];
+
+test(function(t) {
+  assert_array_equals(sequence, []);
+  sequence.push(1);
+}, "first synchronous test");
+
+promise_test(function() {
+  assert_array_equals(sequence, [1, 2]);
+
+  return Promise.resolve()
+    .then(function() {
+        assert_array_equals(sequence, [1, 2]);
+        sequence.push(3);
+      });
+}, "first promise_test");;
+
+test(function(t) {
+  assert_array_equals(sequence, [1]);
+  sequence.push(2);
+}, "second synchronous test");
+
+promise_test(function() {
+  assert_array_equals(sequence, [1, 2, 3]);
+
+  return Promise.resolve()
+    .then(function() {
+        assert_array_equals(sequence, [1, 2, 3]);
+      });
+}, "second promise_test");;
+</script>
+<script type="text/json" id="expected">
+{
+  "summarized_status": {
+    "message": null,
+    "status_string": "OK"
+  },
+  "summarized_tests": [
+    {
+      "message": null,
+      "properties": {},
+      "name": "first promise_test",
+      "status_string": "PASS"
+    },
+    {
+      "message": null,
+      "properties": {},
+      "name": "first synchronous test",
+      "status_string": "PASS"
+    },
+    {
+      "message": null,
+      "properties": {},
+      "name": "second promise_test",
+      "status_string": "PASS"
+    },
+    {
+      "message": null,
+      "properties": {},
+      "name": "second synchronous test",
+      "status_string": "PASS"
+    }
+  ],
+  "type": "complete"
+}
+</script>
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/resources/test/tests/functional/queue.html
@@ -0,0 +1,132 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+<meta name="variant" content="?keep-promise">
+<title>Test queuing synchronous tests</title>
+<script src="../../variants.js"></script>
+<script src="../../../testharness.js"></script>
+<script src="../../../testharnessreport.js"></script>
+</head>
+<body>
+<div id="log"></div>
+
+<script>
+"use strict";
+var inInitialTurn = true;
+
+test(function(t) {
+  assert_true(
+    inInitialTurn, "should execute in the initial turn of the event loop"
+  );
+}, "First synchronous test");
+
+test(function(t) {
+  assert_true(
+    inInitialTurn, "should execute in the initial turn of the event loop"
+  );
+}, "Second synchronous test");
+
+async_test(function(t) {
+  assert_true(
+    inInitialTurn, "should execute in the initial turn of the event loop"
+  );
+  t.done();
+}, "First async_test (run in parallel)");
+
+async_test(function(t) {
+  assert_true(
+    inInitialTurn, "should execute in the initial turn of the event loop"
+  );
+  t.done();
+}, "Second async_test (run in parallel)");
+
+test(function(t) {
+  assert_true(
+    inInitialTurn, "should execute in the initial turn of the event loop"
+  );
+}, "Third synchronous test");
+
+promise_test(function(t) {
+  assert_false(
+    inInitialTurn, "should not execute in the initial turn of the event loop"
+  );
+
+  return Promise.resolve();
+}, "promise_test");
+
+async_test(function(t) {
+  assert_true(
+    inInitialTurn, "should execute in the initial turn of the event loop"
+  );
+  t.done();
+}, "Third async_test (run in parallel)");
+
+test(function(t) {
+  assert_true(
+    inInitialTurn, "should execute in the initial turn of the event loop"
+  );
+}, "Fourth synchronous test");
+
+inInitialTurn = false;
+</script>
+<script type="text/json" id="expected">
+{
+  "summarized_status": {
+    "status_string": "OK",
+    "message": null
+  },
+  "summarized_tests": [
+    {
+      "properties": {},
+      "name": "First async_test (run in parallel)",
+      "status_string": "PASS",
+      "message": null
+    },
+    {
+      "properties": {},
+      "name": "First synchronous test",
+      "status_string": "PASS",
+      "message": null
+    },
+    {
+      "properties": {},
+      "name": "Fourth synchronous test",
+      "status_string": "PASS",
+      "message": null
+    },
+    {
+      "properties": {},
+      "name": "Second async_test (run in parallel)",
+      "status_string": "PASS",
+      "message": null
+    },
+    {
+      "properties": {},
+      "name": "Second synchronous test",
+      "status_string": "PASS",
+      "message": null
+    },
+    {
+      "properties": {},
+      "name": "Third async_test (run in parallel)",
+      "status_string": "PASS",
+      "message": null
+    },
+    {
+      "properties": {},
+      "name": "Third synchronous test",
+      "status_string": "PASS",
+      "message": null
+    },
+    {
+      "properties": {},
+      "name": "promise_test",
+      "status_string": "PASS",
+      "message": null
+    }
+  ],
+  "type": "complete"
+}
+</script>
+</body>
+</html>
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/resources/test/tests/functional/task-scheduling-promise-test.html
@@ -0,0 +1,243 @@
+<!doctype html>
+<meta name="variant" content="?keep-promise">
+<title>testharness.js - task scheduling</title>
+<script src="../../variants.js"></script>
+<script src="../../../testharness.js"></script>
+<script src="../../../testharnessreport.js"></script>
+<script>
+var sameTask = null;
+var sameMicrotask = null;
+var expectedError = new Error('This error is expected');
+
+promise_test(function() {
+  return Promise.resolve()
+    .then(function() {
+        sameMicrotask = true;
+        Promise.resolve().then(() => sameMicrotask = false);
+      });
+}, 'promise test without cleanup #1');
+
+promise_test(function() {
+  assert_false(sameMicrotask);
+
+  return Promise.resolve();
+}, 'sub-test with 0 cleanup functions executes in distinct microtask from a passing sub-test');
+
+promise_test(function() {
+  return Promise.resolve()
+    .then(function() {
+        sameMicrotask = true;
+        Promise.resolve().then(() => sameMicrotask = false);
+        throw expectedError;
+      });
+}, 'failing promise test without cleanup #1');
+
+promise_test(function() {
+  assert_false(sameMicrotask);
+
+  return Promise.resolve();
+}, 'sub-test with 0 cleanup functions executes in distinct microtask from a failing sub-test');
+
+promise_test(function(t) {
+  t.add_cleanup(function() {});
+
+  return Promise.resolve()
+    .then(function() {
+        sameMicrotask = true;
+        Promise.resolve().then(() => sameMicrotask = false);
+      });
+}, 'promise test with cleanup #1');
+
+promise_test(function() {
+  assert_false(sameMicrotask);
+
+  return Promise.resolve();
+}, 'sub-test with some cleanup functions executes in distinct microtask from a passing sub-test');
+
+promise_test(function(t) {
+  t.add_cleanup(function() {});
+
+  return Promise.resolve()
+    .then(function() {
+        sameMicrotask = true;
+        Promise.resolve().then(() => sameMicrotask = false);
+        throw expectedError;
+      });
+}, 'failing promise test with cleanup #1');
+
+promise_test(function() {
+  assert_false(sameMicrotask);
+
+  return Promise.resolve();
+}, 'sub-test with some cleanup functions executes in distinct microtask from a failing sub-test');
+
+promise_test(function(t) {
+  return Promise.resolve()
+    .then(function() {
+        sameTask = true;
+        t.step_timeout(() => sameTask = false, 0);
+      });
+}, 'promise test without cleanup #2');
+
+promise_test(function() {
+  assert_true(sameTask);
+
+  return Promise.resolve();
+}, 'sub-test with 0 cleanup functions executes in the same task as a passing sub-test');
+
+promise_test(function(t) {
+  return Promise.resolve()
+    .then(function() {
+        sameTask = true;
+        t.step_timeout(() => sameTask = false, 0);
+        throw expectedError;
+      });
+}, 'failing promise test without cleanup #2');
+
+promise_test(function() {
+  assert_true(sameTask);
+
+  return Promise.resolve();
+}, 'sub-test with 0 cleanup functions executes in the same task as a failing sub-test');
+
+promise_test(function(t) {
+  t.add_cleanup(function() {});
+
+  return Promise.resolve()
+    .then(function() {
+        sameTask = true;
+        t.step_timeout(() => sameTask = false, 0);
+      });
+}, 'promise test with cleanup #2');
+
+promise_test(function() {
+  assert_true(sameTask);
+
+  return Promise.resolve();
+}, 'sub-test with some cleanup functions executes in the same task as a passing sub-test');
+
+promise_test(function(t) {
+  t.add_cleanup(function() {});
+
+  return Promise.resolve()
+    .then(function() {
+        sameTask = true;
+        t.step_timeout(() => sameTask = false, 0);
+        throw expectedError;
+      });
+}, 'failing promise test with cleanup #2');
+
+promise_test(function() {
+  assert_true(sameTask);
+
+  return Promise.resolve();
+}, 'sub-test with some cleanup functions executes in the same task as a failing sub-test');
+</script>
+
+<script type="text/json" id="expected">
+{
+    "summarized_status": {
+        "message": null,
+        "status_string": "OK"
+    },
+    "summarized_tests": [
+        {
+            "message": "promise_test: Unhandled rejection with value: object \"Error: This error is expected\"",
+            "name": "failing promise test with cleanup #1",
+            "properties": {},
+            "status_string": "FAIL"
+        },
+        {
+            "message": "promise_test: Unhandled rejection with value: object \"Error: This error is expected\"",
+            "name": "failing promise test with cleanup #2",
+            "properties": {},
+            "status_string": "FAIL"
+        },
+        {
+            "message": "promise_test: Unhandled rejection with value: object \"Error: This error is expected\"",
+            "name": "failing promise test without cleanup #1",
+            "properties": {},
+            "status_string": "FAIL"
+        },
+        {
+            "message": "promise_test: Unhandled rejection with value: object \"Error: This error is expected\"",
+            "name": "failing promise test without cleanup #2",
+            "properties": {},
+            "status_string": "FAIL"
+        },
+        {
+            "message": null,
+            "name": "promise test with cleanup #1",
+            "properties": {},
+            "status_string": "PASS"
+        },
+        {
+            "message": null,
+            "name": "promise test with cleanup #2",
+            "properties": {},
+            "status_string": "PASS"
+        },
+        {
+            "message": null,
+            "name": "promise test without cleanup #1",
+            "properties": {},
+            "status_string": "PASS"
+        },
+        {
+            "message": null,
+            "name": "promise test without cleanup #2",
+            "properties": {},
+            "status_string": "PASS"
+        },
+        {
+            "message": null,
+            "name": "sub-test with 0 cleanup functions executes in distinct microtask from a failing sub-test",
+            "properties": {},
+            "status_string": "PASS"
+        },
+        {
+            "message": null,
+            "name": "sub-test with 0 cleanup functions executes in distinct microtask from a passing sub-test",
+            "properties": {},
+            "status_string": "PASS"
+        },
+        {
+            "message": null,
+            "name": "sub-test with 0 cleanup functions executes in the same task as a failing sub-test",
+            "properties": {},
+            "status_string": "PASS"
+        },
+        {
+            "message": null,
+            "name": "sub-test with 0 cleanup functions executes in the same task as a passing sub-test",
+            "properties": {},
+            "status_string": "PASS"
+        },
+        {
+            "message": null,
+            "name": "sub-test with some cleanup functions executes in distinct microtask from a failing sub-test",
+            "properties": {},
+            "status_string": "PASS"
+        },
+        {
+            "message": null,
+            "name": "sub-test with some cleanup functions executes in distinct microtask from a passing sub-test",
+            "properties": {},
+            "status_string": "PASS"
+        },
+        {
+            "message": null,
+            "name": "sub-test with some cleanup functions executes in the same task as a failing sub-test",
+            "properties": {},
+            "status_string": "PASS"
+        },
+        {
+            "message": null,
+            "name": "sub-test with some cleanup functions executes in the same task as a passing sub-test",
+            "properties": {},
+            "status_string": "PASS"
+        }
+    ],
+    "type": "complete"
+}
+</script>
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/resources/test/tests/functional/task-scheduling-test.html
@@ -0,0 +1,144 @@
+<!doctype html>
+<meta name="variant" content="">
+<meta name="variant" content="?keep-promise">
+<title>testharness.js - task scheduling</title>
+<script src="../../variants.js"></script>
+<script src="../../../testharness.js"></script>
+<script src="../../../testharnessreport.js"></script>
+<script>
+var sameMicrotask = null;
+var expectedError = new Error('This error is expected');
+
+// Derived from `immediate`
+// https://github.com/calvinmetcalf/immediate/blob/c353bd2106648cee1d525bfda22cfc4456e69c0e/lib/mutation.js
+function microTask(callback) {
+  var observer = new MutationObserver(callback);
+  var element = document.createTextNode('');
+  observer.observe(element, {
+    characterData: true
+  });
+
+  element.data = true;
+}
+
+async_test(function(t) {
+  var microtask_ran = false;
+
+  t.step_timeout(t.step_func(function() {
+    assert_true(microtask_ran, 'function registered as a microtask was executed before task');
+    t.done();
+  }), 0);
+
+  microTask(function() {
+    microtask_ran = true;
+  });
+}, 'precondition: microtask creation logic functions as expected');
+
+test(function() {
+  sameMicrotask = true;
+  microTask(function() { sameMicrotask = false; });
+}, 'synchronous test without cleanup');
+
+test(function() {
+  assert_true(sameMicrotask);
+}, 'sub-test with 0 cleanup functions executes in the same microtask as a passing sub-test');
+
+test(function() {
+  sameMicrotask = true;
+  microTask(function() { sameMicrotask = false; });
+  throw expectedError;
+}, 'failing synchronous test without cleanup');
+
+test(function() {
+  assert_true(sameMicrotask);
+}, 'sub-test with 0 cleanup functions executes in the same microtask as a failing sub-test');
+
+test(function(t) {
+  t.add_cleanup(function() {});
+
+  sameMicrotask = true;
+  microTask(function() { sameMicrotask = false; });
+}, 'synchronous test with cleanup');
+
+test(function() {
+  assert_true(sameMicrotask);
+}, 'sub-test with some cleanup functions executes in the same microtask as a passing sub-test');
+
+test(function(t) {
+  t.add_cleanup(function() {});
+
+  sameMicrotask = true;
+  microTask(function() { sameMicrotask = false; });
+  throw expectedError;
+}, 'failing synchronous test with cleanup');
+
+test(function() {
+  assert_true(sameMicrotask);
+}, 'sub-test with some cleanup functions executes in the same microtask as a failing sub-test');
+</script>
+
+<script type="text/json" id="expected">
+{
+    "summarized_status": {
+        "message": null,
+        "status_string": "OK"
+    },
+    "summarized_tests": [
+        {
+            "message": "This error is expected",
+            "name": "failing synchronous test with cleanup",
+            "properties": {},
+            "status_string": "FAIL"
+        },
+        {
+            "message": "This error is expected",
+            "name": "failing synchronous test without cleanup",
+            "properties": {},
+            "status_string": "FAIL"
+        },
+        {
+            "message": null,
+            "name": "precondition: microtask creation logic functions as expected",
+            "properties": {},
+            "status_string": "PASS"
+        },
+        {
+            "message": null,
+            "name": "sub-test with 0 cleanup functions executes in the same microtask as a failing sub-test",
+            "properties": {},
+            "status_string": "PASS"
+        },
+        {
+            "message": null,
+            "name": "sub-test with 0 cleanup functions executes in the same microtask as a passing sub-test",
+            "properties": {},
+            "status_string": "PASS"
+        },
+        {
+            "message": null,
+            "name": "sub-test with some cleanup functions executes in the same microtask as a failing sub-test",
+            "properties": {},
+            "status_string": "PASS"
+        },
+        {
+            "message": null,
+            "name": "sub-test with some cleanup functions executes in the same microtask as a passing sub-test",
+            "properties": {},
+            "status_string": "PASS"
+        },
+        {
+            "message": null,
+            "name": "synchronous test with cleanup",
+            "properties": {},
+            "status_string": "PASS"
+        },
+        {
+            "message": null,
+            "name": "synchronous test without cleanup",
+            "properties": {},
+            "status_string": "PASS"
+        }
+    ],
+    "type": "complete"
+}
+</script>
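The `microTask` helper in the new test above leans on the guarantee that MutationObserver callbacks run as microtasks, which works even in engines that predate `queueMicrotask`. Where `queueMicrotask` (or a resolved promise) is available, the same scheduling can be sketched more directly; the comparison below is illustrative only and is not part of the test:

    // Both callbacks run as microtasks, i.e. before any setTimeout(..., 0) task.
    function microTaskViaQueue(callback) {
      queueMicrotask(callback);
    }

    function microTaskViaPromise(callback) {
      Promise.resolve().then(callback);
    }

    setTimeout(function() { console.log('task'); }, 0);
    microTaskViaPromise(function() { console.log('microtask'); });
    // Logs "microtask" before "task".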
deleted file mode 100644
--- a/testing/web-platform/tests/resources/test/tests/functional/worker-dedicated.html
+++ /dev/null
@@ -1,91 +0,0 @@
-<!DOCTYPE HTML>
-<html>
-<head>
-<meta name="variant" content="">
-<meta name="variant" content="?keep-promise">
-<title>Dedicated Worker Tests</title>
-<script src="../../variants.js"></script>
-<script src="/resources/testharness.js"></script>
-<script src="/resources/testharnessreport.js"></script>
-</head>
-<body>
-<h1>Dedicated Web Worker Tests</h1>
-<p>Demonstrates running <tt>testharness</tt> based tests inside a dedicated web worker.
-<p>The test harness is expected to fail due to an uncaught exception in one worker.</p>
-<div id="log"></div>
-
-<script>
-test(function(t) {
-        assert_true("Worker" in self, "Browser should support Workers");
-    },
-    "Browser supports Workers");
-
-fetch_tests_from_worker(new Worker("worker.js"));
-
-fetch_tests_from_worker(new Worker("worker-error.js"));
-
-test(function(t) {
-        assert_false(false, "False is false");
-    },
-    "Test running on main document.");
-</script>
-<script type="text/json" id="expected">
-{
-  "summarized_status": {
-    "status_string": "ERROR",
-    "message": "Error: This failure is expected."
-  },
-  "summarized_tests": [
-    {
-      "status_string": "PASS",
-      "name": "Browser supports Workers",
-      "properties": {},
-      "message": null
-    },
-    {
-      "status_string": "PASS",
-      "name": "Test running on main document.",
-      "properties": {},
-      "message": null
-    },
-    {
-      "status_string": "PASS",
-      "name": "Worker async_test that completes successfully",
-      "properties": {},
-      "message": null
-    },
-    {
-      "status_string": "PASS",
-      "name": "Worker test that completes successfully",
-      "properties": {},
-      "message": null
-    },
-    {
-      "status_string": "PASS",
-      "name": "worker test that completes successfully before exception",
-      "properties": {},
-      "message": null
-    },
-    {
-      "status_string": "NOTRUN",
-      "name": "Worker test that doesn't run ('NOT RUN')",
-      "properties": {},
-      "message": null
-    },
-    {
-      "status_string": "FAIL",
-      "name": "Worker test that fails ('FAIL')",
-      "properties": {},
-      "message": "assert_true: Failing test expected true got false"
-    },
-    {
-      "status_string": "TIMEOUT",
-      "name": "Worker test that times out ('TIMEOUT')",
-      "properties": {},
-      "message": "Test timed out"
-    }
-  ],
-  "type": "complete"
-}
-</script>
-</body>
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/resources/test/tests/functional/worker-dedicated.sub.html
@@ -0,0 +1,91 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+<meta name="variant" content="">
+<meta name="variant" content="?keep-promise">
+<title>Dedicated Worker Tests</title>
+<script src="../../variants.js"></script>
+<script src="../../../testharness.js"></script>
+<script src="../../../testharnessreport.js"></script>
+</head>
+<body>
+<h1>Dedicated Web Worker Tests</h1>
+<p>Demonstrates running <tt>testharness</tt>-based tests inside a dedicated web worker.</p>
+<p>The test harness is expected to fail due to an uncaught exception in one worker.</p>
+<div id="log"></div>
+
+<script>
+test(function(t) {
+        assert_true("Worker" in self, "Browser should support Workers");
+    },
+    "Browser supports Workers");
+
+fetch_tests_from_worker(new Worker("worker.js"));
+
+fetch_tests_from_worker(new Worker("worker-error.js"));
+
+test(function(t) {
+        assert_false(false, "False is false");
+    },
+    "Test running on main document.");
+</script>
+<script type="text/json" id="expected">
+{
+  "summarized_status": {
+    "status_string": "ERROR",
+    "message": "Error in remote https://{{domains[]}}:{{ports[https][0]}}/resources/test/tests/functional/worker-error.js: Error: This failure is expected."
+  },
+  "summarized_tests": [
+    {
+      "status_string": "PASS",
+      "name": "Browser supports Workers",
+      "properties": {},
+      "message": null
+    },
+    {
+      "status_string": "PASS",
+      "name": "Test running on main document.",
+      "properties": {},
+      "message": null
+    },
+    {
+      "status_string": "PASS",
+      "name": "Worker async_test that completes successfully",
+      "properties": {},
+      "message": null
+    },
+    {
+      "status_string": "PASS",
+      "name": "Worker test that completes successfully",
+      "properties": {},
+      "message": null
+    },
+    {
+      "status_string": "PASS",
+      "name": "worker test that completes successfully before exception",
+      "properties": {},
+      "message": null
+    },
+    {
+      "status_string": "NOTRUN",
+      "name": "Worker test that doesn't run ('NOT RUN')",
+      "properties": {},
+      "message": null
+    },
+    {
+      "status_string": "FAIL",
+      "name": "Worker test that fails ('FAIL')",
+      "properties": {},
+      "message": "assert_true: Failing test expected true got false"
+    },
+    {
+      "status_string": "TIMEOUT",
+      "name": "Worker test that times out ('TIMEOUT')",
+      "properties": {},
+      "message": "Test timed out"
+    }
+  ],
+  "type": "complete"
+}
+</script>
+</body>
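The rename to `worker-dedicated.sub.html` opts the file into wptserve's template substitution: the expected harness message now embeds the worker script's absolute URL, which depends on the server configuration, so `{{domains[]}}` and `{{ports[https][0]}}` are filled in at serve time. Under wpt's default configuration (domain `web-platform.test`, first HTTPS port 8443), for example, the message would expand to something like:

    Error in remote https://web-platform.test:8443/resources/test/tests/functional/worker-error.js: Error: This failure is expected.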
new file mode 100644
--- /dev/null
+++ b/testing/web-platform/tests/resources/test/tests/unit/exceptional-cases.html
@@ -0,0 +1,404 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+  <meta charset="utf-8">
+  <meta name="timeout" content="long">
+  <script src="/resources/testharness.js"></script>
+  <title>Exceptional cases</title>
+</head>
+<body>
+<script>
+function makeTest(...bodies) {
+  const closeScript = '<' + '/script>';
+  let src = `
+<!DOCTYPE HTML>
+<html>
+<head>
+<title>Document title</title>
+<script src="/resources/testharness.js?${Math.random()}">${closeScript}
+</head>
+
+<body>
+<div id="log"></div>`;
+  bodies.forEach((body) => {
+    src += '<script>(' + body + ')();' + closeScript;
+  });
+
+  const iframe = document.createElement('iframe');
+
+  document.body.appendChild(iframe);
+  iframe.contentDocument.write(src);
+
+  return new Promise((resolve) => {
+    window.addEventListener('message', function onMessage(e) {
+      if (e.source !== iframe.contentWindow) {
+        return;
+      }
+      if (!e.data || e.data.type !== 'complete') {
+        return;
+      }
+      window.removeEventListener('message', onMessage);
+      resolve(e.data);
+    });
+
+    iframe.contentDocument.close();
+  }).then(({ tests, status }) => {
+    const summary = {
+      harness: getEnumProp(status, status.status),
+      tests: {}
+    };
+
+    tests.forEach((test) => {
+      summary.tests[test.name] = getEnumProp(test, test.status);
+    });
+
+    return summary;
+  });
+}
+
+function getEnumProp(object, value) {
+  for (let property in object) {
+    if (!/^[A-Z]+$/.test(property)) {
+      continue;
+    }
+
+    if (object[property] === value) {
+      return property;
+    }
+  }
+}
+
+promise_test(() => {
+  return makeTest(
+      () => { throw new Error('this error is expected'); }
+    ).then(({harness, tests}) => {
+      assert_equals(harness, 'OK');
+      assert_equals(tests['Document title'], 'FAIL');
+    });
+}, 'uncaught exception during single-page test');
+
+promise_test(() => {
+  return makeTest(
+      () => {
+        async_test((t) => {
+          setTimeout(() => {
+            setTimeout(() => t.done(), 0);
+            async_test((t) => setTimeout(t.done.bind(t), 0), 'after');
+            throw new Error('this error is expected');
+          }, 0);
+        }, 'during');
+      }
+    ).then(({harness, tests}) => {
+      assert_equals(harness, 'ERROR');
+      assert_equals(tests.during, 'PASS');
+      assert_equals(tests.after, 'PASS');
+    });
+}, 'uncaught exception during async_test');
+
+promise_test(() => {
+  return makeTest(
+      () => {
+        promise_test(() => {
+          return new Promise((resolve) => {
+            setTimeout(() => {
+              resolve();
+              promise_test(() => Promise.resolve(), 'after');
+              throw new Error('this error is expected');
+            }, 0);
+          });
+        }, 'during');
+      }
+    ).then(({harness, tests}) => {
+      assert_equals(harness, 'ERROR');
+      assert_equals(tests.during, 'PASS');
+      assert_equals(tests.after, 'PASS');
+    });
+}, 'uncaught exception during promise_test');
+
+promise_test(() => {
+  window.asyncTestCleanupCount = 0;
+  return makeTest(
+      () => {
+        async_test((t) => {
+          t.add_cleanup(() => window.parent.asyncTestCleanupCount += 1);
+          setTimeout(() => {
+            throw new Error('this error is expected');
+          });
+        }, 'test');
+      }
+    ).then(({harness, tests}) => {
+      assert_equals(harness, 'ERROR');
+      assert_equals(tests.test, 'TIMEOUT');
+      assert_equals(window.asyncTestCleanupCount, 1);
+    });
+}, 'uncaught exception during async_test which times out');
+
+promise_test(() => {
+  window.promiseTestCleanupCount = 0;
+  return makeTest(
+      () => {
+        promise_test((t) => {
+          t.add_cleanup(() => window.parent.promiseTestCleanupCount += 1);
+          setTimeout(() => {
+            throw new Error('this error is expected');
+          });
+
+          return new Promise(() => {});
+        }, 'test');
+      }
+    ).then(({harness, tests}) => {
+      assert_equals(harness, 'ERROR');
+      assert_equals(tests.test, 'TIMEOUT');
+      assert_equals(window.promiseTestCleanupCount, 1);
+    });
+}, 'uncaught exception during promise_test which times out');
+
+promise_test(() => {
+  return makeTest(
+      () => { test(() => {}, 'before'); },
+      () => { throw new Error('this error is expected'); },
+      () => { test(() => {}, 'after'); }
+    ).then(({harness, tests}) => {
+      assert_equals(harness, 'ERROR');
+      assert_equals(tests.before, 'PASS');
+      assert_equals(tests.after, 'PASS');
+    });
+}, 'uncaught exception between tests');
+
+promise_test(() => {
+  return makeTest(
+      () => { async_test((t) => setTimeout(t.done.bind(t), 0), 'before'); },
+      () => { async_test('pending'); },
+      () => { throw new Error('this error is expected'); },
+      () => { async_test((t) => setTimeout(t.done.bind(t), 0), 'after'); }
+    ).then(({harness, tests}) => {
+      assert_equals(harness, 'ERROR');
+      assert_equals(tests.before, 'PASS');
+      assert_equals(tests.pending, 'NOTRUN');
+      assert_equals(tests.after, 'PASS');
+    });
+}, 'uncaught exception between async_tests');
+
+promise_test(() => {
+  return makeTest(
+      () => { promise_test(() => Promise.resolve(), 'before'); },
+      () => { throw new Error('this error is expected'); },
+      () => { promise_test(() => Promise.resolve(), 'after'); }
+    ).then(({harness, tests}) => {
+      assert_equals(harness, 'ERROR');
+      assert_equals(tests.before, 'PASS');
+      assert_equals(tests.after, 'PASS');
+    });
+}, 'uncaught exception between promise_tests');
+
+
+// This feature of testharness.js is only observable in browsers which
+// implement the `unhandledrejection` event.
+if ('onunhandledrejection' in window) {
+
+  promise_test(() => {
+    return makeTest(
+        () => { Promise.reject(new Error('this error is expected')); }
+      ).then(({harness, tests}) => {
+        assert_equals(harness, 'OK');
+        assert_equals(tests['Document title'], 'FAIL');
+      });
+  }, 'unhandled rejection during single-page test');
+
+  promise_test(() => {
+    return makeTest(
+        () => {
+          async_test('pending');
+          async_test((t) => {
+            Promise.reject(new Error('this error is expected'));
+
+            window.addEventListener('unhandledrejection', () => {
+              setTimeout(() => t.done(), 0);
+              async_test((t) => setTimeout(t.done.bind(t), 0), 'after');
+              t.done();
+            });
+          }, 'during');
+        }
+      ).then(({harness, tests}) => {
+        assert_equals(harness, 'ERROR');
+        assert_equals(tests.pending, 'NOTRUN');
+        assert_equals(tests.during, 'PASS');
+        assert_equals(tests.after, 'PASS');
+      });
+  }, 'unhandled rejection during async_test');
+
+  promise_test(() => {
+    return makeTest(
+        () => {
+          promise_test(() => {
+            return new Promise((resolve) => {
+              Promise.reject(new Error('this error is expected'));
+
+              window.addEventListener('unhandledrejection', () => {
+                resolve();
+                promise_test(() => Promise.resolve(), 'after');
+                throw new Error('this error is expected');
+              });
+            });
+          }, 'during');
+        }
+      ).then(({harness, tests}) => {
+        assert_equals(harness, 'ERROR');
+        assert_equals(tests.during, 'PASS');
+        assert_equals(tests.after, 'PASS');
+      });
+  }, 'unhandled rejection during promise_test');
+
+  promise_test(() => {
+    return makeTest(
+        () => {
+          setup({ explicit_done: true });
+          test(() => {}, 'before');
+          Promise.reject(new Error('this error is expected'));
+          window.addEventListener('unhandledrejection', () => {
+            test(() => {}, 'after');
+            done();
+          });
+        }
+      ).then(({harness, tests}) => {
+        assert_equals(harness, 'ERROR');
+        assert_equals(tests.before, 'PASS');
+        // TODO: investigate why this is not present
+        assert_false('after' in tests);
+      });
+  }, 'unhandled rejection between tests');
+
+  promise_test(() => {
+    return makeTest(
+        () => {
+          setup({ explicit_done: true });
+          async_test((t) => setTimeout(t.done.bind(t), 0), 'before');
+          Promise.reject(new Error('this error is expected'));
+          window.addEventListener('unhandledrejection', () => {
+            async_test((t) => setTimeout(t.done.bind(t), 0), 'after');
+            done();
+          });
+        }
+      ).then(({harness, tests}) => {
+        assert_equals(harness, 'ERROR');
+        assert_equals(tests.before, 'PASS');
+        assert_equals(tests.after, 'PASS');
+      });
+  }, 'unhandled rejection between async_tests');
+
+  promise_test(() => {
+    return makeTest(
+        () => {
+          setup({ explicit_done: true });
+          promise_test(() => Promise.resolve(), 'before');
+          Promise.reject(new Error('this error is expected'));
+          window.addEventListener('unhandledrejection', () => {
+            promise_test(() => Promise.resolve(), 'after');
+            done();
+          });
+        }
+      ).then(({harness, tests}) => {
+        assert_equals(harness, 'ERROR');
+        assert_equals(tests.before, 'PASS');
+        // TODO: investigate why this is not present
+        assert_false('after' in tests);
+      });
+  }, 'unhandled rejection between promise_tests');
+
+  promise_test(() => {
+    return makeTest(
+        () => {
+          test((t) => {
+            t.add_cleanup(() => { throw new Error('this error is expected'); });
+          }, 'during');
+          test((t) => {}, 'after');
+        }
+      ).then(({harness, tests}) => {
+        assert_equals(harness, 'ERROR');
+        assert_equals(tests.during, 'PASS');
+        assert_equals(tests.after, 'NOTRUN');
+      });
+  }, 'exception in `add_cleanup` of a test');
+
+}
+
+
+promise_test(() => {
+  return makeTest(
+      () => {
+        setup({explicit_done: true});
+        window.addEventListener('DOMContentLoaded', () => {
+          async_test((t) => {
+            t.add_cleanup(() => {
+              setTimeout(() => {
+                async_test((t) => t.done(), 'after');
+                done();
+              }, 0);
+              throw new Error('this error is expected');
+            });
+            setTimeout(t.done.bind(t), 0);
+          }, 'during');
+        });
+      }
+    ).then(({harness, tests}) => {
+      assert_equals(harness, 'ERROR');
+      assert_equals(tests.during, 'PASS');
+      assert_equals(tests.after, 'NOTRUN');
+    });
+}, 'exception in `add_cleanup` of an async_test');
+
+promise_test(() => {
+  return makeTest(
+      () => {
+        promise_test((t) => {
+          t.add_cleanup(() => { throw new Error('this error is expected'); });
+          return Promise.resolve();
+        }, 'test');
+      }
+    ).then(({harness, tests}) => {
+      assert_equals(harness, 'ERROR');
+      assert_equals(tests.test, 'PASS');
+    });
+}, 'exception in `add_cleanup` of a promise_test');
+
+promise_test(() => {
+  return makeTest(
+      () => {
+        async_test((t) => {
+          t.step(() => {
+            throw new Error('this error is expected');
+          });
+        }, 'test');
+        async_test((t) => t.done(), 'after');
+      }
+    ).then(({harness, tests}) => {
+      assert_equals(harness, 'OK');
+      assert_equals(tests.test, 'FAIL');
+      assert_equals(tests.after, 'PASS');
+    });
+}, 'exception in `step` of an async_test');
+
+promise_test(() => {
+  return makeTest(
+      () => {
+        promise_test((t) => {
+          t.step(() => {
+            throw new Error('this error is expected');
+          });
+
+          return new Promise(() => {});
+        }, 'test');
+
+        // The following test should be run to completion despite the fact
+        // that the promise returned by the previous test never resolves.
+        promise_test((t) => Promise.resolve(), 'after');
+      }
+    ).then(({harness, tests}) => {
+      assert_equals(harness, 'OK');
+      assert_equals(tests.test, 'FAIL');
+      assert_equals(tests.after, 'PASS');
+    });
+}, 'exception in `step` of a promise_test');
+</script>
+</body>
+</html>
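`getEnumProp` above recovers a status constant's name from its numeric value by scanning the status object's ALL-CAPS properties; `makeTest` uses it to turn the numeric `status` fields reported by the child harness into readable strings. A minimal illustration against a testharness-style status object:

    var status = { OK: 0, ERROR: 1, TIMEOUT: 2, status: 1 };
    getEnumProp(status, status.status);  // "ERROR" ("status" fails the ALL-CAPS check)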
--- a/testing/web-platform/tests/resources/testharness.js
+++ b/testing/web-platform/tests/resources/testharness.js
@@ -531,17 +531,16 @@ policies and contribution forms [3].
         // so do not use instanceof here.
         return 'ServiceWorker' in global_scope &&
             Object.prototype.toString.call(worker) == '[object ServiceWorker]';
     }
 
     /*
      * API functions
      */
-
     function test(func, name, properties)
     {
         var test_name = name ? name : test_environment.next_default_test_name();
         properties = properties ? properties : {};
         var test_obj = new Test(test_name, properties);
         test_obj.step(func, test_obj, test_obj);
         if (test_obj.phase === test_obj.phases.STARTED) {
             test_obj.done();
@@ -561,41 +560,52 @@ policies and contribution forms [3].
         if (func) {
             test_obj.step(func, test_obj, test_obj);
         }
         return test_obj;
     }
 
     function promise_test(func, name, properties) {
         var test = async_test(name, properties);
+        test._is_promise_test = true;
+
         // If there is no promise tests queue make one.
         if (!tests.promise_tests) {
             tests.promise_tests = Promise.resolve();
         }
         tests.promise_tests = tests.promise_tests.then(function() {
-            var donePromise = new Promise(function(resolve) {
-                test._add_cleanup(resolve);
-            });
-            var promise = test.step(func, test, test);
-            test.step(function() {
-                assert_not_equals(promise, undefined);
-            });
-            Promise.resolve(promise).then(
-                    function() {
+            return new Promise(function(resolve) {
+                var promise = test.step(func, test, test);
+
+                test.step(function() {
+                    assert_not_equals(promise, undefined);
+                });
+
+                // Test authors may use the `step` method within a
+                // `promise_test` even though this reflects a mixture of
+                // asynchronous control flow paradigms. The "done" callback
+                // should be registered prior to the resolution of the
+                // user-provided Promise to avoid timeouts in cases where the
+                // Promise does not settle but a `step` function has thrown an
+                // error.
+                add_test_done_callback(test, resolve);
+
+                Promise.resolve(promise)
+                    .catch(test.step_func(
+                        function(value) {
+                            if (value instanceof AssertionError) {
+                                throw value;
+                            }
+                            assert(false, "promise_test", null,
+                                   "Unhandled rejection with value: ${value}", {value:value});
+                        }))
+                    .then(function() {
                         test.done();
-                    })
-                .catch(test.step_func(
-                    function(value) {
-                        if (value instanceof AssertionError) {
-                            throw value;
-                        }
-                        assert(false, "promise_test", null,
-                               "Unhandled rejection with value: ${value}", {value:value});
-                    }));
-            return donePromise;
+                    });
+            });
         });
     }
 
     function promise_rejects(test, expected, promise, description) {
         return promise.then(test.unreached_func("Should have rejected: " + description)).catch(function(e) {
             assert_throws(expected, function() { throw e }, description);
         });
     }
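The ordering described in the new comment is what keeps `tests.promise_tests` from deadlocking: if `resolve` were only attached to the user promise, a `step` that throws while that promise stays pending would block every subsequent `promise_test`. A sketch of the scenario (mirroring the "exception in `step` of a promise_test" case added in this patch):

    promise_test(function(t) {
      t.step(function() { throw new Error('fails the test immediately'); });
      return new Promise(function() {});  // never settles
    }, 'fails via step');

    // Because the "done" callback is registered up front, the queue still
    // advances and this test runs to completion:
    promise_test(function() { return Promise.resolve(); }, 'after');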
@@ -718,16 +728,18 @@ policies and contribution forms [3].
         test_environment.on_new_harness_properties(properties);
     }
 
     function done() {
         if (tests.tests.length === 0) {
             tests.set_file_is_test();
         }
         if (tests.file_is_test) {
+            // Tests declared with `file_is_test` never have asynchronous
+            // cleanup logic, so the fully-synchronous `done` function can
+            // be used here.
             tests.tests[0].done();
         }
         tests.end_wait();
     }
 
     function generate_tests(func, args, properties) {
         forEach(args, function(x, i)
                 {
@@ -1446,17 +1458,17 @@ policies and contribution forms [3].
 
     function Test(name, properties)
     {
         if (tests.file_is_test && tests.tests.length) {
             throw new Error("Tried to create a test with file_is_test");
         }
         this.name = name;
 
-        this.phase = tests.phase === tests.phases.ABORTED ?
+        this.phase = tests.is_aborted ?
             this.phases.COMPLETE : this.phases.INITIAL;
 
         this.status = this.NOTRUN;
         this.timeout_id = null;
         this.index = null;
 
         this.properties = properties;
         var timeout = properties.timeout ? properties.timeout : settings.test_timeout;
@@ -1465,19 +1477,21 @@ policies and contribution forms [3].
         } else {
             this.timeout_length = null;
         }
 
         this.message = null;
         this.stack = null;
 
         this.steps = [];
+        this._is_promise_test = false;
 
         this.cleanup_callbacks = [];
         this._user_defined_cleanup_count = 0;
+        this._done_callbacks = [];
 
         tests.push(this);
     }
 
     Test.statuses = {
         PASS:0,
         FAIL:1,
         TIMEOUT:2,
@@ -1485,17 +1499,18 @@ policies and contribution forms [3].
     };
 
     Test.prototype = merge({}, Test.statuses);
 
     Test.prototype.phases = {
         INITIAL:0,
         STARTED:1,
         HAS_RESULT:2,
-        COMPLETE:3
+        CLEANING:3,
+        COMPLETE:4
     };
 
     Test.prototype.structured_clone = function()
     {
         if (!this._structured_clone) {
             var msg = this.message;
             msg = msg ? String(msg) : msg;
             this._structured_clone = merge({
@@ -1641,65 +1656,156 @@ policies and contribution forms [3].
         this.timeout_id = null;
         this.set_status(this.TIMEOUT, "Test timed out");
         this.phase = this.phases.HAS_RESULT;
         this.done();
     };
 
     Test.prototype.force_timeout = Test.prototype.timeout;
 
+    /**
+     * Update the test status, initiate "cleanup" functions, and signal test
+     * completion.
+     */
     Test.prototype.done = function()
     {
-        if (this.phase == this.phases.COMPLETE) {
+        if (this.phase >= this.phases.CLEANING) {
             return;
         }
 
         if (this.phase <= this.phases.STARTED) {
             this.set_status(this.PASS, null);
         }
 
-        this.phase = this.phases.COMPLETE;
-
         if (global_scope.clearTimeout) {
             clearTimeout(this.timeout_id);
         }
-        tests.result(this);
+
         this.cleanup();
     };
 
+    function add_test_done_callback(test, callback)
+    {
+        if (test.phase === test.phases.COMPLETE) {
+            callback();
+            return;
+        }
+
+        test._done_callbacks.push(callback);
+    }
+
     /*
      * Invoke all specified cleanup functions. If one or more produce an error,
      * the context is in an unpredictable state, so all further testing should
      * be cancelled.
      */
     Test.prototype.cleanup = function() {
         var error_count = 0;
-        var total;
+        var bad_value_count = 0;
+        function on_error() {
+            error_count += 1;
+            // Abort tests immediately so that tests declared within subsequent
+            // cleanup functions are not run.
+            tests.abort();
+        }
+        var this_obj = this;
+        var results = [];
+
+        this.phase = this.phases.CLEANING;
 
         forEach(this.cleanup_callbacks,
                 function(cleanup_callback) {
+                    var result;
+
                     try {
-                        cleanup_callback();
+                        result = cleanup_callback();
                     } catch (e) {
-                        // Set test phase immediately so that tests declared
+                        on_error();
+                        return;
+                    }
+
+                    if (!is_valid_cleanup_result(this_obj, result)) {
+                        bad_value_count += 1;
+                        // Abort tests immediately so that tests declared
                         // within subsequent cleanup functions are not run.
-                        tests.phase = tests.phases.ABORTED;
-                        error_count += 1;
+                        tests.abort();
                     }
+
+                    results.push(result);
                 });
 
-        if (error_count > 0) {
-            total = this._user_defined_cleanup_count;
+        if (!this._is_promise_test) {
+            cleanup_done(this_obj, error_count, bad_value_count);
+        } else {
+            all_async(results,
+                      function(result, done) {
+                          if (result && typeof result.then === "function") {
+                              result
+                                  .then(null, on_error)
+                                  .then(done);
+                          } else {
+                              done();
+                          }
+                      },
+                      function() {
+                          cleanup_done(this_obj, error_count, bad_value_count);
+                      });
+        }
+    };
+
+    /**
+     * Determine if the return value of a cleanup function is valid for a given
+     * test. Any test may return the value `undefined`. Tests created with
+     * `promise_test` may alternatively return "thenable" object values.
+     */
+    function is_valid_cleanup_result(test, result) {
+        if (result === undefined) {
+            return true;
+        }
+
+        if (test._is_promise_test) {
+            return result && typeof result.then === "function";
+        }
+
+        return false;
+    }
+
+    function cleanup_done(test, error_count, bad_value_count) {
+        if (error_count || bad_value_count) {
+            var total = test._user_defined_cleanup_count;
+
             tests.status.status = tests.status.ERROR;
-            tests.status.message = "Test named '" + this.name +
-                "' specified " + total + " 'cleanup' function" +
-                (total > 1 ? "s" : "") + ", and " + error_count + " failed.";
+            tests.status.message = "Test named '" + test.name +
+                "' specified " + total +
+                " 'cleanup' function" + (total > 1 ? "s" : "");
+
+            if (error_count) {
+                tests.status.message += ", and " + error_count + " failed";
+            }
+
+            if (bad_value_count) {
+                var type = test._is_promise_test ?
+                   "non-thenable" : "non-undefined";
+                tests.status.message += ", and " + bad_value_count +
+                    " returned a " + type + " value";
+            }
+
+            tests.status.message += ".";
+
             tests.status.stack = null;
         }
-    };
+
+        test.phase = test.phases.COMPLETE;
+        tests.result(test);
+        forEach(test._done_callbacks,
+                function(callback) {
+                    callback();
+                });
+        test._done_callbacks.length = 0;
+    }
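Together, `is_valid_cleanup_result` and `cleanup_done` define the contract for `add_cleanup` return values: cleanup for a synchronous test must return `undefined`, while `promise_test` cleanup may instead return a thenable that the harness now awaits before reporting the result. A sketch of both sides of the contract (the `/reset` endpoint is hypothetical):

    promise_test(function(t) {
      t.add_cleanup(function() {
        return fetch('/reset', { method: 'POST' });  // thenable: awaited
      });
      return Promise.resolve();
    }, 'async cleanup is awaited');

    test(function(t) {
      t.add_cleanup(function() { return 42; });  // non-undefined: harness error
    }, 'bad cleanup return value');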
 
     /*
      * A RemoteTest object mirrors a Test object on a remote worker. The
      * associated RemoteWorker updates the RemoteTest object in response to
      * received events. In turn, the RemoteTest object replicates these events
      * on the local document. This allows listeners (test result reporting
      * etc..) to transparently handle local and remote events.
      */
@@ -1707,47 +1813,74 @@ policies and contribution forms [3].
         var this_obj = this;
         Object.keys(clone).forEach(
                 function(key) {
                     this_obj[key] = clone[key];
                 });
         this.index = null;
         this.phase = this.phases.INITIAL;
         this.update_state_from(clone);
+        this._done_callbacks = [];
         tests.push(this);
     }
 
     RemoteTest.prototype.structured_clone = function() {
         var clone = {};
         Object.keys(this).forEach(
                 (function(key) {
                     var value = this[key];
+                    // `RemoteTest` instances are responsible for managing
+                    // their own "done" callback functions, so those functions
+                    // are not relevant in other execution contexts. Because of
+                    // this (and because Function values cannot be serialized
+                    // for cross-realm transmission), the property should not
+                    // be considered when cloning instances.
+                    if (key === '_done_callbacks') {
+                        return;
+                    }
 
                     if (typeof value === "object" && value !== null) {
                         clone[key] = merge({}, value);
                     } else {
                         clone[key] = value;
                     }
                 }).bind(this));
         clone.phases = merge({}, this.phases);
         return clone;
     };
 
-    RemoteTest.prototype.cleanup = function() {};
+    /**
+     * `RemoteTest` instances are objects which represent tests running in
+     * another realm. They do not define "cleanup" functions (if necessary,
+     * such functions are defined on the associated `Test` instance within the
+     * external realm). However, `RemoteTests` may have "done" callbacks (e.g.
+     * as attached by the `Tests` instance responsible for tracking the overall
+     * test status in the parent realm). The `cleanup` method delegates to
+     * `done` in order to ensure that such callbacks are invoked following the
+     * completion of the `RemoteTest`.
+     */
+    RemoteTest.prototype.cleanup = function() {
+        this.done();
+    };
     RemoteTest.prototype.phases = Test.prototype.phases;
     RemoteTest.prototype.update_state_from = function(clone) {
         this.status = clone.status;
         this.message = clone.message;
         this.stack = clone.stack;
         if (this.phase === this.phases.INITIAL) {
             this.phase = this.phases.STARTED;
         }
     };
     RemoteTest.prototype.done = function() {
         this.phase = this.phases.COMPLETE;
+
+        forEach(this._done_callbacks,
+                function(callback) {
+                    callback();
+                });
     }
 
     /*
      * A RemoteContext listens for test events from a remote test context, such
      * as another window or a worker. These events are then used to construct
      * and maintain RemoteTest objects that mirror the tests running in the
      * remote context.
      *
@@ -1769,16 +1902,21 @@ policies and contribution forms [3].
 
         // Keeping a reference to the remote object and the message handler until
         // remote_done() is seen prevents the remote object and its message channel
         // from going away before all the messages are dispatched.
         this.remote = remote;
         this.message_target = message_target;
         this.message_handler = function(message) {
             var passesFilter = !message_filter || message_filter(message);
+            // The reference to the `running` property in the following
+            // condition is unnecessary because that value is only set to
+            // `false` after the `message_handler` function has been
+            // unsubscribed.
+            // TODO: Simplify the condition by removing the reference.
             if (this_obj.running && message.data && passesFilter &&
                 (message.data.type in this_obj.message_handlers)) {
                 this_obj.message_handlers[message.data.type].call(this_obj, message.data);
             }
         };
 
         if (self.Promise) {
             this.done = new Promise(function(resolve) {
@@ -1789,23 +1927,19 @@ policies and contribution forms [3].
         this.message_target.addEventListener("message", this.message_handler);
     }
 
     RemoteContext.prototype.remote_error = function(error) {
         var message = error.message || String(error);
         var filename = (error.filename ? " " + error.filename: "");
         // FIXME: Display remote error states separately from main document
         // error state.
-        this.remote_done({
-            status: {
-                status: tests.status.ERROR,
-                message: "Error in remote" + filename + ": " + message,
-                stack: error.stack
-            }
-        });
+        tests.set_status(tests.status.ERROR,
+                         "Error in remote" + filename + ": " + message,
+                         error.stack);
 
         if (error.preventDefault) {
             error.preventDefault();
         }
     };
 
     RemoteContext.prototype.test_state = function(data) {
         var remote_test = this.tests[data.test.index];
@@ -1822,20 +1956,19 @@ policies and contribution forms [3].
         remote_test.update_state_from(data.test);
         remote_test.done();
         tests.result(remote_test);
     };
 
     RemoteContext.prototype.remote_done = function(data) {
         if (tests.status.status === null &&
             data.status.status !== data.status.OK) {
-            tests.status.status = data.status.status;
-            tests.status.message = data.status.message;
-            tests.status.stack = data.status.stack;
+            tests.set_status(data.status.status, data.status.message, data.status.stack);
         }
+
         this.message_target.removeEventListener("message", this.message_handler);
         this.running = false;
 
         // If remote context is cross origin assigning to onerror is not
         // possible, so silently catch those errors.
         try {
           this.remote.onerror = null;
         } catch (e) {
@@ -1897,18 +2030,17 @@ policies and contribution forms [3].
         this.tests = [];
         this.num_pending = 0;
 
         this.phases = {
             INITIAL:0,
             SETUP:1,
             HAVE_TESTS:2,
             HAVE_RESULTS:3,
-            COMPLETE:4,
-            ABORTED:5
+            COMPLETE:4
         };
         this.phase = this.phases.INITIAL;
 
         this.properties = {};
 
         this.wait_for_finish = false;
         this.processing_callbacks = false;
 
@@ -1988,32 +2120,65 @@ policies and contribution forms [3].
             throw new Error("Tried to set file as test after creating a test");
         }
         this.wait_for_finish = true;
         this.file_is_test = true;
         // Create the test, which will add it to the list of tests
         async_test();
     };
 
+    Tests.prototype.set_status = function(status, message, stack)
+    {
+        this.status.status = status;
+        this.status.message = message;
+        this.status.stack = stack ? stack : null;
+    };
+
     Tests.prototype.set_timeout = function() {
         if (global_scope.clearTimeout) {
             var this_obj = this;
             clearTimeout(this.timeout_id);
             if (this.timeout_length !== null) {
                 this.timeout_id = setTimeout(function() {
                                                  this_obj.timeout();
                                              }, this.timeout_length);
             }
         }
     };
 
     Tests.prototype.timeout = function() {
+        var test_in_cleanup = null;
+
         if (this.status.status === null) {
-            this.status.status = this.status.TIMEOUT;
+            forEach(this.tests,
+                    function(test) {
+                        // No more than one test is expected to be in the
+                        // "CLEANING" phase at any time.
+                        if (test.phase === test.phases.CLEANING) {
+                            test_in_cleanup = test;
+                        }
+
+                        test.phase = test.phases.COMPLETE;
+                    });
+
+            // Timeouts that occur while a test is in the "cleanup" phase
+            // indicate that some global state was not properly reverted. This
+            // invalidates the overall test execution, so the timeout should be
+            // reported as an error and cancel the execution of any remaining
+            // tests.
+            if (test_in_cleanup) {
+                this.status.status = this.status.ERROR;
+                this.status.message = "Timeout while running cleanup for " +
+                    "test named \"" + test_in_cleanup.name + "\".";
+                this.status.stack = null;
+            } else {
+                this.status.status = this.status.TIMEOUT;
+            }
         }
+
         this.complete();
     };
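The new branch in `timeout` distinguishes an ordinary harness timeout from one that fires while a test is mid-cleanup; the latter means shared state may not have been restored, so it is escalated to ERROR and halts further testing. A sketch of a test that would trip it:

    promise_test(function(t) {
      t.add_cleanup(function() {
        return new Promise(function() {});  // cleanup never settles
      });
      return Promise.resolve();
    }, 'stuck cleanup');
    // On harness timeout: status ERROR with the message
    // 'Timeout while running cleanup for test named "stuck cleanup".'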
 
     Tests.prototype.end_wait = function()
     {
         this.wait_for_finish = false;
         if (this.all_done()) {
             this.complete();
@@ -2034,21 +2199,20 @@ policies and contribution forms [3].
         var this_obj = this;
         forEach(this.test_state_callbacks,
                 function(callback) {
                     callback(test, this_obj);
                 });
     };
 
     Tests.prototype.all_done = function() {
-        return this.phase === this.phases.ABORTED ||
-            (this.tests.length > 0 && test_environment.all_loaded &&
-                this.num_pending === 0 && !this.wait_for_finish &&
+        return this.tests.length > 0 && test_environment.all_loaded &&
+                (this.num_pending === 0 || this.is_aborted) && !this.wait_for_finish &&
                 !this.processing_callbacks &&
-                !this.pending_remotes.some(function(w) { return w.running; }));
+                !this.pending_remotes.some(function(w) { return w.running; });
     };
 
     Tests.prototype.start = function() {
         this.phase = this.phases.HAVE_TESTS;
         this.notify_start();
     };
 
     Tests.prototype.notify_start = function() {
@@ -2057,20 +2221,21 @@ policies and contribution forms [3].
                  function(callback)
                  {
                      callback(this_obj.properties);
                  });
     };
 
     Tests.prototype.result = function(test)
     {
-        if (this.phase > this.phases.HAVE_RESULTS) {
-            return;
+        // If the harness has already transitioned beyond the `HAVE_RESULTS`
+        // phase, subsequent tests should not cause it to revert.
+        if (this.phase <= this.phases.HAVE_RESULTS) {
+            this.phase = this.phases.HAVE_RESULTS;
         }
-        this.phase = this.phases.HAVE_RESULTS;
         this.num_pending--;
         this.notify_result(test);
     };
 
     Tests.prototype.notify_result = function(test) {
         var this_obj = this;
         this.processing_callbacks = true;
         forEach(this.test_done_callbacks,
@@ -2083,29 +2248,64 @@ policies and contribution forms [3].
             this_obj.complete();
         }
     };
 
     Tests.prototype.complete = function() {
         if (this.phase === this.phases.COMPLETE) {
             return;
         }
-        this.phase = this.phases.COMPLETE;
         var this_obj = this;
-        this.tests.forEach(
-            function(x)
-            {
-                if (x.phase < x.phases.COMPLETE) {
-                    this_obj.notify_result(x);
-                    x.cleanup();
-                    x.phase = x.phases.COMPLETE;
-                }
-            }
-        );
-        this.notify_complete();
+        var all_complete = function() {
+            this_obj.phase = this_obj.phases.COMPLETE;
+            this_obj.notify_complete();
+        };
+        var incomplete = filter(this.tests,
+                                function(test) {
+                                    return test.phase < test.phases.COMPLETE;
+                                });
+
+        /**
+         * To preserve legacy behavior, overall test completion must be
+         * signaled synchronously.
+         */
+        if (incomplete.length === 0) {
+            all_complete();
+            return;
+        }
+
+        all_async(incomplete,
+                  function(test, testDone)
+                  {
+                      if (test.phase === test.phases.INITIAL) {
+                          test.phase = test.phases.COMPLETE;
+                          testDone();
+                      } else {
+                          add_test_done_callback(test, testDone);
+                          test.cleanup();
+                      }
+                  },
+                  all_complete);
+    };
+
+    /**
+     * Update the harness status to reflect an unrecoverable harness error that
+     * should cancel all further testing. Update all previously-defined tests
+     * which have not yet started to indicate that they will not be executed.
+     */
+    Tests.prototype.abort = function() {
+        this.status.status = this.status.ERROR;
+        this.is_aborted = true;
+
+        forEach(this.tests,
+                function(test) {
+                    if (test.phase === test.phases.INITIAL) {
+                        test.phase = test.phases.COMPLETE;
+                    }
+                });
     };
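`abort` is what turns a single failed cleanup into a harness-wide stop: tests that are still in their INITIAL phase are marked COMPLETE without running, so they report NOTRUN. A sketch of the cascade (matching the "exception in `add_cleanup` of a test" case above):

    test(function(t) {
      t.add_cleanup(function() { throw new Error('cleanup failure'); });
    }, 'during');  // PASS; its failing cleanup aborts the harness

    test(function() {}, 'after');  // declared post-abort: reported NOTRUN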
 
     /*
      * Determine if any tests share the same `name` property. Return an array
      * containing the names of any such duplicates.
      */
     Tests.prototype.find_duplicates = function() {
         var names = Object.create(null);
@@ -2861,16 +3061,67 @@ policies and contribution forms [3].
     {
         for (var i = 0; i < array.length; i++) {
             if (array.hasOwnProperty(i)) {
                 callback.call(thisObj, array[i], i, array);
             }
         }
     }
 
+    /**
+     * Immediately invoke an "iteratee" function with a series of values in
+     * parallel and invoke a final "done" function when all of the "iteratee"
+     * invocations have signaled completion.
+     *
+     * If all callbacks complete synchronously (or if no values are
+     * provided), the `done_callback` will be invoked synchronously. It is the
+     * responsibility of the caller to ensure asynchronicity in cases where
+     * that is desired.
+     *
+     * @param {array} values Zero or more values to use in the invocation of
+     *                      `iter_callback`
+     * @param {function} iter_callback A function that will be invoked once for
+     *                                 each of the provided `values`. Two
+     *                                 arguments will be available in each
+     *                                 invocation: the value from `values` and
+     *                                 a function that must be invoked to
+     *                                 signal completion
+     * @param {function} done_callback A function that will be invoked after
+     *                                 all operations initiated by the
+     *                                 `iter_callback` function have signaled
+     *                                 completion
+     */
+    function all_async(values, iter_callback, done_callback)
+    {
+        var remaining = values.length;
+
+        if (remaining === 0) {
+            done_callback();
+        }
+
+        forEach(values,
+                function(element) {
+                    var invoked = false;
+                    var elDone = function() {
+                        if (invoked) {
+                            return;
+                        }
+
+                        invoked = true;
+                        remaining -= 1;
+
+                        if (remaining === 0) {
+                            done_callback();
+                        }
+                    };
+
+                    iter_callback(element, elDone);
+                });
+    }
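A short usage sketch of `all_async`, matching its doc comment; note that with an empty `values` array, or when every callback signals completion synchronously, `done_callback` fires synchronously as well:

    all_async([10, 20, 30],
              function(delay, done) {
                  setTimeout(done, delay);  // signal completion per value
              },
              function() {
                  console.log('all three timers have fired');
              });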
+
     function merge(a,b)
     {
         var rv = {};
         var p;
         for (p in a) {
             rv[p] = a[p];
         }
         for (p in b) {
@@ -3003,16 +3254,18 @@ policies and contribution forms [3].
 
             if (tests.file_is_test) {
                 var test = tests.tests[0];
                 if (test.phase >= test.phases.HAS_RESULT) {
                     return;
                 }
                 test.set_status(test.FAIL, e.message, stack);
                 test.phase = test.phases.HAS_RESULT;
+                // The following function invocation is superfluous.
+                // TODO: Remove.
                 test.done();
             } else if (!tests.allow_uncaught_exception) {
                 tests.status.status = tests.status.ERROR;
                 tests.status.message = e.message;
                 tests.status.stack = stack;
             }
             done();
         };