--- a/content/media/webspeech/synth/ipc/test/file_ipc.html
+++ b/content/media/webspeech/synth/ipc/test/file_ipc.html
@@ -8,16 +8,17 @@
<body>
<script type="application/javascript;version=1.7">
"use strict";
window.SimpleTest = parent.SimpleTest;
window.ok = parent.ok;
window.is = parent.is;
+ window.info = parent.info;
function iframeScriptFirst() {
content.wrappedJSObject.RunSet.reloadAndRunAll({
preventDefault: function() { },
__exposedProps__: { preventDefault: 'r' }
});
}
--- a/testing/mochitest/browser-harness.xul
+++ b/testing/mochitest/browser-harness.xul
@@ -114,17 +114,18 @@
if (gConfig.autorun)
setTimeout(runTests, 0);
}
var gErrorCount = 0;
function browserTest(aTestFile) {
- this.path = aTestFile;
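+ // aTestFile now arrives as an object carrying both the url and the expected (fail-if) state from the manifest.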
+ this.path = aTestFile['url'];
+ this.expected = aTestFile['expected'];
this.dumper = gDumper;
this.results = [];
this.scope = null;
this.duration = 0;
this.unexpectedTimeouts = 0;
this.lastOutputTime = 0;
}
browserTest.prototype = {
--- a/testing/mochitest/browser-test.js
+++ b/testing/mochitest/browser-test.js
@@ -352,16 +352,20 @@ Tester.prototype = {
try {
yield func.apply(testScope);
}
catch (ex) {
this.currentTest.addResult(new testResult(false, "Cleanup function threw an exception", ex, false));
}
};
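+ // A fail-if test that never failed points at a stale manifest annotation; report it so the annotation can be removed.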
+ if (testScope.__expected == 'fail' && testScope.__num_failed <= 0) {
+ this.currentTest.addResult(new testResult(false, "We expected at least one assertion to fail because this test file was marked as fail-if in the manifest", "", false));
+ }
+
this.Promise.Debugging.flushUncaughtErrors();
let winUtils = window.QueryInterface(Ci.nsIInterfaceRequestor)
.getInterface(Ci.nsIDOMWindowUtils);
if (winUtils.isTestControllingRefreshes) {
this.currentTest.addResult(new testResult(false, "test left refresh driver under test control", "", false));
winUtils.restoreNormalRefresh();
}
@@ -570,17 +574,17 @@ Tester.prototype = {
}),
execTest: function Tester_execTest() {
this.dumper.structuredLogger.testStart(this.currentTest.path);
this.SimpleTest.reset();
// Load the tests into a testscope
- let currentScope = this.currentTest.scope = new testScope(this, this.currentTest);
+ let currentScope = this.currentTest.scope = new testScope(this, this.currentTest, this.currentTest.expected);
let currentTest = this.currentTest;
// Import utils in the test scope.
this.currentTest.scope.EventUtils = this.EventUtils;
this.currentTest.scope.SimpleTest = this.SimpleTest;
this.currentTest.scope.gTestPath = this.currentTest.path;
this.currentTest.scope.Task = this.Task;
this.currentTest.scope.Promise = this.Promise;
@@ -799,21 +803,30 @@ function testResult(aCondition, aName, a
function testMessage(aName) {
this.msg = aName || "";
this.info = true;
}
// Need to be careful adding properties to this object, since its properties
// cannot conflict with global variables used in tests.
-function testScope(aTester, aTest) {
+function testScope(aTester, aTest, expected) {
this.__tester = aTester;
+ this.__expected = expected;
+ this.__num_failed = 0;
var self = this;
this.ok = function test_ok(condition, name, diag, stack) {
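+ // Under fail-if, failing assertions are expected: count them and flip them to passes.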
+ if (self.__expected == 'fail') {
+ if (!condition) {
+ self.__num_failed++;
+ condition = true;
+ }
+ }
+
aTest.addResult(new testResult(condition, name, diag, false,
stack ? stack : Components.stack.caller));
};
this.is = function test_is(a, b, name) {
self.ok(a == b, name, "Got " + a + ", expected " + b, false,
Components.stack.caller);
};
this.isnot = function test_isnot(a, b, name) {
@@ -925,16 +938,20 @@ function testScope(aTester, aTest) {
if (typeof(min) != "number" || typeof(max) != "number" ||
min < 0 || max < min) {
throw "bad parameter to expectAssertions";
}
self.__expectedMinAsserts = min;
self.__expectedMaxAsserts = max;
};
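+ // Lets a test mark itself as expected-to-fail, mirroring the fail-if manifest annotation.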
+ this.setExpected = function test_setExpected() {
+ self.__expected = 'fail';
+ };
+
this.finish = function test_finish() {
self.__done = true;
if (self.__waitTimer) {
self.executeSoon(function() {
if (self.__done && self.__waitTimer) {
clearTimeout(self.__waitTimer);
self.__waitTimer = null;
self.__tester.nextTest();
@@ -954,16 +971,17 @@ testScope.prototype = {
__done: true,
__generator: null,
__tasks: null,
__waitTimer: null,
__cleanupFunctions: [],
__timeoutFactor: 1,
__expectedMinAsserts: 0,
__expectedMaxAsserts: 0,
+ __expected: 'pass',
EventUtils: {},
SimpleTest: {},
Task: null,
Promise: null,
Assert: null,
/**
--- a/testing/mochitest/chrome/chrome.ini
+++ b/testing/mochitest/chrome/chrome.ini
@@ -3,9 +3,13 @@ support-files = test-dir/test-file
[test_sample.xul]
[test_sanityChromeUtils.xul]
skip-if = buildapp == 'mulet'
[test_sanityPluginUtils.html]
skip-if = buildapp == 'mulet'
[test_sanityException.xul]
[test_sanityException2.xul]
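+# sanity coverage for fail-if handling in chrome mochitests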
+[test_sanityManifest.xul]
+fail-if = true
+[test_sanityManifest_pf.xul]
+fail-if = true
[test_chromeGetTestFile.xul]
new file mode 100644
--- /dev/null
+++ b/testing/mochitest/chrome/test_sanityManifest.xul
@@ -0,0 +1,19 @@
+<?xml version="1.0"?>
+<?xml-stylesheet href="chrome://global/skin" type="text/css"?>
+<?xml-stylesheet href="chrome://mochikit/content/tests/SimpleTest/test.css" type="text/css"?>
+<!--
+https://bugzilla.mozilla.org/show_bug.cgi?id=987849
+-->
+<window title="Mozilla Bug 987849"
+ xmlns="http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul">
+
+ <script type="application/javascript" src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"/>
+
+<body xmlns="http://www.w3.org/1999/xhtml">
+<a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=987849">Mozilla Bug 987849</a>
+<script type="application/javascript"><![CDATA[
+ok(false, "a call to ok");
+]]></script>
+</body>
+
+</window>
new file mode 100644
--- /dev/null
+++ b/testing/mochitest/chrome/test_sanityManifest_pf.xul
@@ -0,0 +1,20 @@
+<?xml version="1.0"?>
+<?xml-stylesheet href="chrome://global/skin" type="text/css"?>
+<?xml-stylesheet href="chrome://mochikit/content/tests/SimpleTest/test.css" type="text/css"?>
+<!--
+https://bugzilla.mozilla.org/show_bug.cgi?id=987849
+-->
+<window title="Mozilla Bug 987849"
+ xmlns="http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul">
+
+ <script type="application/javascript" src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"/>
+
+<body xmlns="http://www.w3.org/1999/xhtml">
+<a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=987849">Mozilla Bug 987849</a>
+<script type="application/javascript"><![CDATA[
+ok(true, "a true call to ok");
+ok(false, "a false call to ok");
+]]></script>
+</body>
+
+</window>
--- a/testing/mochitest/chunkifyTests.js
+++ b/testing/mochitest/chunkifyTests.js
@@ -10,17 +10,21 @@ function chunkifyTests(tests, totalChunk
// We want to split the tests up into chunks according to which directory
// they're in
if (chunkByDir) {
chunkByDir = parseInt(chunkByDir);
var tests_by_dir = {};
var test_dirs = []
for (var i = 0; i < tests.length; ++i) {
- var test_path = tests[i];
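+ // Manifest-driven lists wrap each entry as {'test': {'url': ...}}; chrome and browser-chrome lists carry the url directly.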
+ if ('test' in tests[i]) {
+ var test_path = tests[i]['test']['url'];
+ } else {
+ var test_path = tests[i]['url'];
+ }
if (test_path[0] == '/') {
test_path = test_path.substr(1);
}
// mochitest-chrome and mochitest-browser-chrome pass an array of chrome://
// URIs
var protocolRegexp = /^[a-zA-Z]+:\/\//;
if (protocolRegexp.test(test_path)) {
test_path = test_path.replace(protocolRegexp, "");
--- a/testing/mochitest/manifestLibrary.js
+++ b/testing/mochitest/manifestLibrary.js
@@ -20,19 +20,21 @@ function parseTestManifest(testManifest,
for (var obj of testManifest['tests']) {
var path = obj['path'];
// Note that obj.disabled may be "". We still want to skip in that case.
if ("disabled" in obj) {
dump("TEST-SKIPPED | " + path + " | " + obj.disabled + "\n");
continue;
}
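+ // Wrap each entry so the harness can see the expected result, e.g.
+ // {'test': {'url': <test url>, 'expected': 'fail'}} for a fail-if test.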
if (params.testRoot != 'tests' && params.testRoot !== undefined) {
- links[params.baseurl + '/' + params.testRoot + '/' + path] = true
+ var name = params.baseurl + '/' + params.testRoot + '/' + path;
+ links[name] = {'test': {'url': name, 'expected': obj['expected']}};
} else {
- paths.push(params.testPrefix + path);
+ var name = params.testPrefix + path;
+ paths.push({'test': {'url': name, 'expected': obj['expected']}});
}
}
if (paths.length > 0) {
callback(paths);
} else {
callback(links);
}
}
@@ -95,17 +97,21 @@ function filterTests(filter, testList, r
}
}
var testRoot = config.testRoot || "tests";
// Start with testList, and put everything that's in 'runtests' in
// filteredTests.
if (Object.keys(runtests).length) {
for (var i = 0; i < testList.length; i++) {
- var testpath = testList[i];
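+ // Entries may be wrapped manifest objects or plain paths; pull the url out either way.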
+ if ((testList[i] instanceof Object) && ('test' in testList[i])) {
+ var testpath = testList[i]['test']['url'];
+ } else {
+ var testpath = testList[i];
+ }
var tmppath = testpath.replace(/^\//, '');
for (var f in runtests) {
// Remove leading /tests/ if exists
file = f.replace(/^\//, '')
file = file.replace(/^tests\//, '')
// Match directory or filename, testList has <testroot>/<path>
if (tmppath.match(testRoot + "/" + file) != null) {
@@ -122,17 +128,21 @@ function filterTests(filter, testList, r
// excludedtests.
if (!Object.keys(excludetests).length) {
return filteredTests;
}
var refilteredTests = [];
for (var i = 0; i < filteredTests.length; i++) {
var found = false;
- var testpath = filteredTests[i];
+ if ((filteredTests[i] instanceof Object) && ('test' in filteredTests[i])) {
+ var testpath = filteredTests[i]['test']['url'];
+ } else {
+ var testpath = filteredTests[i];
+ }
var tmppath = testpath.replace(/^\//, '');
for (var f in excludetests) {
// Remove leading /tests/ if exists
file = f.replace(/^\//, '')
file = file.replace(/^tests\//, '')
// Match directory or filename, testList has <testroot>/<path>
if (tmppath.match(testRoot + "/" + file) != null) {
--- a/testing/mochitest/runtests.py
+++ b/testing/mochitest/runtests.py
@@ -656,17 +656,16 @@ class MochitestUtilsMixin(object):
testsToFilter option is used to filter/keep the tests provided in the list
disabled -- This allows to add all disabled tests on the build side
and then on the run side to only run the enabled ones
"""
tests = self.getActiveTests(options, disabled)
paths = []
-
for test in tests:
if testsToFilter and (test['path'] not in testsToFilter):
continue
paths.append(test)
# Bug 883865 - add this functionality into manifestparser
with open(os.path.join(SCRIPT_DIR, 'tests.json'), 'w') as manifestFile:
manifestFile.write(json.dumps({'tests': paths}))
@@ -1592,33 +1591,33 @@ class Mochitest(MochitestUtilsMixin):
for test in tests:
if 'disabled' in test:
del test['disabled']
else:
tests = manifest.active_tests(disabled=disabled, options=options, **info)
paths = []
for test in tests:
- if test.get('expected') == 'fail':
- raise Exception('fail-if encountered for test: %s. There is no support for fail-if in Mochitests.' % test['name'])
pathAbs = os.path.abspath(test['path'])
assert pathAbs.startswith(self.testRootAbs)
tp = pathAbs[len(self.testRootAbs):].replace('\\', '/').strip('/')
# Filter out tests if we are using --test-path
if testPath and not tp.startswith(testPath):
continue
if not self.isTest(options, tp):
self.log.warning('Warning: %s from manifest %s is not a valid test' % (test['name'], test['manifest']))
continue
testob = {'path': tp}
- if test.has_key('disabled'):
+ if 'disabled' in test:
testob['disabled'] = test['disabled']
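+ # Carry the fail-if expectation through to tests.json so the JS harness can honor it.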
+ if 'expected' in test:
+ testob['expected'] = test['expected']
paths.append(testob)
def path_sort(ob1, ob2):
path1 = ob1['path'].split('/')
path2 = ob2['path'].split('/')
return cmp(path1, path2)
paths.sort(path_sort)
@@ -1638,17 +1637,17 @@ class Mochitest(MochitestUtilsMixin):
"""
This method makes a list of tests that are to be run. Required mainly for --bisect-chunk.
"""
tests = self.getActiveTests(options)
self.logPreamble(tests)
testsToRun = []
for test in tests:
- if test.has_key('disabled'):
+ if 'disabled' in test:
continue
testsToRun.append(test['path'])
return testsToRun
def runMochitests(self, options, onLaunch=None):
"This is a base method for calling other methods in this class for --bisect-chunk."
testsToRun = self.getTestsToRun(options)
--- a/testing/mochitest/server.js
+++ b/testing/mochitest/server.js
@@ -416,17 +416,17 @@ function list(requestPath, directory, re
if (file.isDirectory()) {
key += "/";
}
if (recurse && file.isDirectory()) {
[links[key], childCount] = list(key, file, recurse);
count += childCount;
} else {
if (file.leafName.charAt(0) != '.') {
- links[key] = true;
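+ // Directory listings have no manifest, so every file defaults to expected 'pass'.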
+ links[key] = {'test': {'url': key, 'expected': 'pass'}};
}
}
}
return [links, count];
}
/**
@@ -488,23 +488,23 @@ function linksToListItems(links)
/**
* Transform nested hashtables of paths to a flat table rows.
*/
function linksToTableRows(links, recursionLevel)
{
var response = "";
for (var [link, value] in links) {
- var classVal = (!isTest(link) && !(value instanceof Object))
+ var classVal = (!isTest(link) && ((value instanceof Object) && ('test' in value)))
? "non-test invisible"
: "";
var spacer = "padding-left: " + (10 * recursionLevel) + "px";
- if (value instanceof Object) {
+ if ((value instanceof Object) && !('test' in value)) {
response += TR({class: "dir", id: "tr-" + link },
TD({colspan: "3"}, " "),
TD({style: spacer},
A({href: link}, link)));
response += linksToTableRows(value, recursionLevel + 1);
} else {
var bug_title = link.match(/test_bug\S+/);
var bug_num = null;
@@ -530,31 +530,31 @@ function linksToTableRows(links, recursi
}
}
}
return response;
}
function arrayOfTestFiles(linkArray, fileArray, testPattern) {
for (var [link, value] in Iterator(linkArray)) {
- if (value instanceof Object) {
+ if ((value instanceof Object) && !('test' in value)) {
arrayOfTestFiles(value, fileArray, testPattern);
- } else if (isTest(link, testPattern)) {
- fileArray.push(link)
+ } else if (isTest(link, testPattern) && (value instanceof Object)) {
+ fileArray.push(value['test'])
}
}
}
/**
* Produce a flat array of test file paths to be executed in the harness.
*/
function jsonArrayOfTestFiles(links)
{
var testFiles = [];
arrayOfTestFiles(links, testFiles);
- testFiles = ['"' + file + '"' for each(file in testFiles)];
+ testFiles = ['"' + file['url'] + '"' for each(file in testFiles)];
return "[" + testFiles.join(",\n") + "]";
}
/**
* Produce a normal directory listing.
*/
function regularListing(metadata, response)
{
--- a/testing/mochitest/tests/Harness_sanity/mochitest.ini
+++ b/testing/mochitest/tests/Harness_sanity/mochitest.ini
@@ -18,8 +18,15 @@ support-files = SpecialPowersLoadChromeS
[test_bug816847.html]
skip-if = toolkit == 'android' || e10s #No test app installed
[test_sanity_cleanup.html]
[test_sanity_cleanup2.html]
[test_sanityEventUtils.html]
skip-if = toolkit == 'android' #bug 688052
[test_sanitySimpletest.html]
skip-if = toolkit == 'android' #bug 688052
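+# sanity coverage for fail-if handling in plain mochitests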
+[test_sanity_manifest.html]
+skip-if = toolkit == 'android' # we use the old manifest style on android
+fail-if = true
+[test_sanity_manifest_pf.html]
+skip-if = toolkit == 'android' # we use the old manifest style on android
+fail-if = true
new file mode 100644
--- /dev/null
+++ b/testing/mochitest/tests/Harness_sanity/test_sanity_manifest.html
@@ -0,0 +1,16 @@
+<!-- Any copyright is dedicated to the Public Domain.
+ - http://creativecommons.org/publicdomain/zero/1.0/ -->
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>SimpleTest.expected = 'fail' test</title>
+ <script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<script class="testbody" type="text/javascript">
+ok(false, "We expect this to fail");
+
+</script>
+</body>
+</html>
new file mode 100644
--- /dev/null
+++ b/testing/mochitest/tests/Harness_sanity/test_sanity_manifest_pf.html
@@ -0,0 +1,17 @@
+<!-- Any copyright is dedicated to the Public Domain.
+ - http://creativecommons.org/publicdomain/zero/1.0/ -->
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>SimpleTest.expected = 'fail' test</title>
+ <script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<script class="testbody" type="text/javascript">
+ok(true, "We expect this to pass");
+ok(false, "We expect this to fail");
+
+</script>
+</body>
+</html>
--- a/testing/mochitest/tests/SimpleTest/SimpleTest.js
+++ b/testing/mochitest/tests/SimpleTest/SimpleTest.js
@@ -233,24 +233,43 @@ SimpleTest.testPluginIsOOP = function ()
}
return testPluginIsOOP;
};
SimpleTest._tests = [];
SimpleTest._stopOnLoad = true;
SimpleTest._cleanupFunctions = [];
+SimpleTest.expected = 'pass';
+SimpleTest.num_failed = 0;
+
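+// Pick up the per-test expected result from the TestRunner when this test runs inside the harness.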
+SimpleTest.setExpected = function () {
+ if (parent.TestRunner) {
+ SimpleTest.expected = parent.TestRunner.expected;
+ }
+};
+SimpleTest.setExpected();
/**
* Something like assert.
**/
SimpleTest.ok = function (condition, name, diag) {
var test = {'result': !!condition, 'name': name, 'diag': diag};
- var successInfo = {status:"PASS", expected:"PASS", message:"TEST-PASS"};
- var failureInfo = {status:"FAIL", expected:"PASS", message:"TEST-UNEXPECTED-FAIL"};
+ if (SimpleTest.expected == 'fail') {
+ if (!test.result) {
+ SimpleTest.num_failed++;
+ test.result = true;
+ }
+ var successInfo = {status:"PASS", expected:"PASS", message:"TEST-PASS"};
+ var failureInfo = {status:"FAIL", expected:"FAIL", message:"TEST-KNOWN-FAIL"};
+ } else {
+ var successInfo = {status:"PASS", expected:"PASS", message:"TEST-PASS"};
+ var failureInfo = {status:"FAIL", expected:"PASS", message:"TEST-UNEXPECTED-FAIL"};
+ }
SimpleTest._logResult(test, successInfo, failureInfo);
SimpleTest._tests.push(test);
};
/**
* Roughly equivalent to ok(a==b, name)
**/
SimpleTest.is = function (a, b, name) {
@@ -816,16 +835,26 @@ SimpleTest.finish = function() {
var err = "[SimpleTest.finish()] this test already called finish!";
if (parentRunner) {
parentRunner.structuredLogger.error(err);
} else {
dump(err + '\n');
}
}
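+ // Mirror the browser-test.js check: a fail-if test where nothing failed is reported as a failure.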
+ if (SimpleTest.expected == 'fail' && SimpleTest.num_failed <= 0) {
+ var msg = 'We expected at least one failing assertion because this test is marked fail-if in the manifest';
+ var test = {'result': false, 'name': 'fail-if condition in manifest', 'diag': msg};
+ var successInfo = {status:"PASS", expected:"PASS", message:"TEST-PASS"};
+ var failureInfo = {status:"FAIL", expected:"PASS", message:"TEST-UNEXPECTED-FAIL"};
+
+ SimpleTest._logResult(test, successInfo, failureInfo);
+ SimpleTest._tests.push(test);
+ }
+
SimpleTest.testsLength = SimpleTest._tests.length;
SimpleTest._alreadyFinished = true;
var afterCleanup = function() {
if (SpecialPowers.DOMWindowUtils.isTestControllingRefreshes) {
SimpleTest.ok(false, "test left refresh driver under test control");
SpecialPowers.DOMWindowUtils.restoreNormalRefresh();
--- a/testing/mochitest/tests/SimpleTest/TestRunner.js
+++ b/testing/mochitest/tests/SimpleTest/TestRunner.js
@@ -557,25 +557,38 @@ TestRunner.resetTests = function(listURL
TestRunner.structuredLogger.info("SimpleTest START Loop " + TestRunner._currentLoop);
TestRunner._urls = listURLs;
$('testframe').src="";
TestRunner._checkForHangs();
TestRunner.runNextTest();
}
+TestRunner.getNextUrl = function() {
+ var url = "";
+ var entry = TestRunner._urls[TestRunner._currentTest];
+ // Manifest-driven runs wrap each entry as {'test': {'url': ..., 'expected': ...}};
+ // some subtests/harnesses don't use a manifest and still pass plain URL strings.
+ if ((entry instanceof Object) && ('test' in entry)) {
+ url = entry['test']['url'];
+ TestRunner.expected = entry['test']['expected'];
+ } else {
+ url = entry;
+ TestRunner.expected = 'pass';
+ }
+ return url;
+};
+
/**
* Run the next test. If no test remains, calls onComplete().
**/
TestRunner._haltTests = false;
TestRunner.runNextTest = function() {
if (TestRunner._currentTest < TestRunner._urls.length &&
!TestRunner._haltTests)
{
- var url = TestRunner._urls[TestRunner._currentTest];
+ var url = TestRunner.getNextUrl();
TestRunner.currentTestURL = url;
$("current-test-path").innerHTML = url;
TestRunner._currentTestStartTime = new Date().valueOf();
TestRunner._timeoutFactor = 1;
TestRunner._expectedMinAsserts = 0;
TestRunner._expectedMaxAsserts = 0;
@@ -770,17 +783,17 @@ TestRunner.testUnloaded = function() {
// If we're in a debug build, check assertion counts. This code is
// similar to the code in Tester_nextTest in browser-test.js used
// for browser-chrome mochitests.
if (SpecialPowers.isDebugBuild) {
var newAssertionCount = SpecialPowers.assertionCount();
var numAsserts = newAssertionCount - TestRunner._lastAssertionCount;
TestRunner._lastAssertionCount = newAssertionCount;
- var url = TestRunner._urls[TestRunner._currentTest];
+ var url = TestRunner.getNextUrl();
var max = TestRunner._expectedMaxAsserts;
var min = TestRunner._expectedMinAsserts;
if (numAsserts > max) {
TestRunner.structuredLogger.testEnd(url,
"ERROR",
"OK",
"Assertion count " + numAsserts + " is greater than expected range " +
min + "-" + max + " assertions.",
--- a/testing/mochitest/tests/SimpleTest/setup.js
+++ b/testing/mochitest/tests/SimpleTest/setup.js
@@ -236,17 +236,17 @@ function hookup() {
}
function hookupTests(testList) {
if (testList.length > 0) {
gTestList = testList;
} else {
gTestList = [];
for (var obj in testList) {
- gTestList.push(obj);
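+ // The server now maps url -> {'test': {...}}, so push the value rather than the key.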
+ gTestList.push(testList[obj]);
}
}
document.getElementById('runtests').onclick = RunSet.reloadAndRunAll;
document.getElementById('toggleNonTests').onclick = toggleNonTests;
// run automatically if autorun specified
if (params.autorun) {
RunSet.runall();
--- a/testing/mochitest/tests/browser/browser.ini
+++ b/testing/mochitest/tests/browser/browser.ini
@@ -12,21 +12,29 @@ support-files =
[browser_privileges.js]
[browser_sanityException.js]
[browser_sanityException2.js]
skip-if = e10s
[browser_getTestFile.js]
support-files =
test-dir/*
-# Disabled, these are only good for testing the harness' failure reporting
-# browser_zz_fail_openwindow.js
-# browser_fail.js
-# browser_fail_add_task.js
-# browser_fail_async_throw.js
-# browser_fail_fp.js
-# browser_fail_pf.js
-# browser_fail_throw.js
-# browser_fail_timeout.js
-# browser_fail_unexpectedTimeout.js
-#
# Disabled because it would take too long, useful to check functionality though.
# browser_requestLongerTimeout.js
+[browser_zz_fail_openwindow.js]
+skip-if = true # the extra window is caught outside the main test loop, where fail-if cannot apply
+[browser_fail.js]
+skip-if = true
+[browser_fail_add_task.js]
+skip-if = true # fail-if doesn't catch an exception thrown outside the test
+[browser_fail_async_throw.js]
+skip-if = true # fail-if doesn't catch an exception thrown outside the test
+[browser_fail_fp.js]
+fail-if = true
+[browser_fail_pf.js]
+fail-if = true
+[browser_fail_throw.js]
+skip-if = true # fail-if doesn't catch an exception thrown outside the test
+[browser_fail_timeout.js]
+fail-if = true
+[browser_fail_unexpectedTimeout.js]
+fail-if = false
deleted file mode 100644
--- a/testing/mochitest/tests/browser/moz.build
+++ /dev/null
@@ -1,8 +0,0 @@
-# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
-# vim: set filetype=python:
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-BROWSER_CHROME_MANIFESTS += ['browser.ini']
-
--- a/testing/mochitest/tests/moz.build
+++ b/testing/mochitest/tests/moz.build
@@ -1,13 +1,12 @@
# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
# vim: set filetype=python:
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
DIRS += [
- 'SimpleTest',
- 'browser',
+ 'SimpleTest'
]
MOCHITEST_MANIFESTS += ['Harness_sanity/mochitest.ini']
-
+BROWSER_CHROME_MANIFESTS += ['browser/browser.ini']