Bug 580786. Add support for fuzzy reftest matching. r=dbaron
author Jeff Muizelaar <jmuizelaar@mozilla.com>
Mon, 19 Dec 2011 09:02:53 -0500
changeset 85915 06c1e5e1387a8ca0eadbd6421b800103f1c6ceb0
parent 85914 39948fbe66594b52b4d52d30f47326674e4ce74a
child 85916 732a7796c490358717eef4dcf9b3d6cca863e655
push id 805
push user akeybl@mozilla.com
push date Wed, 01 Feb 2012 18:17:35 +0000
treeherder mozilla-aurora@6fb3bf232436
reviewers dbaron
bugs 580786
milestone 12.0a1
Bug 580786. Add support for fuzzy reftest matching. r=dbaron

This will let us convert a bunch of random-if() to fuzzy-if().
layout/reftests/reftest-sanity/fuzzy-ref.html
layout/reftests/reftest-sanity/fuzzy.html
layout/reftests/reftest-sanity/reftest.list
layout/reftests/reftest-sanity/too-fuzzy.html
layout/tools/reftest/README.txt
layout/tools/reftest/print-manifest-dirs.py
layout/tools/reftest/reftest.js
new file mode 100644
--- /dev/null
+++ b/layout/reftests/reftest-sanity/fuzzy-ref.html
@@ -0,0 +1,6 @@
+<!DOCTYPE html>
+<html>
+  <body>
+    <div style="background: #ff00ff; width: 500px; height: 500px;"></div>
+  </body>
+</html>
new file mode 100644
--- /dev/null
+++ b/layout/reftests/reftest-sanity/fuzzy.html
@@ -0,0 +1,6 @@
+<!DOCTYPE html>
+<html>
+  <body>
+    <div style="background: #ff01ff; width: 500px; height: 500px;"></div>
+  </body>
+</html>
--- a/layout/reftests/reftest-sanity/reftest.list
+++ b/layout/reftests/reftest-sanity/reftest.list
@@ -124,8 +124,15 @@ fails pref(font.size.variable.x-western,
 fails pref(font.size.variable.x-western,"foo") == font-size-16.html font-default.html
 # a string pref
 pref(font.default.x-western,"serif") == font-serif.html font-default.html
 pref(font.default.x-western,"serif") != font-sans-serif.html font-default.html
 pref(font.default.x-western,"sans-serif") == font-sans-serif.html font-default.html
 pref(font.default.x-western,"sans-serif") != font-serif.html font-default.html
 fails pref(font.default.x-western,true) == font-serif.html font-default.html
 fails pref(font.default.x-western,0) == font-serif.html font-default.html
+# reftest syntax: fuzzy
+fuzzy == fuzzy.html fuzzy-ref.html
+fuzzy != too-fuzzy.html fuzzy-ref.html
+fuzzy-if(true) == fuzzy.html fuzzy-ref.html
+fuzzy-if(false) == fuzzy-ref.html fuzzy-ref.html
+# When using 565 (16-bit color), fuzzy.html and fuzzy-ref.html will compare as equal
+fails fuzzy-if(false) random-if(Android) == fuzzy.html fuzzy-ref.html
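The colors in these sanity tests sit on either side of the threshold: fuzzy.html (#ff01ff) differs from fuzzy-ref.html (#ff00ff) by 1 in the green channel, within the <= 2 tolerance, while too-fuzzy.html (#ff04ff) differs by 4. The 565 comment above holds because an RGB565 surface keeps only 6 bits of green, so the 1-step difference is lost in quantization; a minimal sketch of that arithmetic (illustration only, not part of the patch, assuming plain truncation from 8 to 6 bits):

    // Green channel values of fuzzy-ref.html (#ff00ff) and fuzzy.html (#ff01ff).
    var greens8 = [0x00, 0x01];
    // 8-bit -> 6-bit truncation, as an RGB565 surface would store them.
    var greens565 = greens8.map(function (g) { return g >> 2; });
    console.log(greens565); // [0, 0] -- both pages render identical pixels on 565,
                            // while too-fuzzy.html's green (0x04 >> 2 == 1) still differs.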
new file mode 100644
--- /dev/null
+++ b/layout/reftests/reftest-sanity/too-fuzzy.html
@@ -0,0 +1,6 @@
+<!DOCTYPE html>
+<html>
+  <body>
+    <div style="background: #ff04ff; width: 500px; height: 500px;"></div>
+  </body>
+</html>
--- a/layout/tools/reftest/README.txt
+++ b/layout/tools/reftest/README.txt
@@ -101,16 +101,24 @@ 2. A test item
 
       slow-if(condition) If the condition is met, the test is treated as if
                          'slow' had been specified.  This is useful for tests
                          which are slow only on particular platforms (e.g. a
                          test which exercised out-of-memory behavior might be
                          fast on a 32-bit system but inordinately slow on a
                          64-bit system).
 
+      fuzzy  This allows a test to pass if the largest difference in any
+             pixel channel value is <= 2. It can also be used with '!=' to
+             ensure that the difference is greater than 2.
+
+      fuzzy-if(condition) If the condition is met, the test is treated as if
+                          'fuzzy' had been specified. This is useful when small
+                          rendering differences occur only on particular platforms.
+
       require-or(cond1&&cond2&&...,fallback)
           Require some particular setup be performed or environmental
           condition(s) made true (eg setting debug mode) before the test
           is run. If any condition is unknown, unimplemented, or fails,
           revert to the fallback failure-type.
           Example: require-or(debugMode,skip)
 
       asserts(count)
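The pass/fail rule that the new 'fuzzy' and 'fuzzy-if' entries describe can be summarized as follows (a minimal sketch, not code from the patch; maxDifference stands for the largest per-channel delta reported by the canvas comparison):

    // isEqualTest is true for '==' manifest entries and false for '!=' entries.
    function fuzzyOutcome(isEqualTest, maxDifference) {
        // For a fuzzy test, per-channel differences of up to 2 are treated as equal.
        var treatedAsEqual = maxDifference <= 2;
        // '==' passes when the canvases are treated as equal, '!=' when they are not.
        return isEqualTest ? treatedAsEqual : !treatedAsEqual;
    }

    fuzzyOutcome(true, 1);   // true:  'fuzzy ==' tolerates a difference of 1
    fuzzyOutcome(false, 4);  // true:  'fuzzy !=' requires a difference greater than 2
    fuzzyOutcome(false, 1);  // false: a difference of 1-2 counts as equal, so '!=' fails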
--- a/layout/tools/reftest/print-manifest-dirs.py
+++ b/layout/tools/reftest/print-manifest-dirs.py
@@ -34,17 +34,17 @@
 # the provisions above, a recipient may use your version of this file under
 # the terms of any one of the MPL, the GPL or the LGPL.
 #
 # ***** END LICENSE BLOCK *****
 
 import sys, os.path, re
 
 commentRE = re.compile(r"\s+#")
-conditionsRE = re.compile(r"^(fails|needs-focus|random|skip|asserts)")
+conditionsRE = re.compile(r"^(fails|needs-focus|random|skip|asserts|fuzzy)")
 httpRE = re.compile(r"HTTP\((\.\.(\/\.\.)*)\)")
 protocolRE = re.compile(r"^\w+:")
 
 def parseManifest(manifest, dirs):
   """Parse the reftest manifest |manifest|, adding all directories containing
   tests (and the dirs containing the manifests themselves) to the set |dirs|."""
   manifestdir = os.path.dirname(os.path.abspath(manifest))
   dirs.add(manifestdir)
--- a/layout/tools/reftest/reftest.js
+++ b/layout/tools/reftest/reftest.js
@@ -144,16 +144,17 @@ const TYPE_SCRIPT = 'script'; // test co
 // the test by using the *larger*.  
 // FIXME: In the future, we may also want to use this rule for combining
 // statuses that are on the same line (rather than making the last one
 // win).
 const EXPECTED_PASS = 0;
 const EXPECTED_FAIL = 1;
 const EXPECTED_RANDOM = 2;
 const EXPECTED_DEATH = 3;  // test must be skipped to avoid e.g. crash/hang
+const EXPECTED_FUZZY = 4;
 
 // types of preference value we might want to set for a specific test
 const PREF_BOOLEAN = 0;
 const PREF_STRING  = 1;
 const PREF_INTEGER = 2;
 
 var gPrefsToRestore = [];
 
@@ -681,26 +682,26 @@ function ReadManifest(aURL, inherited_st
         var expected_status = EXPECTED_PASS;
         var allow_silent_fail = false;
         var minAsserts = 0;
         var maxAsserts = 0;
         var needs_focus = false;
         var slow = false;
         var prefSettings = [];
         
-        while (items[0].match(/^(fails|needs-focus|random|skip|asserts|slow|require-or|silentfail|pref)/)) {
+        while (items[0].match(/^(fails|needs-focus|random|skip|asserts|slow|require-or|silentfail|pref|fuzzy)/)) {
             var item = items.shift();
             var stat;
             var cond;
-            var m = item.match(/^(fails|random|skip|silentfail)-if(\(.*\))$/);
+            var m = item.match(/^(fails|random|skip|silentfail|fuzzy)-if(\(.*\))$/);
             if (m) {
                 stat = m[1];
                 // Note: m[2] contains the parentheses, and we want them.
                 cond = Components.utils.evalInSandbox(m[2], sandbox);
-            } else if (item.match(/^(fails|random|skip)$/)) {
+            } else if (item.match(/^(fails|random|skip|fuzzy)$/)) {
                 stat = item;
                 cond = true;
             } else if (item == "needs-focus") {
                 needs_focus = true;
                 cond = false;
             } else if ((m = item.match(/^asserts\((\d+)(-\d+)?\)$/))) {
                 cond = false;
                 minAsserts = Number(m[1]);
@@ -772,16 +773,18 @@ function ReadManifest(aURL, inherited_st
 
             if (cond) {
                 if (stat == "fails") {
                     expected_status = EXPECTED_FAIL;
                 } else if (stat == "random") {
                     expected_status = EXPECTED_RANDOM;
                 } else if (stat == "skip") {
                     expected_status = EXPECTED_DEATH;
+                } else if (stat == "fuzzy") {
+                    expected_status = EXPECTED_FUZZY;
                 } else if (stat == "silentfail") {
                     allow_silent_fail = true;
                 }
             }
         }
 
         expected_status = Math.max(expected_status, inherited_status);
 
@@ -1291,16 +1294,18 @@ function RecordResult(testRunTime, error
     outputs[EXPECTED_FAIL] = {
         true:  {s: "TEST-UNEXPECTED-PASS"       , n: "UnexpectedPass"},
         false: {s: "TEST-KNOWN-FAIL"            , n: "KnownFail"}
     };
     outputs[EXPECTED_RANDOM] = {
         true:  {s: "TEST-PASS" + randomMsg      , n: "Random"},
         false: {s: "TEST-KNOWN-FAIL" + randomMsg, n: "Random"}
     };
+    outputs[EXPECTED_FUZZY] = outputs[EXPECTED_PASS];
+
     var output;
 
     if (gURLs[0].type == TYPE_LOAD) {
         ++gTestResults.LoadOnly;
         gDumpLog("REFTEST TEST-PASS | " + gURLs[0].prettyPath + " | (LOAD ONLY)\n");
         gCurrentCanvas = null;
         FinishTestItem();
         return;
@@ -1393,24 +1398,35 @@ function RecordResult(testRunTime, error
             // Both documents have been loaded. Compare the renderings and see
             // if the comparison result matches the expected result specified
             // in the manifest.
 
             // number of different pixels
             var differences;
             // whether the two renderings match:
             var equal;
+            var maxDifference = {};
 
-            differences = gWindowUtils.compareCanvases(gCanvas1, gCanvas2, {});
+            differences = gWindowUtils.compareCanvases(gCanvas1, gCanvas2, maxDifference);
             equal = (differences == 0);
 
+            // what is expected on this platform (PASS, FAIL, RANDOM, or FUZZY)
+            var expected = gURLs[0].expected;
+
+            if (maxDifference.value > 0 && maxDifference.value <= 2) {
+                if (equal) {
+                    throw "Inconsistent result from compareCanvases.";
+                }
+                equal = expected == EXPECTED_FUZZY;
+                gDumpLog("REFTEST fuzzy match\n");
+            }
+
             // whether the comparison result matches what is in the manifest
             var test_passed = (equal == (gURLs[0].type == TYPE_REFTEST_EQUAL));
-            // what is expected on this platform (PASS, FAIL, or RANDOM)
-            var expected = gURLs[0].expected;
+
             output = outputs[expected][test_passed];
 
             ++gTestResults[output.n];
 
             var result = "REFTEST " + output.s + " | " +
                          gURLs[0].prettyPath + " | "; // the URL being tested
             switch (gURLs[0].type) {
                 case TYPE_REFTEST_NOTEQUAL:
@@ -1418,21 +1434,22 @@ function RecordResult(testRunTime, error
                     break;
                 case TYPE_REFTEST_EQUAL:
                     result += "image comparison (==) ";
                     break;
             }
             gDumpLog(result + "\n");
 
             if (!test_passed && expected == EXPECTED_PASS ||
+                !test_passed && expected == EXPECTED_FUZZY ||
                 test_passed && expected == EXPECTED_FAIL) {
                 if (!equal) {
                     gDumpLog("REFTEST   IMAGE 1 (TEST): " + gCanvas1.toDataURL() + "\n");
                     gDumpLog("REFTEST   IMAGE 2 (REFERENCE): " + gCanvas2.toDataURL() + "\n");
-                    gDumpLog("REFTEST number of differing pixels: " + differences + "\n");
+                    gDumpLog("REFTEST number of differing pixels: " + differences + " max difference: " + maxDifference.value + "\n");
                 } else {
                     gDumpLog("REFTEST   IMAGE: " + gCanvas1.toDataURL() + "\n");
                 }
             }
 
             if (!test_passed && expected == EXPECTED_PASS) {
                 FlushTestLog();
             }
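Condensed, the comparison logic added in this hunk amounts to the following (a sketch only, not the actual reftest.js code; compareCanvases and EXPECTED_FUZZY are the interface and constant shown in the patch above):

    function compareWithFuzz(windowUtils, canvas1, canvas2, expected, isEqualTest) {
        var maxDifference = {};  // out-parameter filled in by compareCanvases
        var differences = windowUtils.compareCanvases(canvas1, canvas2, maxDifference);
        var equal = (differences == 0);

        // A maximum per-channel difference of 1-2 counts as equal only when the test
        // was annotated 'fuzzy' (or a matching 'fuzzy-if' condition evaluated to true).
        if (maxDifference.value > 0 && maxDifference.value <= 2) {
            equal = (expected == EXPECTED_FUZZY);
        }

        // '==' entries pass when the renderings are equal, '!=' entries when they are not.
        return equal == isEqualTest;
    }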