Bug 1541751 [wpt PR 16183] - Make test_tokenizer pass on python 3., a=testonly
author Ms2ger <Ms2ger@gmail.com>
Fri, 17 May 2019 11:02:40 +0000
changeset 477090 ee72519c02e68e9fa099024ba43433d9649950d7
parent 477089 139fcd0d986372bd45130c9620f3c4f372840814
child 477091 05ed1ae9f0fb6ba36e0b870693fd8ee563a1c4c0
push id 36116
push user shindli@mozilla.com
push date Thu, 06 Jun 2019 10:00:05 +0000
treeherder mozilla-central@fee989d27558
reviewers testonly
bugs 1541751, 16183
milestone 69.0a1
Bug 1541751 [wpt PR 16183] - Make test_tokenizer pass on python 3., a=testonly

Automatic update from web-platform-tests
Make test_tokenizer pass on python 3.
--
wpt-commits: cc5c06062869f5fff62d99c478b640be06efd6b2
wpt-pr: 16183
testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/parser.py
testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_tokenizer.py
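The core of the change is the py2/py3 text model: on Python 2, str is bytes and unicode is text, while on Python 3, str is text and bytes is bytes. The patch makes the tokenizer insist on a bytes stream and decode each line itself, using six aliases so the same assertions hold on both interpreters. A minimal sketch of that pattern, as illustration only (not part of the patch):

    from six import BytesIO, binary_type, text_type

    stream = BytesIO(b"key: value\n")         # bytes input on py2 and py3 alike
    for line in stream:
        assert isinstance(line, binary_type)  # str on py2, bytes on py3
        text = line.decode("utf-8").rstrip()  # the tokenizer operates on decoded text
        assert isinstance(text, text_type)    # unicode on py2, str on py3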
--- a/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/parser.py
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/parser.py
@@ -9,17 +9,17 @@
 #      os == win: FAIL #This is a comment
 #      PASS
 #
 
 # TODO: keep comments in the tree
 
 from __future__ import unicode_literals
 
-from six.moves import cStringIO as StringIO
+from six import binary_type, text_type, BytesIO
 
 from .node import (AtomNode, BinaryExpressionNode, BinaryOperatorNode,
                    ConditionalNode, DataNode, IndexNode, KeyValueNode, ListNode,
                    NumberNode, StringNode, UnaryExpressionNode,
                    UnaryOperatorNode, ValueNode, VariableNode)
 
 
 class ParseError(Exception):
@@ -44,17 +44,17 @@ binary_operators = ["==", "!=", "and", "
 
 operators = ["==", "!=", "not", "and", "or"]
 
 atoms = {"True": True,
          "False": False,
          "Reset": object()}
 
 def decode(s):
-    assert isinstance(s, unicode)
+    assert isinstance(s, text_type)
     return s
 
 
 def precedence(operator_node):
     return len(operators) - operators.index(operator_node.data)
 
 
 class TokenTypes(object):
@@ -72,35 +72,35 @@ class Tokenizer(object):
     def reset(self):
         self.indent_levels = [0]
         self.state = self.line_start_state
         self.next_state = self.data_line_state
         self.line_number = 0
 
     def tokenize(self, stream):
         self.reset()
-        assert not isinstance(stream, unicode)
-        if isinstance(stream, str):
-            stream = StringIO(stream)
+        assert not isinstance(stream, text_type)
+        if isinstance(stream, bytes):
+            stream = BytesIO(stream)
         if not hasattr(stream, "name"):
             self.filename = ""
         else:
             self.filename = stream.name
 
         self.next_line_state = self.line_start_state
         for i, line in enumerate(stream):
-            assert isinstance(line, str)
+            assert isinstance(line, binary_type)
             self.state = self.next_line_state
             assert self.state is not None
             states = []
             self.next_line_state = None
             self.line_number = i + 1
             self.index = 0
             self.line = line.decode('utf-8').rstrip()
-            assert isinstance(self.line, unicode)
+            assert isinstance(self.line, text_type)
             while self.state != self.eol_state:
                 states.append(self.state)
                 tokens = self.state()
                 if tokens:
                     for token in tokens:
                         yield token
             self.state()
         while True:
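With tokenize() now wrapping a raw bytes argument in BytesIO itself, the test module below drops its StringIO wrapper and feeds b"..." literals straight in. A minimal usage sketch of the new calling convention; the absolute import path here is an assumption (the tests themselves use a relative import):

    from wptrunner.wptmanifest import parser             # path assumed; tests do `from .. import parser`
    from wptrunner.wptmanifest.parser import token_types

    tokenizer = parser.Tokenizer()
    tokens = []
    for tok in tokenizer.tokenize(b"key: value"):        # bytes in; BytesIO wrapping happens inside
        tokens.append(tok)
        if tok[0] == token_types.eof:                    # the generator keeps yielding eof, so stop here
            break
    # tokens: (string, "key"), (separator, ":"), (string, "value"), (eof, None)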
--- a/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_tokenizer.py
+++ b/testing/web-platform/tests/tools/wptrunner/wptrunner/wptmanifest/tests/test_tokenizer.py
@@ -1,274 +1,267 @@
-import sys
-import pytest
 import unittest
 
-from six.moves import cStringIO as StringIO
-
 from .. import parser
 from ..parser import token_types
 
-
-@pytest.mark.xfail(sys.version[0] == "3",
-                   reason="Tokenizer doesn't support py3")
 class TokenizerTest(unittest.TestCase):
     def setUp(self):
         self.tokenizer = parser.Tokenizer()
 
     def tokenize(self, input_str):
         rv = []
-        for item in self.tokenizer.tokenize(StringIO(input_str)):
+        for item in self.tokenizer.tokenize(input_str):
             rv.append(item)
             if item[0] == token_types.eof:
                 break
         return rv
 
     def compare(self, input_text, expected):
         expected = expected + [(token_types.eof, None)]
         actual = self.tokenize(input_text)
-        self.assertEquals(actual, expected)
+        self.assertEqual(actual, expected)
 
     def test_heading_0(self):
-        self.compare("""[Heading text]""",
+        self.compare(b"""[Heading text]""",
                      [(token_types.paren, "["),
                       (token_types.string, "Heading text"),
                       (token_types.paren, "]")])
 
     def test_heading_1(self):
-        self.compare("""[Heading [text\]]""",
+        self.compare(b"""[Heading [text\]]""",
                      [(token_types.paren, "["),
                       (token_types.string, "Heading [text]"),
                       (token_types.paren, "]")])
 
     def test_heading_2(self):
-        self.compare("""[Heading #text]""",
+        self.compare(b"""[Heading #text]""",
                      [(token_types.paren, "["),
                       (token_types.string, "Heading #text"),
                       (token_types.paren, "]")])
 
     def test_heading_3(self):
-        self.compare("""[Heading [\]text]""",
+        self.compare(b"""[Heading [\]text]""",
                      [(token_types.paren, "["),
                       (token_types.string, "Heading []text"),
                       (token_types.paren, "]")])
 
     def test_heading_4(self):
         with self.assertRaises(parser.ParseError):
-            self.tokenize("[Heading")
+            self.tokenize(b"[Heading")
 
     def test_heading_5(self):
-        self.compare("""[Heading [\]text] #comment""",
+        self.compare(b"""[Heading [\]text] #comment""",
                      [(token_types.paren, "["),
                       (token_types.string, "Heading []text"),
                       (token_types.paren, "]")])
 
     def test_heading_6(self):
-        self.compare(r"""[Heading \ttext]""",
+        self.compare(br"""[Heading \ttext]""",
                      [(token_types.paren, "["),
                       (token_types.string, "Heading \ttext"),
                       (token_types.paren, "]")])
 
     def test_key_0(self):
-        self.compare("""key:value""",
+        self.compare(b"""key:value""",
                      [(token_types.string, "key"),
                       (token_types.separator, ":"),
                       (token_types.string, "value")])
 
     def test_key_1(self):
-        self.compare("""key  :  value""",
+        self.compare(b"""key  :  value""",
                      [(token_types.string, "key"),
                       (token_types.separator, ":"),
                       (token_types.string, "value")])
 
     def test_key_2(self):
-        self.compare("""key  :  val ue""",
+        self.compare(b"""key  :  val ue""",
                      [(token_types.string, "key"),
                       (token_types.separator, ":"),
                       (token_types.string, "val ue")])
 
     def test_key_3(self):
-        self.compare("""key: value#comment""",
+        self.compare(b"""key: value#comment""",
                      [(token_types.string, "key"),
                       (token_types.separator, ":"),
                       (token_types.string, "value")])
 
     def test_key_4(self):
         with self.assertRaises(parser.ParseError):
-            self.tokenize("""ke y: value""")
+            self.tokenize(b"""ke y: value""")
 
     def test_key_5(self):
         with self.assertRaises(parser.ParseError):
-            self.tokenize("""key""")
+            self.tokenize(b"""key""")
 
     def test_key_6(self):
-        self.compare("""key: "value\"""",
+        self.compare(b"""key: "value\"""",
                      [(token_types.string, "key"),
                       (token_types.separator, ":"),
                       (token_types.string, "value")])
 
     def test_key_7(self):
-        self.compare("""key: 'value'""",
+        self.compare(b"""key: 'value'""",
                      [(token_types.string, "key"),
                       (token_types.separator, ":"),
                       (token_types.string, "value")])
 
     def test_key_8(self):
-        self.compare("""key: "#value\"""",
+        self.compare(b"""key: "#value\"""",
                      [(token_types.string, "key"),
                       (token_types.separator, ":"),
                       (token_types.string, "#value")])
 
     def test_key_9(self):
-        self.compare("""key: '#value\'""",
+        self.compare(b"""key: '#value\'""",
                      [(token_types.string, "key"),
                       (token_types.separator, ":"),
                       (token_types.string, "#value")])
 
     def test_key_10(self):
         with self.assertRaises(parser.ParseError):
-            self.tokenize("""key: "value""")
+            self.tokenize(b"""key: "value""")
 
     def test_key_11(self):
         with self.assertRaises(parser.ParseError):
-            self.tokenize("""key: 'value""")
+            self.tokenize(b"""key: 'value""")
 
     def test_key_12(self):
         with self.assertRaises(parser.ParseError):
-            self.tokenize("""key: 'value""")
+            self.tokenize(b"""key: 'value""")
 
     def test_key_13(self):
         with self.assertRaises(parser.ParseError):
-            self.tokenize("""key: 'value' abc""")
+            self.tokenize(b"""key: 'value' abc""")
 
     def test_key_14(self):
-        self.compare(r"""key: \\nb""",
+        self.compare(br"""key: \\nb""",
                      [(token_types.string, "key"),
                       (token_types.separator, ":"),
                       (token_types.string, r"\nb")])
 
     def test_list_0(self):
-        self.compare("""
+        self.compare(b"""
 key: []""",
             [(token_types.string, "key"),
              (token_types.separator, ":"),
              (token_types.list_start, "["),
              (token_types.list_end, "]")])
 
     def test_list_1(self):
-        self.compare("""
+        self.compare(b"""
 key: [a, "b"]""",
             [(token_types.string, "key"),
              (token_types.separator, ":"),
              (token_types.list_start, "["),
              (token_types.string, "a"),
              (token_types.string, "b"),
              (token_types.list_end, "]")])
 
     def test_list_2(self):
-        self.compare("""
+        self.compare(b"""
 key: [a,
       b]""",
             [(token_types.string, "key"),
              (token_types.separator, ":"),
              (token_types.list_start, "["),
              (token_types.string, "a"),
              (token_types.string, "b"),
              (token_types.list_end, "]")])
 
     def test_list_3(self):
-        self.compare("""
+        self.compare(b"""
 key: [a, #b]
       c]""",
             [(token_types.string, "key"),
              (token_types.separator, ":"),
              (token_types.list_start, "["),
              (token_types.string, "a"),
              (token_types.string, "c"),
              (token_types.list_end, "]")])
 
     def test_list_4(self):
         with self.assertRaises(parser.ParseError):
-            self.tokenize("""key: [a #b]
+            self.tokenize(b"""key: [a #b]
             c]""")
 
     def test_list_5(self):
         with self.assertRaises(parser.ParseError):
-            self.tokenize("""key: [a \\
+            self.tokenize(b"""key: [a \\
             c]""")
 
     def test_list_6(self):
-        self.compare("""key: [a , b]""",
+        self.compare(b"""key: [a , b]""",
             [(token_types.string, "key"),
              (token_types.separator, ":"),
              (token_types.list_start, "["),
              (token_types.string, "a"),
              (token_types.string, "b"),
              (token_types.list_end, "]")])
 
     def test_expr_0(self):
-        self.compare("""
+        self.compare(b"""
 key:
   if cond == 1: value""",
             [(token_types.string, "key"),
              (token_types.separator, ":"),
              (token_types.group_start, None),
              (token_types.ident, "if"),
              (token_types.ident, "cond"),
              (token_types.ident, "=="),
              (token_types.number, "1"),
              (token_types.separator, ":"),
              (token_types.string, "value")])
 
     def test_expr_1(self):
-        self.compare("""
+        self.compare(b"""
 key:
   if cond == 1: value1
   value2""",
             [(token_types.string, "key"),
              (token_types.separator, ":"),
              (token_types.group_start, None),
              (token_types.ident, "if"),
              (token_types.ident, "cond"),
              (token_types.ident, "=="),
              (token_types.number, "1"),
              (token_types.separator, ":"),
              (token_types.string, "value1"),
              (token_types.string, "value2")])
 
     def test_expr_2(self):
-        self.compare("""
+        self.compare(b"""
 key:
   if cond=="1": value""",
             [(token_types.string, "key"),
              (token_types.separator, ":"),
              (token_types.group_start, None),
              (token_types.ident, "if"),
              (token_types.ident, "cond"),
              (token_types.ident, "=="),
              (token_types.string, "1"),
              (token_types.separator, ":"),
              (token_types.string, "value")])
 
     def test_expr_3(self):
-        self.compare("""
+        self.compare(b"""
 key:
   if cond==1.1: value""",
             [(token_types.string, "key"),
              (token_types.separator, ":"),
              (token_types.group_start, None),
              (token_types.ident, "if"),
              (token_types.ident, "cond"),
              (token_types.ident, "=="),
              (token_types.number, "1.1"),
              (token_types.separator, ":"),
              (token_types.string, "value")])
 
     def test_expr_4(self):
-        self.compare("""
+        self.compare(b"""
 key:
   if cond==1.1 and cond2 == "a": value""",
             [(token_types.string, "key"),
              (token_types.separator, ":"),
              (token_types.group_start, None),
              (token_types.ident, "if"),
              (token_types.ident, "cond"),
              (token_types.ident, "=="),
@@ -276,66 +269,63 @@ key:
              (token_types.ident, "and"),
              (token_types.ident, "cond2"),
              (token_types.ident, "=="),
              (token_types.string, "a"),
              (token_types.separator, ":"),
              (token_types.string, "value")])
 
     def test_expr_5(self):
-        self.compare("""
+        self.compare(b"""
 key:
   if (cond==1.1 ): value""",
             [(token_types.string, "key"),
              (token_types.separator, ":"),
              (token_types.group_start, None),
              (token_types.ident, "if"),
              (token_types.paren, "("),
              (token_types.ident, "cond"),
              (token_types.ident, "=="),
              (token_types.number, "1.1"),
              (token_types.paren, ")"),
              (token_types.separator, ":"),
              (token_types.string, "value")])
 
     def test_expr_6(self):
-        self.compare("""
+        self.compare(b"""
 key:
   if "\\ttest": value""",
             [(token_types.string, "key"),
              (token_types.separator, ":"),
              (token_types.group_start, None),
              (token_types.ident, "if"),
              (token_types.string, "\ttest"),
              (token_types.separator, ":"),
              (token_types.string, "value")])
 
     def test_expr_7(self):
         with self.assertRaises(parser.ParseError):
-            self.tokenize(
-                """
+            self.tokenize(b"""
 key:
   if 1A: value""")
 
     def test_expr_8(self):
         with self.assertRaises(parser.ParseError):
-            self.tokenize(
-                """
+            self.tokenize(b"""
 key:
   if 1a: value""")
 
     def test_expr_9(self):
         with self.assertRaises(parser.ParseError):
-            self.tokenize(
-                """
+            self.tokenize(b"""
 key:
   if 1.1.1: value""")
 
     def test_expr_10(self):
-        self.compare("""
+        self.compare(b"""
 key:
   if 1.: value""",
             [(token_types.string, "key"),
              (token_types.separator, ":"),
              (token_types.group_start, None),
              (token_types.ident, "if"),
              (token_types.number, "1."),
              (token_types.separator, ":"),