From 88201ecb5425689fae53343899b3ee1cc89d77c4 Mon Sep 17 00:00:00 2001
From: Ben Kurtovic
Date: Mon, 18 Mar 2013 03:21:36 -0400
Subject: [PATCH] Adding TestTokens. Add from __future__ import
 unicode_literals to a few files.

---
 tests/test_builder.py     |  1 +
 tests/test_ctokenizer.py  |  1 +
 tests/test_parser.py      |  1 +
 tests/test_pytokenizer.py |  1 +
 tests/test_tokens.py      | 78 ++++++++++++++++++++++++++++++++++++++++++++++-
 5 files changed, 81 insertions(+), 1 deletion(-)

diff --git a/tests/test_builder.py b/tests/test_builder.py
index e38e683..a3518fd 100644
--- a/tests/test_builder.py
+++ b/tests/test_builder.py
@@ -20,6 +20,7 @@
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 # SOFTWARE.
 
+from __future__ import unicode_literals
 import unittest
 
 class TestBuilder(unittest.TestCase):
diff --git a/tests/test_ctokenizer.py b/tests/test_ctokenizer.py
index 86f4787..07b5290 100644
--- a/tests/test_ctokenizer.py
+++ b/tests/test_ctokenizer.py
@@ -20,6 +20,7 @@
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 # SOFTWARE.
 
+from __future__ import unicode_literals
 import unittest
 
 from _test_tokenizer import TokenizerTestCase
diff --git a/tests/test_parser.py b/tests/test_parser.py
index 3f9b2e6..5ea2b49 100644
--- a/tests/test_parser.py
+++ b/tests/test_parser.py
@@ -20,6 +20,7 @@
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 # SOFTWARE.
 
+from __future__ import unicode_literals
 import unittest
 
 class TestParser(unittest.TestCase):
diff --git a/tests/test_pytokenizer.py b/tests/test_pytokenizer.py
index 4254748..a2f2482 100644
--- a/tests/test_pytokenizer.py
+++ b/tests/test_pytokenizer.py
@@ -20,6 +20,7 @@
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 # SOFTWARE.
 
+from __future__ import unicode_literals
 import unittest
 
 from _test_tokenizer import TokenizerTestCase
diff --git a/tests/test_tokens.py b/tests/test_tokens.py
index 0e7f87b..5a18b8e 100644
--- a/tests/test_tokens.py
+++ b/tests/test_tokens.py
@@ -20,10 +20,86 @@
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 # SOFTWARE.
 
+from __future__ import unicode_literals
 import unittest
 
+from mwparserfromhell.compat import py3k
+from mwparserfromhell.parser import tokens
+
 class TestTokens(unittest.TestCase):
-    pass
+    """Test cases for the Token class and its subclasses."""
+
+    def test_issubclass(self):
+        """check that all classes within the tokens module are really Tokens"""
+        for name in tokens.__all__:
+            klass = getattr(tokens, name)
+            self.assertTrue(issubclass(klass, tokens.Token))
+            self.assertIsInstance(klass(), klass)
+            self.assertIsInstance(klass(), tokens.Token)
+
+    def test_attributes(self):
+        """check that Token attributes can be managed properly"""
+        token1 = tokens.Token()
+        token2 = tokens.Token(foo="bar", baz=123)
+
+        self.assertEquals("bar", token2.foo)
+        self.assertEquals(123, token2.baz)
+        self.assertRaises(KeyError, lambda: token1.foo)
+        self.assertRaises(KeyError, lambda: token2.bar)
+
+        token1.spam = "eggs"
+        token2.foo = "ham"
+        del token2.baz
+
+        self.assertEquals("eggs", token1.spam)
+        self.assertEquals("ham", token2.foo)
+        self.assertRaises(KeyError, lambda: token2.baz)
+        self.assertRaises(KeyError, delattr, token2, "baz")
+
+    def test_repr(self):
+        """check that repr() on a Token works as expected"""
+        token1 = tokens.Token()
+        token2 = tokens.Token(foo="bar", baz=123)
+        token3 = tokens.Text(text="earwig" * 100)
+        hundredchars = ("earwig" * 100)[:97] + "..."
+
+        self.assertEquals("Token()", repr(token1))
+        if py3k:
+            token2repr = "Token(foo='bar', baz=123)"
+            token3repr = "Text(text='" + hundredchars + "')"
+        else:
+            token2repr = "Token(foo=u'bar', baz=123)"
+            token3repr = "Text(text=u'" + hundredchars + "')"
+        self.assertEquals(token2repr, repr(token2))
+        self.assertEquals(token3repr, repr(token3))
+
+    def test_equality(self):
+        """check that equivalent tokens are considered equal"""
+        token1 = tokens.Token()
+        token2 = tokens.Token()
+        token3 = tokens.Token(foo="bar", baz=123)
+        token4 = tokens.Text(text="asdf")
+        token5 = tokens.Text(text="asdf")
+        token6 = tokens.TemplateOpen(text="asdf")
+
+        self.assertEquals(token1, token2)
+        self.assertEquals(token2, token1)
+        self.assertEquals(token4, token5)
+        self.assertEquals(token5, token4)
+        self.assertNotEquals(token1, token3)
+        self.assertNotEquals(token2, token3)
+        self.assertNotEquals(token4, token6)
+        self.assertNotEquals(token5, token6)
+
+    def test_repr_equality(self):
+        "check that eval(repr(token)) == token"
+        tests = [
+            tokens.Token(),
+            tokens.Token(foo="bar", baz=123),
+            tokens.Text(text="earwig")
+        ]
+        for token in tests:
+            self.assertEquals(token, eval(repr(token), vars(tokens)))
 
 if __name__ == "__main__":
     unittest.main(verbosity=2)