
Can specify specific tokenizer tests via the command line; add files

tags/v0.2
Ben Kurtovic 11 years ago
commit a0fb8361eb
9 changed files with 31 additions and 14 deletions
  1. +17 -6   tests/_test_tokenizer.py
  2. +5  -4   tests/test_ctokenizer.py
  3. +5  -4   tests/test_pytokenizer.py
  4. +4  -0   tests/tokenizer/arguments.mwtest
  5. +0  -0
  6. +0  -0
  7. +0  -0
  8. +0  -0
  9. +0  -0

+17 -6   tests/_test_tokenizer.py

@@ -22,6 +22,7 @@
 
 from __future__ import print_function, unicode_literals
 from os import listdir, path
+import sys
 
 from mwparserfromhell.compat import py3k
 from mwparserfromhell.parser import tokens
@@ -107,15 +108,25 @@ class TokenizerTestCase(object):
     @classmethod
     def build(cls):
         """Load and install all tests from the 'tokenizer' directory."""
-        directory = path.join(path.dirname(__file__), "tokenizer")
-        extension = ".mwtest"
-        for filename in listdir(directory):
-            if not filename.endswith(extension):
-                continue
-            with open(path.join(directory, filename), "r") as fp:
+        def load_file(filename):
+            with open(filename, "r") as fp:
                 text = fp.read()
                 if not py3k:
                     text = text.decode("utf8")
                 cls._load_tests(filename[:0-len(extension)], text)
+
+        directory = path.join(path.dirname(__file__), "tokenizer")
+        extension = ".mwtest"
+        if len(sys.argv) > 1:  # Read specific tests from command line
+            for name in sys.argv[1:]:
+                load_file(path.join(directory, name + extension))
+            sys.argv = [sys.argv[0]]  # So unittest doesn't try to load these
+            cls.skip_others = True
+        else:
+            for filename in listdir(directory):
+                if not filename.endswith(extension):
+                    continue
+                load_file(path.join(directory, filename))
+            cls.skip_others = False
 
 TokenizerTestCase.build()
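
The new build() resolves names given on the command line to files under tests/tokenizer/ before unittest parses its own arguments, and records in skip_others whether a hand-picked subset is running. A minimal sketch of just that selection step under the same layout (any test name other than "arguments" would be hypothetical):

    import sys
    from os import path

    directory = path.join("tests", "tokenizer")
    extension = ".mwtest"

    if len(sys.argv) > 1:
        # e.g. "python tests/test_pytokenizer.py arguments" selects only
        # tests/tokenizer/arguments.mwtest
        selected = [path.join(directory, name + extension) for name in sys.argv[1:]]
        sys.argv = [sys.argv[0]]  # strip the names so unittest's own CLI does not see them
    else:
        selected = None  # build() instead walks every *.mwtest file in the directory
    print(selected)

So running python tests/test_pytokenizer.py arguments loads only the cases defined in arguments.mwtest.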

+5 -4   tests/test_ctokenizer.py

@@ -38,10 +38,11 @@ class TestCTokenizer(TokenizerTestCase, unittest.TestCase):
     def setUpClass(cls):
         cls.tokenizer = CTokenizer
 
-    def test_uses_c(self):
-        """make sure the C tokenizer identifies as using a C extension"""
-        self.assertTrue(CTokenizer.USES_C)
-        self.assertTrue(CTokenizer().USES_C)
+    if not TokenizerTestCase.skip_others:
+        def test_uses_c(self):
+            """make sure the C tokenizer identifies as using a C extension"""
+            self.assertTrue(CTokenizer.USES_C)
+            self.assertTrue(CTokenizer().USES_C)
 
 if __name__ == "__main__":
     unittest.main(verbosity=2)
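
test_uses_c now sits inside an if in the class body, so when skip_others is true the method is never defined and unittest's discovery cannot pick it up while a specific set of tokenizer tests is running. A standalone sketch of that pattern (SKIP_OTHERS is a hypothetical stand-in for TokenizerTestCase.skip_others):

    import unittest

    SKIP_OTHERS = True  # pretend specific tests were requested on the command line

    class Example(unittest.TestCase):
        if not SKIP_OTHERS:
            def test_extra(self):
                self.assertTrue(True)

    # The method exists on the class only if the condition held at definition time.
    print(hasattr(Example, "test_extra"))  # -> False while SKIP_OTHERS is True

tests/test_pytokenizer.py below applies the same guard to its pure-Python test_uses_c.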

+5 -4   tests/test_pytokenizer.py

@@ -34,10 +34,11 @@ class TestPyTokenizer(TokenizerTestCase, unittest.TestCase):
     def setUpClass(cls):
         cls.tokenizer = Tokenizer
 
-    def test_uses_c(self):
-        """make sure the Python tokenizer identifies as not using C"""
-        self.assertFalse(Tokenizer.USES_C)
-        self.assertFalse(Tokenizer().USES_C)
+    if not TokenizerTestCase.skip_others:
+        def test_uses_c(self):
+            """make sure the Python tokenizer identifies as not using C"""
+            self.assertFalse(Tokenizer.USES_C)
+            self.assertFalse(Tokenizer().USES_C)
 
 if __name__ == "__main__":
     unittest.main(verbosity=2)

+4 -0   tests/tokenizer/arguments.mwtest

@@ -0,0 +1,4 @@
+name: no_params
+label: simplest type of argument
+input: "{{{argument}}}"
+output: [ArgumentOpen(), Text(text="argument"), ArgumentClose()]
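
arguments.mwtest is a flat block of key: value lines (name, label, input, output). The actual parsing lives in TokenizerTestCase._load_tests, which this commit does not change; the sketch below only shows how one such block could be read under that assumed format:

    def parse_block(text):
        # Split "key: value" lines into a dict; format assumed from arguments.mwtest.
        data = {}
        for line in text.strip().splitlines():
            key, _, value = line.partition(":")
            data[key.strip()] = value.strip()
        return data

    block = "\n".join([
        'name: no_params',
        'label: simplest type of argument',
        'input: "{{{argument}}}"',
        'output: [ArgumentOpen(), Text(text="argument"), ArgumentClose()]',
    ])
    print(parse_block(block)["label"])  # -> simplest type of argument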
