A Python parser for MediaWiki wikicode https://mwparserfromhell.readthedocs.io/
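For context, here is a minimal usage sketch of the library this test file belongs to, based on mwparserfromhell's documented high-level API; the sample wikitext and the printed fields are illustrative only.

import mwparserfromhell

# Parse a wikitext string into a Wikicode tree and inspect its templates.
text = "I has a template! {{foo|bar|baz|eggs=spam}} See it?"
wikicode = mwparserfromhell.parse(text)
for template in wikicode.filter_templates():
    print(template.name, template.params)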

test_tokens.py 3.2 KiB

# Copyright (C) 2012-2020 Ben Kurtovic <ben.kurtovic@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

"""
Test cases for the Token class and its subclasses.
"""

import pytest

from mwparserfromhell.parser import tokens


@pytest.mark.parametrize("name", tokens.__all__)
def test_issubclass(name):
    """check that all classes within the tokens module are really Tokens"""
    klass = getattr(tokens, name)
    assert issubclass(klass, tokens.Token) is True
    assert isinstance(klass(), klass)
    assert isinstance(klass(), tokens.Token)


def test_attributes():
    """check that Token attributes can be managed properly"""
    token1 = tokens.Token()
    token2 = tokens.Token(foo="bar", baz=123)

    assert "bar" == token2.foo
    assert 123 == token2.baz
    assert token1.foo is None
    assert token2.bar is None

    token1.spam = "eggs"
    token2.foo = "ham"
    del token2.baz

    assert "eggs" == token1.spam
    assert "ham" == token2.foo
    assert token2.baz is None
    with pytest.raises(KeyError):
        token2.__delattr__("baz")


def test_repr():
    """check that repr() on a Token works as expected"""
    token1 = tokens.Token()
    token2 = tokens.Token(foo="bar", baz=123)
    token3 = tokens.Text(text="earwig" * 100)
    hundredchars = ("earwig" * 100)[:97] + "..."

    assert "Token()" == repr(token1)
    assert repr(token2) in ("Token(foo='bar', baz=123)", "Token(baz=123, foo='bar')")
    assert "Text(text='" + hundredchars + "')" == repr(token3)


def test_equality():
    """check that equivalent tokens are considered equal"""
    token1 = tokens.Token()
    token2 = tokens.Token()
    token3 = tokens.Token(foo="bar", baz=123)
    token4 = tokens.Text(text="asdf")
    token5 = tokens.Text(text="asdf")
    token6 = tokens.TemplateOpen(text="asdf")

    assert token1 == token2
    assert token2 == token1
    assert token4 == token5
    assert token5 == token4
    assert token1 != token3
    assert token2 != token3
    assert token4 != token6
    assert token5 != token6


@pytest.mark.parametrize("token", [
    tokens.Token(),
    tokens.Token(foo="bar", baz=123),
    tokens.Text(text="earwig")
])
def test_repr_equality(token):
    """check that eval(repr(token)) == token"""
    assert token == eval(repr(token), vars(tokens))
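Taken together, these tests pin down the token API: a Token accepts arbitrary keyword attributes, reading an unset attribute yields None rather than raising, tokens of the same type with equal attributes compare equal, and repr() produces an expression that can be re-evaluated in the tokens namespace. The short sketch below restates that behavior; the attribute name "missing" is illustrative, and the expected values in the comments are inferred from the assertions above.

from mwparserfromhell.parser import tokens

text_token = tokens.Text(text="earwig")
print(text_token.text)     # "earwig"
print(text_token.missing)  # None, since unset attributes read back as None

# Equality depends on both the token type and its attributes.
print(text_token == tokens.Text(text="earwig"))          # True
print(text_token == tokens.TemplateOpen(text="earwig"))  # False

# repr() round-trips when evaluated against the tokens namespace.
clone = eval(repr(text_token), vars(tokens))
print(clone == text_token)  # True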