@@ -8,13 +8,14 @@ jobs: | |||
steps: | |||
- uses: actions/checkout@v2 | |||
- name: Build manylinux1 Python wheels | |||
uses: RalfG/python-wheels-manylinux-build@0c24cb31441c7a1e6ea90d6a6408d406b2fee279 | |||
uses: RalfG/python-wheels-manylinux-build@e645ea95dae94f606ab25f95f44d3a2caf55764c | |||
with: | |||
python-versions: 'cp35-cp35m cp36-cp36m cp37-cp37m cp38-cp38 cp39-cp39' | |||
pip-wheel-args: '-w ./wheelhouse --no-deps' | |||
- name: Move to dist/ | |||
run: | | |||
mkdir -p dist | |||
cp -v wheelhouse/*-manylinux1_x86_64.whl dist/ | |||
cp -v wheelhouse/*-manylinux*.whl dist/ | |||
- name: Publish package to PyPI | |||
# Only actually publish if a new tag was pushed | |||
if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags') | |||
@@ -5,6 +5,7 @@ | |||
*.egg | |||
*.egg-info | |||
.coverage | |||
.eggs | |||
.DS_Store | |||
__pycache__ | |||
build | |||
@@ -10,10 +10,10 @@ arch: | |||
- amd64 | |||
- ppc64le | |||
install: | |||
- pip install coveralls | |||
- pip install coveralls pytest | |||
- python setup.py develop | |||
script: | |||
- coverage run --source=mwparserfromhell -m unittest discover | |||
- coverage run --source=mwparserfromhell -m pytest | |||
after_success: | |||
- coveralls | |||
env: | |||
@@ -1,3 +1,10 @@ | |||
v0.6.1 (released May 16, 2021): | |||
- Improved parsing of external links. (#232) | |||
- Fixed parsing of nested wikilinks. | |||
- Ported tests to pytest. (#237) | |||
- Moved mwparserfromhell package to src/ dir. | |||
v0.6 (released December 21, 2020): | |||
Thanks to everyone for their patience with this release! | |||
@@ -7,7 +14,7 @@ Thanks to everyone for their patience with this release! | |||
- Added binary wheels for Linux and macOS. | |||
- Updated Wikicode.matches() to recognize underscores as being equivalent | |||
to spaces. (#216) | |||
- Added a 'default' parameter to Template.get, and implement dict-style item | |||
- Added a 'default' parameter to Template.get(), and implemented dict-style item
access for template parameters. (#252) | |||
- Fixed a rare parsing bug involving deeply nested style tags. (#224) | |||
- Fixed parsing of section headings inside templates. (#233) | |||
@@ -1,3 +1,3 @@ | |||
include LICENSE CHANGELOG | |||
recursive-include mwparserfromhell *.h | |||
recursive-include src *.h | |||
recursive-include tests *.py *.mwtest |
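Once the wheels built above are published, a quick smoke test mirrors the version check that the release script later in this diff performs. This is an illustrative sketch only, not part of the change itself:

    import mwparserfromhell

    # A freshly installed 0.6.1 wheel should report the released version and
    # still parse basic wikicode.
    assert mwparserfromhell.__version__ == "0.6.1"
    print(mwparserfromhell.parse("{{foo|bar}}").filter_templates())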
@@ -29,8 +29,8 @@ Alternatively, get the latest development version:: | |||
cd mwparserfromhell | |||
python setup.py install | |||
You can run the comprehensive unit testing suite with | |||
``python -m unittest discover``. | |||
The comprehensive unit testing suite requires `pytest`_ (``pip install pytest``) | |||
and can be run with ``python -m pytest``. | |||
Usage | |||
----- | |||
@@ -210,6 +210,7 @@ Python 3 code (using the API_ and the requests_ library): | |||
.. _GitHub: https://github.com/earwig/mwparserfromhell | |||
.. _Python Package Index: https://pypi.org/ | |||
.. _get pip: https://pypi.org/project/pip/ | |||
.. _pytest: https://docs.pytest.org/ | |||
.. _Word-ending links: https://www.mediawiki.org/wiki/Help:Links#linktrail | |||
.. _EarwigBot: https://github.com/earwig/earwigbot | |||
.. _Pywikibot: https://www.mediawiki.org/wiki/Manual:Pywikibot | |||
@@ -1,6 +1,6 @@ | |||
# This config file is used by appveyor.com to build Windows release binaries | |||
version: 0.6-b{build} | |||
version: 0.6.1-b{build} | |||
branches: | |||
only: | |||
@@ -58,14 +58,14 @@ environment: | |||
install: | |||
- "%PIP% install --disable-pip-version-check --user --upgrade pip" | |||
- "%PIP% install wheel twine" | |||
- "%PIP% install wheel twine pytest" | |||
build_script: | |||
- "%SETUPPY% build" | |||
- "%SETUPPY% develop --user" | |||
test_script: | |||
- "%PYEXE% -m unittest discover" | |||
- "%PYEXE% -m pytest" | |||
after_test: | |||
- "%SETUPPY% bdist_wheel" | |||
@@ -1,6 +1,19 @@ | |||
Changelog | |||
========= | |||
v0.6.1 | |||
------ | |||
`Released May 16, 2021 <https://github.com/earwig/mwparserfromhell/tree/v0.6.1>`_ | |||
(`changes <https://github.com/earwig/mwparserfromhell/compare/v0.6...v0.6.1>`__): | |||
- Improved parsing of external links. | |||
(`#232 <https://github.com/earwig/mwparserfromhell/issues/232>`_) | |||
- Fixed parsing of nested wikilinks. | |||
- Ported tests to pytest. | |||
(`#237 <https://github.com/earwig/mwparserfromhell/issues/237>`_) | |||
- Moved mwparserfromhell package to src/ dir. | |||
v0.6 | |||
---- | |||
@@ -42,7 +42,7 @@ master_doc = 'index' | |||
# General information about the project. | |||
project = u'mwparserfromhell' | |||
copyright = u'2012–2020 Ben Kurtovic' | |||
copyright = u'2012–2021 Ben Kurtovic' | |||
# The version info for the project you're documenting, acts as replacement for | |||
# |version| and |release|, also used in various other places throughout the | |||
@@ -27,11 +27,12 @@ Alternatively, get the latest development version:: | |||
cd mwparserfromhell | |||
python setup.py install | |||
You can run the comprehensive unit testing suite with | |||
``python -m unittest discover``. | |||
The comprehensive unit testing suite requires `pytest`_ (``pip install pytest``) | |||
and can be run with ``python -m pytest``. | |||
.. _Python Package Index: https://pypi.org/ | |||
.. _get pip: https://pypi.org/project/pip/ | |||
.. _pytest: https://docs.pytest.org/ | |||
Contents | |||
-------- | |||
@@ -101,7 +101,8 @@ test_release() { | |||
source bin/activate | |||
echo " done." | |||
echo -n "Installing mwparserfromhell with pip..." | |||
pip -q install mwparserfromhell | |||
pip -q install --upgrade pip | |||
pip -q install mwparserfromhell pytest | |||
echo " done." | |||
echo -n "Checking version..." | |||
reported_version=$(python -c 'print(__import__("mwparserfromhell").__version__)') | |||
@@ -134,7 +135,7 @@ test_release() { | |||
cd mwparserfromhell-$VERSION | |||
echo "Running unit tests..." | |||
python setup.py -q install | |||
python -m unittest discover | |||
python -m pytest | |||
if [[ "$?" != "0" ]]; then | |||
echo "*** ERROR: Unit tests failed!" | |||
deactivate | |||
@@ -0,0 +1,2 @@ | |||
[aliases] | |||
test=pytest |
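With this alias and the pytest-runner hook added to setup.py below, "python setup.py test" is expected to delegate to pytest. A hedged, illustrative check, assuming it is run from the repository root:

    import subprocess
    import sys

    # Both commands should exercise the same suite: the first through the
    # [aliases] entry above (via pytest-runner), the second directly.
    subprocess.run([sys.executable, "setup.py", "test"], check=True)
    subprocess.run([sys.executable, "-m", "pytest"], check=True)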
@@ -20,14 +20,15 @@ | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
from distutils.errors import DistutilsError, CCompilerError | |||
from glob import glob | |||
from os import environ | |||
import os | |||
import sys | |||
from setuptools import setup, find_packages, Extension | |||
from setuptools import find_packages, setup, Extension | |||
from setuptools.command.build_ext import build_ext | |||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "src")) | |||
from mwparserfromhell import __version__ | |||
with open("README.rst") as fp: | |||
@@ -38,7 +39,7 @@ fallback = True | |||
# Allow env var WITHOUT_EXTENSION and args --with[out]-extension: | |||
env_var = environ.get("WITHOUT_EXTENSION") | |||
env_var = os.environ.get("WITHOUT_EXTENSION") | |||
if "--without-extension" in sys.argv: | |||
use_extension = False | |||
elif "--with-extension" in sys.argv: | |||
@@ -52,12 +53,12 @@ elif env_var is not None: | |||
# Remove the command line argument as it isn't understood by setuptools: | |||
sys.argv = [arg for arg in sys.argv | |||
if arg != "--without-extension" and arg != "--with-extension"] | |||
if arg not in ("--without-extension", "--with-extension")] | |||
def build_ext_patched(self): | |||
try: | |||
build_ext_original(self) | |||
except (DistutilsError, CCompilerError) as exc: | |||
except Exception as exc: | |||
print("error: " + str(exc)) | |||
print("Falling back to pure Python mode.") | |||
del self.extensions[:] | |||
@@ -68,14 +69,16 @@ if fallback: | |||
# Project-specific part begins here: | |||
tokenizer = Extension("mwparserfromhell.parser._tokenizer", | |||
sources=sorted(glob("mwparserfromhell/parser/ctokenizer/*.c")), | |||
depends=sorted(glob("mwparserfromhell/parser/ctokenizer/*.h"))) | |||
sources=sorted(glob("src/mwparserfromhell/parser/ctokenizer/*.c")), | |||
depends=sorted(glob("src/mwparserfromhell/parser/ctokenizer/*.h"))) | |||
setup( | |||
name = "mwparserfromhell", | |||
packages = find_packages(exclude=("tests",)), | |||
packages = find_packages("src"), | |||
package_dir = {"": "src"}, | |||
ext_modules = [tokenizer] if use_extension else [], | |||
test_suite = "tests", | |||
setup_requires = ["pytest-runner"] if "test" in sys.argv or "pytest" in sys.argv else [], | |||
tests_require = ["pytest"], | |||
version = __version__, | |||
python_requires = ">= 3.5", | |||
author = "Ben Kurtovic", | |||
@@ -1,4 +1,4 @@ | |||
# Copyright (C) 2012-2020 Ben Kurtovic <ben.kurtovic@gmail.com> | |||
# Copyright (C) 2012-2021 Ben Kurtovic <ben.kurtovic@gmail.com> | |||
# | |||
# Permission is hereby granted, free of charge, to any person obtaining a copy | |||
# of this software and associated documentation files (the "Software"), to deal | |||
@@ -25,9 +25,9 @@ outrageously powerful parser for `MediaWiki <https://www.mediawiki.org>`_ wikico | |||
""" | |||
__author__ = "Ben Kurtovic" | |||
__copyright__ = "Copyright (C) 2012-2020 Ben Kurtovic" | |||
__copyright__ = "Copyright (C) 2012-2021 Ben Kurtovic" | |||
__license__ = "MIT License" | |||
__version__ = "0.6" | |||
__version__ = "0.7.dev0" | |||
__email__ = "ben.kurtovic@gmail.com" | |||
from . import (definitions, nodes, parser, smart_list, string_mixin, |
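As a sanity check of the new src/ layout, the packaging pieces above can be exercised in isolation. A sketch reusing the diff's own calls, assuming it runs from the repository root:

    import os
    import sys
    from glob import glob

    from setuptools import find_packages

    # Same trick as setup.py: make the in-tree package importable from src/.
    sys.path.insert(0, os.path.join(os.path.dirname(__file__), "src"))
    from mwparserfromhell import __version__

    print(__version__)                          # e.g. "0.7.dev0"
    print(find_packages("src"))                 # ["mwparserfromhell", ...]
    print(sorted(glob("src/mwparserfromhell/parser/ctokenizer/*.c")))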
@@ -27,15 +27,18 @@ __all__ = ["ExternalLink"] | |||
class ExternalLink(Node): | |||
"""Represents an external link, like ``[http://example.com/ Example]``.""" | |||
def __init__(self, url, title=None, brackets=True): | |||
def __init__(self, url, title=None, brackets=True, suppress_space=False): | |||
super().__init__() | |||
self.url = url | |||
self.title = title | |||
self.brackets = brackets | |||
self.suppress_space = suppress_space | |||
def __str__(self): | |||
if self.brackets: | |||
if self.title is not None: | |||
if self.suppress_space is True: | |||
return "[" + str(self.url) + str(self.title) + "]" | |||
return "[" + str(self.url) + " " + str(self.title) + "]" | |||
return "[" + str(self.url) + "]" | |||
return str(self.url) |
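For illustration, a minimal sketch of how the new suppress_space flag changes rendering, following the __str__ logic above (the example values are made up):

    from mwparserfromhell.nodes import ExternalLink

    # Default behaviour: a space separates the URL from the title.
    print(ExternalLink("http://example.com/", "Example"))
    # -> [http://example.com/ Example]

    # With suppress_space=True the separator space is omitted, so links whose
    # title immediately follows the URL round-trip unchanged.
    print(ExternalLink("http://example.com/", "Example", suppress_space=True))
    # -> [http://example.com/Example]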
@@ -157,17 +157,20 @@ class Builder: | |||
@_add_handler(tokens.ExternalLinkOpen) | |||
def _handle_external_link(self, token): | |||
"""Handle when an external link is at the head of the tokens.""" | |||
brackets, url = token.brackets, None | |||
brackets, url, suppress_space = token.brackets, None, None | |||
self._push() | |||
while self._tokens: | |||
token = self._tokens.pop() | |||
if isinstance(token, tokens.ExternalLinkSeparator): | |||
url = self._pop() | |||
suppress_space = token.suppress_space | |||
self._push() | |||
elif isinstance(token, tokens.ExternalLinkClose): | |||
if url is not None: | |||
return ExternalLink(url, self._pop(), brackets) | |||
return ExternalLink(self._pop(), brackets=brackets) | |||
return ExternalLink(url, self._pop(), brackets=brackets, | |||
suppress_space=suppress_space is True) | |||
return ExternalLink(self._pop(), brackets=brackets, | |||
suppress_space=suppress_space is True) | |||
else: | |||
self._write(self._handle_token(token)) | |||
raise ParserError("_handle_external_link() missed a close token") |
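A small sketch of the token stream the builder now accepts; it assumes token keyword arguments surface as attributes, as they already do for brackets:

    from mwparserfromhell.parser import tokens
    from mwparserfromhell.parser.builder import Builder

    # Stream for a bracketed link whose title follows the URL with no space;
    # the separator carries suppress_space=True.
    stream = [
        tokens.ExternalLinkOpen(brackets=True),
        tokens.Text(text="http://example.com/"),
        tokens.ExternalLinkSeparator(suppress_space=True),
        tokens.Text(text="Example"),
        tokens.ExternalLinkClose(),
    ]
    print(Builder().build(stream))  # -> [http://example.com/Example]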
@@ -30,7 +30,7 @@ SOFTWARE. | |||
#define DIGITS "0123456789" | |||
#define HEXDIGITS "0123456789abcdefABCDEF" | |||
#define ALPHANUM "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" | |||
#define URISCHEME "abcdefghijklmnopqrstuvwxyz0123456789+.-" | |||
#define URISCHEME "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+.-" | |||
#define MAX_BRACES 255 | |||
#define MAX_ENTITY_SIZE 8 | |||
@@ -100,6 +100,66 @@ static PyObject* strip_tag_name(PyObject* token, int take_attr) | |||
} | |||
/* | |||
Check if the given character is a non-word character. | |||
Equivalent to this Python code: | |||
def is_non_word_character(ch): | |||
if re.fullmatch(r"\W", chunk): | |||
return True | |||
return False | |||
*/ | |||
static int is_non_word_character(Py_UCS4 ch) | |||
{ | |||
int ret = 0; | |||
PyObject* modname = NULL; | |||
PyObject* module = NULL; | |||
PyObject* fmatch = NULL; | |||
PyObject* pattern = NULL; | |||
PyObject* str = NULL; | |||
PyObject* posArgs = NULL; | |||
PyObject* match = NULL; | |||
modname = PyUnicode_FromString("re"); | |||
if (modname == NULL) | |||
goto error; | |||
module = PyImport_Import(modname); | |||
if (module == NULL) | |||
goto error; | |||
fmatch = PyObject_GetAttrString(module, "fullmatch"); | |||
if (fmatch == NULL) | |||
goto error; | |||
pattern = PyUnicode_FromString("\\W"); | |||
if (pattern == NULL) | |||
goto error; | |||
str = PyUnicode_FROM_SINGLE(ch); | |||
if (str == NULL) | |||
goto error; | |||
posArgs = PyTuple_Pack(2, pattern, str); | |||
if (posArgs == NULL) | |||
goto error; | |||
match = PyObject_Call(fmatch, posArgs, NULL); | |||
if (match == NULL) | |||
goto error; | |||
if (match != Py_None) | |||
ret = 1; | |||
goto end; | |||
error: | |||
ret = -1; | |||
end: | |||
Py_XDECREF(match); | |||
Py_XDECREF(posArgs); | |||
Py_XDECREF(str); | |||
Py_XDECREF(pattern); | |||
Py_XDECREF(fmatch); | |||
Py_XDECREF(module); | |||
Py_XDECREF(modname); | |||
return ret; | |||
} | |||
/* | |||
Parse a template at the head of the wikicode string. | |||
*/ | |||
static int Tokenizer_parse_template(Tokenizer* self, int has_content) | |||
@@ -527,7 +587,13 @@ static int Tokenizer_parse_free_uri_scheme(Tokenizer* self) | |||
// it was just parsed as text: | |||
for (i = self->topstack->textbuffer->length - 1; i >= 0; i--) { | |||
chunk = Textbuffer_read(self->topstack->textbuffer, i); | |||
if (Py_UNICODE_ISSPACE(chunk) || is_marker(chunk)) | |||
// stop at the first non-word character | |||
int is_non_word = is_non_word_character(chunk); | |||
if (is_non_word < 0) { | |||
Textbuffer_dealloc(scheme_buffer); | |||
return -1; | |||
} | |||
else if (is_non_word == 1) | |||
goto end_of_loop; | |||
j = 0; | |||
do { | |||
@@ -607,14 +673,15 @@ static int Tokenizer_handle_free_link_text( | |||
Return whether the current head is the end of a free link. | |||
*/ | |||
static int | |||
Tokenizer_is_free_link(Tokenizer* self, Py_UCS4 this, Py_UCS4 next) | |||
Tokenizer_is_free_link_end(Tokenizer* self, Py_UCS4 this, Py_UCS4 next) | |||
{ | |||
// Built from Tokenizer_parse()'s end sentinels: | |||
Py_UCS4 after = Tokenizer_read(self, 2); | |||
uint64_t ctx = self->topstack->context; | |||
return (!this || this == '\n' || this == '[' || this == ']' || | |||
this == '<' || this == '>' || (this == '\'' && next == '\'') || | |||
this == '<' || this == '>' || this == '"' || | |||
(this == '\'' && next == '\'') || | |||
(this == '|' && ctx & LC_TEMPLATE) || | |||
(this == '=' && ctx & (LC_TEMPLATE_PARAM_KEY | LC_HEADING)) || | |||
(this == '}' && next == '}' && | |||
@@ -656,7 +723,7 @@ Tokenizer_really_parse_external_link(Tokenizer* self, int brackets, | |||
if (Tokenizer_parse_comment(self)) | |||
return NULL; | |||
} | |||
else if (!brackets && Tokenizer_is_free_link(self, this, next)) { | |||
else if (!brackets && Tokenizer_is_free_link_end(self, this, next)) { | |||
self->head--; | |||
return Tokenizer_pop(self); | |||
} | |||
@@ -669,16 +736,28 @@ Tokenizer_really_parse_external_link(Tokenizer* self, int brackets, | |||
} | |||
else if (this == ']') | |||
return Tokenizer_pop(self); | |||
else if (this == ' ') { | |||
else if (this == ' ' || Tokenizer_is_free_link_end(self, this, next)) { | |||
if (brackets) { | |||
if (Tokenizer_emit(self, ExternalLinkSeparator)) | |||
return NULL; | |||
if (this == ' ') { | |||
if (Tokenizer_emit(self, ExternalLinkSeparator)) | |||
return NULL; | |||
} | |||
else { | |||
PyObject* kwargs = PyDict_New(); | |||
if (!kwargs) | |||
return NULL; | |||
if (this != ' ') | |||
PyDict_SetItemString(kwargs, "suppress_space", Py_True); | |||
if (Tokenizer_emit_kwargs(self, ExternalLinkSeparator, kwargs)) | |||
return NULL; | |||
} | |||
self->topstack->context ^= LC_EXT_LINK_URI; | |||
self->topstack->context |= LC_EXT_LINK_TITLE; | |||
self->head++; | |||
if (this == ' ') | |||
self->head++; | |||
return Tokenizer_parse(self, 0, 0); | |||
} | |||
if (Textbuffer_write(extra, ' ')) | |||
if (Textbuffer_write(extra, this)) | |||
return NULL; | |||
return Tokenizer_pop(self); | |||
} | |||
@@ -2649,6 +2728,9 @@ PyObject* Tokenizer_parse(Tokenizer* self, uint64_t context, int push) | |||
return NULL; | |||
} | |||
else if (this == next && next == '[' && Tokenizer_CAN_RECURSE(self)) { | |||
if (this_context & LC_WIKILINK_TEXT) { | |||
return Tokenizer_fail_route(self); | |||
} | |||
if (!(this_context & AGG_NO_WIKILINKS)) { | |||
if (Tokenizer_parse_wikilink(self)) | |||
return NULL; |
@@ -366,7 +366,7 @@ class Tokenizer: | |||
self._emit_text("//") | |||
self._head += 2 | |||
else: | |||
valid = "abcdefghijklmnopqrstuvwxyz0123456789+.-" | |||
valid = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+.-" | |||
all_valid = lambda: all(char in valid for char in self._read()) | |||
scheme = "" | |||
while self._read() is not self.END and all_valid(): | |||
@@ -386,14 +386,15 @@ class Tokenizer: | |||
def _parse_free_uri_scheme(self): | |||
"""Parse the URI scheme of a free (no brackets) external link.""" | |||
valid = "abcdefghijklmnopqrstuvwxyz0123456789+.-" | |||
valid = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+.-" | |||
scheme = [] | |||
try: | |||
# We have to backtrack through the textbuffer looking for our | |||
# scheme since it was just parsed as text: | |||
for chunk in reversed(self._textbuffer): | |||
for char in reversed(chunk): | |||
if char.isspace() or char in self.MARKERS: | |||
# stop at the first non-word character | |||
if re.fullmatch(r"\W", char): | |||
raise StopIteration() | |||
if char not in valid: | |||
raise BadRoute() | |||
@@ -438,7 +439,7 @@ class Tokenizer: | |||
# Built from _parse()'s end sentinels: | |||
after, ctx = self._read(2), self._context | |||
equal_sign_contexts = contexts.TEMPLATE_PARAM_KEY | contexts.HEADING | |||
return (this in (self.END, "\n", "[", "]", "<", ">") or | |||
return (this in (self.END, "\n", "[", "]", "<", ">", "\"") or | |||
this == nxt == "'" or | |||
(this == "|" and ctx & contexts.TEMPLATE) or | |||
(this == "=" and ctx & equal_sign_contexts) or | |||
@@ -481,16 +482,29 @@ class Tokenizer: | |||
self._parse_template_or_argument() | |||
elif this == "]": | |||
return self._pop(), tail, 0 | |||
elif " " in this: | |||
before, after = this.split(" ", 1) | |||
elif this == "'" and nxt == "'": | |||
separator = tokens.ExternalLinkSeparator() | |||
separator.suppress_space = True | |||
self._emit(separator) | |||
self._context ^= contexts.EXT_LINK_URI | |||
self._context |= contexts.EXT_LINK_TITLE | |||
return self._parse(push=False), None, 0 | |||
elif any(ch in this for ch in (" ", "\n", "[", "]", "<", ">", | |||
"\"")): | |||
before, after = re.split(r"[ \n[\]<>\"]", this, maxsplit=1) | |||
delimiter = this[len(before)] | |||
if brackets: | |||
self._emit_text(before) | |||
self._emit(tokens.ExternalLinkSeparator()) | |||
separator = tokens.ExternalLinkSeparator() | |||
if delimiter != " ": | |||
separator.suppress_space = True | |||
self._emit(separator) | |||
if after: | |||
self._emit_text(after) | |||
self._context ^= contexts.EXT_LINK_URI | |||
self._context |= contexts.EXT_LINK_TITLE | |||
self._head += 1 | |||
if delimiter == " ": | |||
self._head += 1 | |||
return self._parse(push=False), None, 0 | |||
punct, tail = self._handle_free_link_text(punct, tail, before) | |||
return self._pop(), tail + " " + after, 0 | |||
@@ -1339,6 +1353,8 @@ class Tokenizer: | |||
return self._handle_argument_end() | |||
self._emit_text("}") | |||
elif this == nxt == "[" and self._can_recurse(): | |||
if self._context & contexts.WIKILINK_TEXT: | |||
self._fail_route() | |||
if not self._context & contexts.NO_WIKILINKS: | |||
self._parse_wikilink() | |||
else: |
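To see the new free-link sentinels end to end, a bare URL followed by a quote character can be parsed directly. Illustrative only; per the _is_free_link_end change above, the link is expected to stop before the quote:

    import mwparserfromhell

    code = mwparserfromhell.parse('See http://example.com/foo"bar for details.')
    for link in code.filter_external_links():
        # Expected: http://example.com/foo False
        print(link.url, link.brackets)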
@@ -1,146 +0,0 @@ | |||
# Copyright (C) 2012-2020 Ben Kurtovic <ben.kurtovic@gmail.com> | |||
# | |||
# Permission is hereby granted, free of charge, to any person obtaining a copy | |||
# of this software and associated documentation files (the "Software"), to deal | |||
# in the Software without restriction, including without limitation the rights | |||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | |||
# copies of the Software, and to permit persons to whom the Software is | |||
# furnished to do so, subject to the following conditions: | |||
# | |||
# The above copyright notice and this permission notice shall be included in | |||
# all copies or substantial portions of the Software. | |||
# | |||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | |||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | |||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
import codecs | |||
from os import listdir, path | |||
import sys | |||
import warnings | |||
from mwparserfromhell.parser import tokens | |||
from mwparserfromhell.parser.builder import Builder | |||
class _TestParseError(Exception): | |||
"""Raised internally when a test could not be parsed.""" | |||
class TokenizerTestCase: | |||
"""A base test case for tokenizers, whose tests are loaded dynamically. | |||
Subclassed along with unittest.TestCase to form TestPyTokenizer and | |||
TestCTokenizer. Tests are loaded dynamically from files in the 'tokenizer' | |||
directory. | |||
""" | |||
@staticmethod | |||
def _build_test_method(data): | |||
"""Create and return a method to be treated as a test case method. | |||
*data* is a dict containing multiple keys: the *input* text to be | |||
tokenized, the expected list of tokens as *output*, and an optional | |||
*label* for the method's docstring. | |||
""" | |||
def inner(self): | |||
if hasattr(self, "roundtrip"): | |||
expected = data["input"] | |||
actual = str(Builder().build(data["output"][:])) | |||
else: | |||
expected = data["output"] | |||
actual = self.tokenizer().tokenize(data["input"]) | |||
self.assertEqual(expected, actual) | |||
inner.__doc__ = data["label"] | |||
return inner | |||
@staticmethod | |||
def _parse_test(test, data): | |||
"""Parse an individual *test*, storing its info in *data*.""" | |||
for line in test.strip().splitlines(): | |||
if line.startswith("name:"): | |||
data["name"] = line[len("name:"):].strip() | |||
elif line.startswith("label:"): | |||
data["label"] = line[len("label:"):].strip() | |||
elif line.startswith("input:"): | |||
raw = line[len("input:"):].strip() | |||
if raw[0] == '"' and raw[-1] == '"': | |||
raw = raw[1:-1] | |||
raw = raw.encode("raw_unicode_escape") | |||
data["input"] = raw.decode("unicode_escape") | |||
elif line.startswith("output:"): | |||
raw = line[len("output:"):].strip() | |||
try: | |||
data["output"] = eval(raw, vars(tokens)) | |||
except Exception as err: | |||
raise _TestParseError(err) from err | |||
@classmethod | |||
def _load_tests(cls, filename, name, text, restrict=None): | |||
"""Load all tests in *text* from the file *filename*.""" | |||
tests = text.split("\n---\n") | |||
counter = 1 | |||
digits = len(str(len(tests))) | |||
for test in tests: | |||
data = {"name": None, "label": None, "input": None, "output": None} | |||
try: | |||
cls._parse_test(test, data) | |||
except _TestParseError as err: | |||
if data["name"]: | |||
error = "Could not parse test '{0}' in '{1}':\n\t{2}" | |||
warnings.warn(error.format(data["name"], filename, err)) | |||
else: | |||
error = "Could not parse a test in '{0}':\n\t{1}" | |||
warnings.warn(error.format(filename, err)) | |||
continue | |||
if not data["name"]: | |||
error = "A test in '{0}' was ignored because it lacked a name" | |||
warnings.warn(error.format(filename)) | |||
continue | |||
if data["input"] is None or data["output"] is None: | |||
error = "Test '{}' in '{}' was ignored because it lacked an input or an output" | |||
warnings.warn(error.format(data["name"], filename)) | |||
continue | |||
number = str(counter).zfill(digits) | |||
counter += 1 | |||
if restrict and data["name"] != restrict: | |||
continue | |||
fname = "test_{}{}_{}".format(name, number, data["name"]) | |||
meth = cls._build_test_method(data) | |||
setattr(cls, fname, meth) | |||
@classmethod | |||
def build(cls): | |||
"""Load and install all tests from the 'tokenizer' directory.""" | |||
def load_file(filename, restrict=None): | |||
with codecs.open(filename, "r", encoding="utf8") as fp: | |||
text = fp.read() | |||
name = path.split(filename)[1][:-len(extension)] | |||
cls._load_tests(filename, name, text, restrict) | |||
directory = path.join(path.dirname(__file__), "tokenizer") | |||
extension = ".mwtest" | |||
if len(sys.argv) > 2 and sys.argv[1] == "--use": | |||
for name in sys.argv[2:]: | |||
if "." in name: | |||
name, test = name.split(".", 1) | |||
else: | |||
test = None | |||
load_file(path.join(directory, name + extension), test) | |||
sys.argv = [sys.argv[0]] # So unittest doesn't try to parse this | |||
cls.skip_others = True | |||
else: | |||
for filename in listdir(directory): | |||
if not filename.endswith(extension): | |||
continue | |||
load_file(path.join(directory, filename)) | |||
cls.skip_others = False | |||
TokenizerTestCase.build() |
@@ -1,133 +0,0 @@ | |||
# Copyright (C) 2012-2020 Ben Kurtovic <ben.kurtovic@gmail.com> | |||
# | |||
# Permission is hereby granted, free of charge, to any person obtaining a copy | |||
# of this software and associated documentation files (the "Software"), to deal | |||
# in the Software without restriction, including without limitation the rights | |||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | |||
# copies of the Software, and to permit persons to whom the Software is | |||
# furnished to do so, subject to the following conditions: | |||
# | |||
# The above copyright notice and this permission notice shall be included in | |||
# all copies or substantial portions of the Software. | |||
# | |||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | |||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | |||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
from unittest import TestCase | |||
from mwparserfromhell.nodes import (Argument, Comment, Heading, HTMLEntity, | |||
Tag, Template, Text, Wikilink) | |||
from mwparserfromhell.smart_list import SmartList | |||
from mwparserfromhell.wikicode import Wikicode | |||
wrap = lambda L: Wikicode(SmartList(L)) | |||
wraptext = lambda *args: wrap([Text(t) for t in args]) | |||
class TreeEqualityTestCase(TestCase): | |||
"""A base test case with support for comparing the equality of node trees. | |||
This adds a number of type equality functions, for Wikicode, Text, | |||
Templates, and Wikilinks. | |||
""" | |||
def assertNodeEqual(self, expected, actual): | |||
"""Assert that two Nodes have the same type and have the same data.""" | |||
registry = { | |||
Argument: self.assertArgumentNodeEqual, | |||
Comment: self.assertCommentNodeEqual, | |||
Heading: self.assertHeadingNodeEqual, | |||
HTMLEntity: self.assertHTMLEntityNodeEqual, | |||
Tag: self.assertTagNodeEqual, | |||
Template: self.assertTemplateNodeEqual, | |||
Text: self.assertTextNodeEqual, | |||
Wikilink: self.assertWikilinkNodeEqual | |||
} | |||
for nodetype in registry: | |||
if isinstance(expected, nodetype): | |||
self.assertIsInstance(actual, nodetype) | |||
registry[nodetype](expected, actual) | |||
def assertArgumentNodeEqual(self, expected, actual): | |||
"""Assert that two Argument nodes have the same data.""" | |||
self.assertWikicodeEqual(expected.name, actual.name) | |||
if expected.default is not None: | |||
self.assertWikicodeEqual(expected.default, actual.default) | |||
else: | |||
self.assertIs(None, actual.default) | |||
def assertCommentNodeEqual(self, expected, actual): | |||
"""Assert that two Comment nodes have the same data.""" | |||
self.assertEqual(expected.contents, actual.contents) | |||
def assertHeadingNodeEqual(self, expected, actual): | |||
"""Assert that two Heading nodes have the same data.""" | |||
self.assertWikicodeEqual(expected.title, actual.title) | |||
self.assertEqual(expected.level, actual.level) | |||
def assertHTMLEntityNodeEqual(self, expected, actual): | |||
"""Assert that two HTMLEntity nodes have the same data.""" | |||
self.assertEqual(expected.value, actual.value) | |||
self.assertIs(expected.named, actual.named) | |||
self.assertIs(expected.hexadecimal, actual.hexadecimal) | |||
self.assertEqual(expected.hex_char, actual.hex_char) | |||
def assertTagNodeEqual(self, expected, actual): | |||
"""Assert that two Tag nodes have the same data.""" | |||
self.assertWikicodeEqual(expected.tag, actual.tag) | |||
if expected.contents is not None: | |||
self.assertWikicodeEqual(expected.contents, actual.contents) | |||
length = len(expected.attributes) | |||
self.assertEqual(length, len(actual.attributes)) | |||
for i in range(length): | |||
exp_attr = expected.attributes[i] | |||
act_attr = actual.attributes[i] | |||
self.assertWikicodeEqual(exp_attr.name, act_attr.name) | |||
if exp_attr.value is not None: | |||
self.assertWikicodeEqual(exp_attr.value, act_attr.value) | |||
self.assertEqual(exp_attr.quotes, act_attr.quotes) | |||
self.assertEqual(exp_attr.pad_first, act_attr.pad_first) | |||
self.assertEqual(exp_attr.pad_before_eq, act_attr.pad_before_eq) | |||
self.assertEqual(exp_attr.pad_after_eq, act_attr.pad_after_eq) | |||
self.assertEqual(expected.wiki_markup, actual.wiki_markup) | |||
self.assertIs(expected.self_closing, actual.self_closing) | |||
self.assertIs(expected.invalid, actual.invalid) | |||
self.assertIs(expected.implicit, actual.implicit) | |||
self.assertEqual(expected.padding, actual.padding) | |||
self.assertWikicodeEqual(expected.closing_tag, actual.closing_tag) | |||
def assertTemplateNodeEqual(self, expected, actual): | |||
"""Assert that two Template nodes have the same data.""" | |||
self.assertWikicodeEqual(expected.name, actual.name) | |||
length = len(expected.params) | |||
self.assertEqual(length, len(actual.params)) | |||
for i in range(length): | |||
exp_param = expected.params[i] | |||
act_param = actual.params[i] | |||
self.assertWikicodeEqual(exp_param.name, act_param.name) | |||
self.assertWikicodeEqual(exp_param.value, act_param.value) | |||
self.assertIs(exp_param.showkey, act_param.showkey) | |||
def assertTextNodeEqual(self, expected, actual): | |||
"""Assert that two Text nodes have the same data.""" | |||
self.assertEqual(expected.value, actual.value) | |||
def assertWikilinkNodeEqual(self, expected, actual): | |||
"""Assert that two Wikilink nodes have the same data.""" | |||
self.assertWikicodeEqual(expected.title, actual.title) | |||
if expected.text is not None: | |||
self.assertWikicodeEqual(expected.text, actual.text) | |||
else: | |||
self.assertIs(None, actual.text) | |||
def assertWikicodeEqual(self, expected, actual): | |||
"""Assert that two Wikicode objects have the same data.""" | |||
self.assertIsInstance(actual, Wikicode) | |||
length = len(expected.nodes) | |||
self.assertEqual(length, len(actual.nodes)) | |||
for i in range(length): | |||
self.assertNodeEqual(expected.get(i), actual.get(i)) |
@@ -0,0 +1,138 @@ | |||
# Copyright (C) 2012-2021 Ben Kurtovic <ben.kurtovic@gmail.com> | |||
# | |||
# Permission is hereby granted, free of charge, to any person obtaining a copy | |||
# of this software and associated documentation files (the "Software"), to deal | |||
# in the Software without restriction, including without limitation the rights | |||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | |||
# copies of the Software, and to permit persons to whom the Software is | |||
# furnished to do so, subject to the following conditions: | |||
# | |||
# The above copyright notice and this permission notice shall be included in | |||
# all copies or substantial portions of the Software. | |||
# | |||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | |||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | |||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
from mwparserfromhell.nodes import (Argument, Comment, ExternalLink, Heading, | |||
HTMLEntity, Tag, Template, Text, Wikilink) | |||
from mwparserfromhell.smart_list import SmartList | |||
from mwparserfromhell.wikicode import Wikicode | |||
wrap = lambda L: Wikicode(SmartList(L)) | |||
wraptext = lambda *args: wrap([Text(t) for t in args]) | |||
def _assert_node_equal(expected, actual): | |||
"""Assert that two Nodes have the same type and have the same data.""" | |||
registry = { | |||
Argument: _assert_argument_node_equal, | |||
Comment: _assert_comment_node_equal, | |||
ExternalLink: _assert_external_link_node_equal, | |||
Heading: _assert_heading_node_equal, | |||
HTMLEntity: _assert_html_entity_node_equal, | |||
Tag: _assert_tag_node_equal, | |||
Template: _assert_template_node_equal, | |||
Text: _assert_text_node_equal, | |||
Wikilink: _assert_wikilink_node_equal, | |||
} | |||
# pylint: disable=unidiomatic-typecheck | |||
assert type(expected) == type(actual) | |||
registry[type(expected)](expected, actual) | |||
def _assert_argument_node_equal(expected, actual): | |||
"""Assert that two Argument nodes have the same data.""" | |||
assert_wikicode_equal(expected.name, actual.name) | |||
if expected.default is not None: | |||
assert_wikicode_equal(expected.default, actual.default) | |||
else: | |||
assert actual.default is None | |||
def _assert_comment_node_equal(expected, actual): | |||
"""Assert that two Comment nodes have the same data.""" | |||
assert expected.contents == actual.contents | |||
def _assert_external_link_node_equal(expected, actual): | |||
"""Assert that two ExternalLink nodes have the same data.""" | |||
assert_wikicode_equal(expected.url, actual.url) | |||
if expected.title is not None: | |||
assert_wikicode_equal(expected.title, actual.title) | |||
else: | |||
assert actual.title is None | |||
assert expected.brackets is actual.brackets | |||
assert expected.suppress_space is actual.suppress_space | |||
def _assert_heading_node_equal(expected, actual): | |||
"""Assert that two Heading nodes have the same data.""" | |||
assert_wikicode_equal(expected.title, actual.title) | |||
assert expected.level == actual.level | |||
def _assert_html_entity_node_equal(expected, actual): | |||
"""Assert that two HTMLEntity nodes have the same data.""" | |||
assert expected.value == actual.value | |||
assert expected.named is actual.named | |||
assert expected.hexadecimal is actual.hexadecimal | |||
assert expected.hex_char == actual.hex_char | |||
def _assert_tag_node_equal(expected, actual): | |||
"""Assert that two Tag nodes have the same data.""" | |||
assert_wikicode_equal(expected.tag, actual.tag) | |||
if expected.contents is not None: | |||
assert_wikicode_equal(expected.contents, actual.contents) | |||
else: | |||
assert actual.contents is None | |||
length = len(expected.attributes) | |||
assert length == len(actual.attributes) | |||
for i in range(length): | |||
exp_attr = expected.attributes[i] | |||
act_attr = actual.attributes[i] | |||
assert_wikicode_equal(exp_attr.name, act_attr.name) | |||
if exp_attr.value is not None: | |||
assert_wikicode_equal(exp_attr.value, act_attr.value) | |||
assert exp_attr.quotes == act_attr.quotes | |||
else: | |||
assert act_attr.value is None | |||
assert exp_attr.pad_first == act_attr.pad_first | |||
assert exp_attr.pad_before_eq == act_attr.pad_before_eq | |||
assert exp_attr.pad_after_eq == act_attr.pad_after_eq | |||
assert expected.wiki_markup == actual.wiki_markup | |||
assert expected.self_closing is actual.self_closing | |||
assert expected.invalid is actual.invalid | |||
assert expected.implicit is actual.implicit | |||
assert expected.padding == actual.padding | |||
assert_wikicode_equal(expected.closing_tag, actual.closing_tag) | |||
def _assert_template_node_equal(expected, actual): | |||
"""Assert that two Template nodes have the same data.""" | |||
assert_wikicode_equal(expected.name, actual.name) | |||
length = len(expected.params) | |||
assert length == len(actual.params) | |||
for i in range(length): | |||
exp_param = expected.params[i] | |||
act_param = actual.params[i] | |||
assert_wikicode_equal(exp_param.name, act_param.name) | |||
assert_wikicode_equal(exp_param.value, act_param.value) | |||
assert exp_param.showkey is act_param.showkey | |||
def _assert_text_node_equal(expected, actual): | |||
"""Assert that two Text nodes have the same data.""" | |||
assert expected.value == actual.value | |||
def _assert_wikilink_node_equal(expected, actual): | |||
"""Assert that two Wikilink nodes have the same data.""" | |||
assert_wikicode_equal(expected.title, actual.title) | |||
if expected.text is not None: | |||
assert_wikicode_equal(expected.text, actual.text) | |||
else: | |||
assert actual.text is None | |||
def assert_wikicode_equal(expected, actual): | |||
"""Assert that two Wikicode objects have the same data.""" | |||
assert isinstance(actual, Wikicode) | |||
length = len(expected.nodes) | |||
assert length == len(actual.nodes) | |||
for i in range(length): | |||
_assert_node_equal(expected.get(i), actual.get(i)) |
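The ported test modules below import these helpers directly; a minimal pytest-style usage example (the test name is hypothetical, the imports match the real ports):

    from mwparserfromhell.nodes import Text
    from .conftest import assert_wikicode_equal, wrap, wraptext

    def test_wraptext_builds_text_nodes():
        # wraptext should wrap each string in a Text node inside one Wikicode.
        expected = wrap([Text("foo"), Text("bar")])
        assert_wikicode_equal(expected, wraptext("foo", "bar"))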
@@ -18,79 +18,78 @@ | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
import unittest | |||
""" | |||
Test cases for the Argument node. | |||
""" | |||
from mwparserfromhell.nodes import Argument, Text | |||
from ._test_tree_equality import TreeEqualityTestCase, wrap, wraptext | |||
import pytest | |||
class TestArgument(TreeEqualityTestCase): | |||
"""Test cases for the Argument node.""" | |||
def test_str(self): | |||
"""test Argument.__str__()""" | |||
node = Argument(wraptext("foobar")) | |||
self.assertEqual("{{{foobar}}}", str(node)) | |||
node2 = Argument(wraptext("foo"), wraptext("bar")) | |||
self.assertEqual("{{{foo|bar}}}", str(node2)) | |||
from mwparserfromhell.nodes import Argument, Text | |||
from .conftest import assert_wikicode_equal, wrap, wraptext | |||
def test_children(self): | |||
"""test Argument.__children__()""" | |||
node1 = Argument(wraptext("foobar")) | |||
node2 = Argument(wraptext("foo"), wrap([Text("bar"), Text("baz")])) | |||
gen1 = node1.__children__() | |||
gen2 = node2.__children__() | |||
self.assertIs(node1.name, next(gen1)) | |||
self.assertIs(node2.name, next(gen2)) | |||
self.assertIs(node2.default, next(gen2)) | |||
self.assertRaises(StopIteration, next, gen1) | |||
self.assertRaises(StopIteration, next, gen2) | |||
def test_str(): | |||
"""test Argument.__str__()""" | |||
node = Argument(wraptext("foobar")) | |||
assert "{{{foobar}}}" == str(node) | |||
node2 = Argument(wraptext("foo"), wraptext("bar")) | |||
assert "{{{foo|bar}}}" == str(node2) | |||
def test_strip(self): | |||
"""test Argument.__strip__()""" | |||
node1 = Argument(wraptext("foobar")) | |||
node2 = Argument(wraptext("foo"), wraptext("bar")) | |||
self.assertIs(None, node1.__strip__()) | |||
self.assertEqual("bar", node2.__strip__()) | |||
def test_children(): | |||
"""test Argument.__children__()""" | |||
node1 = Argument(wraptext("foobar")) | |||
node2 = Argument(wraptext("foo"), wrap([Text("bar"), Text("baz")])) | |||
gen1 = node1.__children__() | |||
gen2 = node2.__children__() | |||
assert node1.name is next(gen1) | |||
assert node2.name is next(gen2) | |||
assert node2.default is next(gen2) | |||
with pytest.raises(StopIteration): | |||
next(gen1) | |||
with pytest.raises(StopIteration): | |||
next(gen2) | |||
def test_showtree(self): | |||
"""test Argument.__showtree__()""" | |||
output = [] | |||
getter, marker = object(), object() | |||
get = lambda code: output.append((getter, code)) | |||
mark = lambda: output.append(marker) | |||
node1 = Argument(wraptext("foobar")) | |||
node2 = Argument(wraptext("foo"), wraptext("bar")) | |||
node1.__showtree__(output.append, get, mark) | |||
node2.__showtree__(output.append, get, mark) | |||
valid = [ | |||
"{{{", (getter, node1.name), "}}}", "{{{", (getter, node2.name), | |||
" | ", marker, (getter, node2.default), "}}}"] | |||
self.assertEqual(valid, output) | |||
def test_strip(): | |||
"""test Argument.__strip__()""" | |||
node1 = Argument(wraptext("foobar")) | |||
node2 = Argument(wraptext("foo"), wraptext("bar")) | |||
assert node1.__strip__() is None | |||
assert "bar" == node2.__strip__() | |||
def test_name(self): | |||
"""test getter/setter for the name attribute""" | |||
name = wraptext("foobar") | |||
node1 = Argument(name) | |||
node2 = Argument(name, wraptext("baz")) | |||
self.assertIs(name, node1.name) | |||
self.assertIs(name, node2.name) | |||
node1.name = "héhehé" | |||
node2.name = "héhehé" | |||
self.assertWikicodeEqual(wraptext("héhehé"), node1.name) | |||
self.assertWikicodeEqual(wraptext("héhehé"), node2.name) | |||
def test_showtree(): | |||
"""test Argument.__showtree__()""" | |||
output = [] | |||
getter, marker = object(), object() | |||
get = lambda code: output.append((getter, code)) | |||
mark = lambda: output.append(marker) | |||
node1 = Argument(wraptext("foobar")) | |||
node2 = Argument(wraptext("foo"), wraptext("bar")) | |||
node1.__showtree__(output.append, get, mark) | |||
node2.__showtree__(output.append, get, mark) | |||
valid = [ | |||
"{{{", (getter, node1.name), "}}}", "{{{", (getter, node2.name), | |||
" | ", marker, (getter, node2.default), "}}}"] | |||
assert valid == output | |||
def test_default(self): | |||
"""test getter/setter for the default attribute""" | |||
default = wraptext("baz") | |||
node1 = Argument(wraptext("foobar")) | |||
node2 = Argument(wraptext("foobar"), default) | |||
self.assertIs(None, node1.default) | |||
self.assertIs(default, node2.default) | |||
node1.default = "buzz" | |||
node2.default = None | |||
self.assertWikicodeEqual(wraptext("buzz"), node1.default) | |||
self.assertIs(None, node2.default) | |||
def test_name(): | |||
"""test getter/setter for the name attribute""" | |||
name = wraptext("foobar") | |||
node1 = Argument(name) | |||
node2 = Argument(name, wraptext("baz")) | |||
assert name is node1.name | |||
assert name is node2.name | |||
node1.name = "héhehé" | |||
node2.name = "héhehé" | |||
assert_wikicode_equal(wraptext("héhehé"), node1.name) | |||
assert_wikicode_equal(wraptext("héhehé"), node2.name) | |||
if __name__ == "__main__": | |||
unittest.main(verbosity=2) | |||
def test_default(): | |||
"""test getter/setter for the default attribute""" | |||
default = wraptext("baz") | |||
node1 = Argument(wraptext("foobar")) | |||
node2 = Argument(wraptext("foobar"), default) | |||
assert None is node1.default | |||
assert default is node2.default | |||
node1.default = "buzz" | |||
node2.default = None | |||
assert_wikicode_equal(wraptext("buzz"), node1.default) | |||
assert None is node2.default |
@@ -18,88 +18,88 @@ | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
import unittest | |||
""" | |||
Test cases for the Attribute node extra. | |||
""" | |||
import pytest | |||
from mwparserfromhell.nodes import Template | |||
from mwparserfromhell.nodes.extras import Attribute | |||
from .conftest import assert_wikicode_equal, wrap, wraptext | |||
from ._test_tree_equality import TreeEqualityTestCase, wrap, wraptext | |||
class TestAttribute(TreeEqualityTestCase): | |||
"""Test cases for the Attribute node extra.""" | |||
def test_str(self): | |||
"""test Attribute.__str__()""" | |||
node = Attribute(wraptext("foo")) | |||
self.assertEqual(" foo", str(node)) | |||
node2 = Attribute(wraptext("foo"), wraptext("bar")) | |||
self.assertEqual(' foo="bar"', str(node2)) | |||
node3 = Attribute(wraptext("a"), wraptext("b"), '"', "", " ", " ") | |||
self.assertEqual('a = "b"', str(node3)) | |||
node4 = Attribute(wraptext("a"), wraptext("b"), "'", "", " ", " ") | |||
self.assertEqual("a = 'b'", str(node4)) | |||
node5 = Attribute(wraptext("a"), wraptext("b"), None, "", " ", " ") | |||
self.assertEqual("a = b", str(node5)) | |||
node6 = Attribute(wraptext("a"), wrap([]), None, " ", "", " ") | |||
self.assertEqual(" a= ", str(node6)) | |||
def test_name(self): | |||
"""test getter/setter for the name attribute""" | |||
name = wraptext("id") | |||
node = Attribute(name, wraptext("bar")) | |||
self.assertIs(name, node.name) | |||
node.name = "{{id}}" | |||
self.assertWikicodeEqual(wrap([Template(wraptext("id"))]), node.name) | |||
def test_str(): | |||
"""test Attribute.__str__()""" | |||
node = Attribute(wraptext("foo")) | |||
assert " foo" == str(node) | |||
node2 = Attribute(wraptext("foo"), wraptext("bar")) | |||
assert ' foo="bar"' == str(node2) | |||
node3 = Attribute(wraptext("a"), wraptext("b"), '"', "", " ", " ") | |||
assert 'a = "b"' == str(node3) | |||
node4 = Attribute(wraptext("a"), wraptext("b"), "'", "", " ", " ") | |||
assert "a = 'b'" == str(node4) | |||
node5 = Attribute(wraptext("a"), wraptext("b"), None, "", " ", " ") | |||
assert "a = b" == str(node5) | |||
node6 = Attribute(wraptext("a"), wrap([]), None, " ", "", " ") | |||
assert " a= " == str(node6) | |||
def test_value(self): | |||
"""test getter/setter for the value attribute""" | |||
value = wraptext("foo") | |||
node = Attribute(wraptext("id"), value) | |||
self.assertIs(value, node.value) | |||
node.value = "{{bar}}" | |||
self.assertWikicodeEqual(wrap([Template(wraptext("bar"))]), node.value) | |||
node.value = None | |||
self.assertIs(None, node.value) | |||
node2 = Attribute(wraptext("id"), wraptext("foo"), None) | |||
node2.value = "foo bar baz" | |||
self.assertWikicodeEqual(wraptext("foo bar baz"), node2.value) | |||
self.assertEqual('"', node2.quotes) | |||
node2.value = 'foo "bar" baz' | |||
self.assertWikicodeEqual(wraptext('foo "bar" baz'), node2.value) | |||
self.assertEqual("'", node2.quotes) | |||
node2.value = "foo 'bar' baz" | |||
self.assertWikicodeEqual(wraptext("foo 'bar' baz"), node2.value) | |||
self.assertEqual('"', node2.quotes) | |||
node2.value = "fo\"o 'bar' b\"az" | |||
self.assertWikicodeEqual(wraptext("fo\"o 'bar' b\"az"), node2.value) | |||
self.assertEqual('"', node2.quotes) | |||
def test_name(): | |||
"""test getter/setter for the name attribute""" | |||
name = wraptext("id") | |||
node = Attribute(name, wraptext("bar")) | |||
assert name is node.name | |||
node.name = "{{id}}" | |||
assert_wikicode_equal(wrap([Template(wraptext("id"))]), node.name) | |||
def test_quotes(self): | |||
"""test getter/setter for the quotes attribute""" | |||
node1 = Attribute(wraptext("id"), wraptext("foo"), None) | |||
node2 = Attribute(wraptext("id"), wraptext("bar")) | |||
node3 = Attribute(wraptext("id"), wraptext("foo bar baz")) | |||
self.assertIs(None, node1.quotes) | |||
self.assertEqual('"', node2.quotes) | |||
node1.quotes = "'" | |||
node2.quotes = None | |||
self.assertEqual("'", node1.quotes) | |||
self.assertIs(None, node2.quotes) | |||
self.assertRaises(ValueError, setattr, node1, "quotes", "foobar") | |||
self.assertRaises(ValueError, setattr, node3, "quotes", None) | |||
self.assertRaises(ValueError, Attribute, wraptext("id"), | |||
wraptext("foo bar baz"), None) | |||
def test_value(): | |||
"""test getter/setter for the value attribute""" | |||
value = wraptext("foo") | |||
node = Attribute(wraptext("id"), value) | |||
assert value is node.value | |||
node.value = "{{bar}}" | |||
assert_wikicode_equal(wrap([Template(wraptext("bar"))]), node.value) | |||
node.value = None | |||
assert None is node.value | |||
node2 = Attribute(wraptext("id"), wraptext("foo"), None) | |||
node2.value = "foo bar baz" | |||
assert_wikicode_equal(wraptext("foo bar baz"), node2.value) | |||
assert '"' == node2.quotes | |||
node2.value = 'foo "bar" baz' | |||
assert_wikicode_equal(wraptext('foo "bar" baz'), node2.value) | |||
assert "'" == node2.quotes | |||
node2.value = "foo 'bar' baz" | |||
assert_wikicode_equal(wraptext("foo 'bar' baz"), node2.value) | |||
assert '"' == node2.quotes | |||
node2.value = "fo\"o 'bar' b\"az" | |||
assert_wikicode_equal(wraptext("fo\"o 'bar' b\"az"), node2.value) | |||
assert '"' == node2.quotes | |||
def test_padding(self): | |||
"""test getter/setter for the padding attributes""" | |||
for pad in ["pad_first", "pad_before_eq", "pad_after_eq"]: | |||
node = Attribute(wraptext("id"), wraptext("foo"), **{pad: "\n"}) | |||
self.assertEqual("\n", getattr(node, pad)) | |||
setattr(node, pad, " ") | |||
self.assertEqual(" ", getattr(node, pad)) | |||
setattr(node, pad, None) | |||
self.assertEqual("", getattr(node, pad)) | |||
self.assertRaises(ValueError, setattr, node, pad, True) | |||
def test_quotes(): | |||
"""test getter/setter for the quotes attribute""" | |||
node1 = Attribute(wraptext("id"), wraptext("foo"), None) | |||
node2 = Attribute(wraptext("id"), wraptext("bar")) | |||
node3 = Attribute(wraptext("id"), wraptext("foo bar baz")) | |||
assert None is node1.quotes | |||
assert '"' == node2.quotes | |||
node1.quotes = "'" | |||
node2.quotes = None | |||
assert "'" == node1.quotes | |||
assert None is node2.quotes | |||
with pytest.raises(ValueError): | |||
node1.__setattr__("quotes", "foobar") | |||
with pytest.raises(ValueError): | |||
node3.__setattr__("quotes", None) | |||
with pytest.raises(ValueError): | |||
Attribute(wraptext("id"), wraptext("foo bar baz"), None) | |||
if __name__ == "__main__": | |||
unittest.main(verbosity=2) | |||
def test_padding(): | |||
"""test getter/setter for the padding attributes""" | |||
for pad in ["pad_first", "pad_before_eq", "pad_after_eq"]: | |||
node = Attribute(wraptext("id"), wraptext("foo"), **{pad: "\n"}) | |||
assert "\n" == getattr(node, pad) | |||
setattr(node, pad, " ") | |||
assert " " == getattr(node, pad) | |||
setattr(node, pad, None) | |||
assert "" == getattr(node, pad) | |||
with pytest.raises(ValueError): | |||
node.__setattr__(pad, True) |
@@ -18,416 +18,409 @@ | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
import unittest | |||
""" | |||
Tests for the builder, which turns tokens into Wikicode objects. | |||
""" | |||
import pytest | |||
from mwparserfromhell.nodes import (Argument, Comment, ExternalLink, Heading, | |||
HTMLEntity, Tag, Template, Text, Wikilink) | |||
from mwparserfromhell.nodes.extras import Attribute, Parameter | |||
from mwparserfromhell.parser import tokens, ParserError | |||
from mwparserfromhell.parser.builder import Builder | |||
from ._test_tree_equality import TreeEqualityTestCase, wrap, wraptext | |||
class TestBuilder(TreeEqualityTestCase): | |||
"""Tests for the builder, which turns tokens into Wikicode objects.""" | |||
def setUp(self): | |||
self.builder = Builder() | |||
def test_text(self): | |||
"""tests for building Text nodes""" | |||
tests = [ | |||
([tokens.Text(text="foobar")], wraptext("foobar")), | |||
([tokens.Text(text="fóóbar")], wraptext("fóóbar")), | |||
([tokens.Text(text="spam"), tokens.Text(text="eggs")], | |||
wraptext("spam", "eggs")), | |||
] | |||
for test, valid in tests: | |||
self.assertWikicodeEqual(valid, self.builder.build(test)) | |||
def test_template(self): | |||
"""tests for building Template nodes""" | |||
tests = [ | |||
([tokens.TemplateOpen(), tokens.Text(text="foobar"), | |||
tokens.TemplateClose()], | |||
wrap([Template(wraptext("foobar"))])), | |||
([tokens.TemplateOpen(), tokens.Text(text="spam"), | |||
tokens.Text(text="eggs"), tokens.TemplateClose()], | |||
wrap([Template(wraptext("spam", "eggs"))])), | |||
([tokens.TemplateOpen(), tokens.Text(text="foo"), | |||
tokens.TemplateParamSeparator(), tokens.Text(text="bar"), | |||
tokens.TemplateClose()], | |||
wrap([Template(wraptext("foo"), params=[ | |||
Parameter(wraptext("1"), wraptext("bar"), showkey=False)])])), | |||
([tokens.TemplateOpen(), tokens.Text(text="foo"), | |||
tokens.TemplateParamSeparator(), tokens.Text(text="bar"), | |||
tokens.TemplateParamEquals(), tokens.Text(text="baz"), | |||
tokens.TemplateClose()], | |||
wrap([Template(wraptext("foo"), params=[ | |||
Parameter(wraptext("bar"), wraptext("baz"))])])), | |||
([tokens.TemplateOpen(), tokens.TemplateParamSeparator(), | |||
tokens.TemplateParamSeparator(), tokens.TemplateParamEquals(), | |||
tokens.TemplateParamSeparator(), tokens.TemplateClose()], | |||
wrap([Template(wrap([]), params=[ | |||
Parameter(wraptext("1"), wrap([]), showkey=False), | |||
Parameter(wrap([]), wrap([]), showkey=True), | |||
Parameter(wraptext("2"), wrap([]), showkey=False)])])), | |||
([tokens.TemplateOpen(), tokens.Text(text="foo"), | |||
tokens.TemplateParamSeparator(), tokens.Text(text="bar"), | |||
tokens.TemplateParamEquals(), tokens.Text(text="baz"), | |||
tokens.TemplateParamSeparator(), tokens.Text(text="biz"), | |||
tokens.TemplateParamSeparator(), tokens.Text(text="buzz"), | |||
tokens.TemplateParamSeparator(), tokens.Text(text="3"), | |||
tokens.TemplateParamEquals(), tokens.Text(text="buff"), | |||
tokens.TemplateParamSeparator(), tokens.Text(text="baff"), | |||
tokens.TemplateClose()], | |||
wrap([Template(wraptext("foo"), params=[ | |||
Parameter(wraptext("bar"), wraptext("baz")), | |||
Parameter(wraptext("1"), wraptext("biz"), showkey=False), | |||
Parameter(wraptext("2"), wraptext("buzz"), showkey=False), | |||
Parameter(wraptext("3"), wraptext("buff")), | |||
Parameter(wraptext("3"), wraptext("baff"), | |||
showkey=False)])])), | |||
] | |||
for test, valid in tests: | |||
self.assertWikicodeEqual(valid, self.builder.build(test)) | |||
def test_argument(self): | |||
"""tests for building Argument nodes""" | |||
tests = [ | |||
([tokens.ArgumentOpen(), tokens.Text(text="foobar"), | |||
tokens.ArgumentClose()], | |||
wrap([Argument(wraptext("foobar"))])), | |||
([tokens.ArgumentOpen(), tokens.Text(text="spam"), | |||
tokens.Text(text="eggs"), tokens.ArgumentClose()], | |||
wrap([Argument(wraptext("spam", "eggs"))])), | |||
([tokens.ArgumentOpen(), tokens.Text(text="foo"), | |||
tokens.ArgumentSeparator(), tokens.Text(text="bar"), | |||
tokens.ArgumentClose()], | |||
wrap([Argument(wraptext("foo"), wraptext("bar"))])), | |||
([tokens.ArgumentOpen(), tokens.Text(text="foo"), | |||
tokens.Text(text="bar"), tokens.ArgumentSeparator(), | |||
tokens.Text(text="baz"), tokens.Text(text="biz"), | |||
tokens.ArgumentClose()], | |||
wrap([Argument(wraptext("foo", "bar"), wraptext("baz", "biz"))])), | |||
] | |||
for test, valid in tests: | |||
self.assertWikicodeEqual(valid, self.builder.build(test)) | |||
def test_wikilink(self): | |||
"""tests for building Wikilink nodes""" | |||
tests = [ | |||
([tokens.WikilinkOpen(), tokens.Text(text="foobar"), | |||
tokens.WikilinkClose()], | |||
wrap([Wikilink(wraptext("foobar"))])), | |||
([tokens.WikilinkOpen(), tokens.Text(text="spam"), | |||
tokens.Text(text="eggs"), tokens.WikilinkClose()], | |||
wrap([Wikilink(wraptext("spam", "eggs"))])), | |||
([tokens.WikilinkOpen(), tokens.Text(text="foo"), | |||
tokens.WikilinkSeparator(), tokens.Text(text="bar"), | |||
tokens.WikilinkClose()], | |||
wrap([Wikilink(wraptext("foo"), wraptext("bar"))])), | |||
([tokens.WikilinkOpen(), tokens.Text(text="foo"), | |||
tokens.Text(text="bar"), tokens.WikilinkSeparator(), | |||
tokens.Text(text="baz"), tokens.Text(text="biz"), | |||
tokens.WikilinkClose()], | |||
wrap([Wikilink(wraptext("foo", "bar"), wraptext("baz", "biz"))])), | |||
] | |||
for test, valid in tests: | |||
self.assertWikicodeEqual(valid, self.builder.build(test)) | |||
def test_external_link(self): | |||
"""tests for building ExternalLink nodes""" | |||
tests = [ | |||
([tokens.ExternalLinkOpen(brackets=False), | |||
tokens.Text(text="http://example.com/"), | |||
tokens.ExternalLinkClose()], | |||
wrap([ExternalLink(wraptext("http://example.com/"), | |||
brackets=False)])), | |||
([tokens.ExternalLinkOpen(brackets=True), | |||
tokens.Text(text="http://example.com/"), | |||
tokens.ExternalLinkClose()], | |||
wrap([ExternalLink(wraptext("http://example.com/"))])), | |||
([tokens.ExternalLinkOpen(brackets=True), | |||
tokens.Text(text="http://example.com/"), | |||
tokens.ExternalLinkSeparator(), tokens.ExternalLinkClose()], | |||
wrap([ExternalLink(wraptext("http://example.com/"), wrap([]))])), | |||
([tokens.ExternalLinkOpen(brackets=True), | |||
tokens.Text(text="http://example.com/"), | |||
tokens.ExternalLinkSeparator(), tokens.Text(text="Example"), | |||
tokens.ExternalLinkClose()], | |||
wrap([ExternalLink(wraptext("http://example.com/"), | |||
wraptext("Example"))])), | |||
([tokens.ExternalLinkOpen(brackets=False), | |||
tokens.Text(text="http://example"), tokens.Text(text=".com/foo"), | |||
tokens.ExternalLinkClose()], | |||
wrap([ExternalLink(wraptext("http://example", ".com/foo"), | |||
brackets=False)])), | |||
([tokens.ExternalLinkOpen(brackets=True), | |||
tokens.Text(text="http://example"), tokens.Text(text=".com/foo"), | |||
tokens.ExternalLinkSeparator(), tokens.Text(text="Example"), | |||
tokens.Text(text=" Web Page"), tokens.ExternalLinkClose()], | |||
wrap([ExternalLink(wraptext("http://example", ".com/foo"), | |||
wraptext("Example", " Web Page"))])), | |||
] | |||
for test, valid in tests: | |||
self.assertWikicodeEqual(valid, self.builder.build(test)) | |||
def test_html_entity(self): | |||
"""tests for building HTMLEntity nodes""" | |||
tests = [ | |||
([tokens.HTMLEntityStart(), tokens.Text(text="nbsp"), | |||
tokens.HTMLEntityEnd()], | |||
wrap([HTMLEntity("nbsp", named=True, hexadecimal=False)])), | |||
([tokens.HTMLEntityStart(), tokens.HTMLEntityNumeric(), | |||
tokens.Text(text="107"), tokens.HTMLEntityEnd()], | |||
wrap([HTMLEntity("107", named=False, hexadecimal=False)])), | |||
([tokens.HTMLEntityStart(), tokens.HTMLEntityNumeric(), | |||
tokens.HTMLEntityHex(char="X"), tokens.Text(text="6B"), | |||
tokens.HTMLEntityEnd()], | |||
wrap([HTMLEntity("6B", named=False, hexadecimal=True, | |||
hex_char="X")])), | |||
] | |||
for test, valid in tests: | |||
self.assertWikicodeEqual(valid, self.builder.build(test)) | |||
def test_heading(self): | |||
"""tests for building Heading nodes""" | |||
tests = [ | |||
([tokens.HeadingStart(level=2), tokens.Text(text="foobar"), | |||
tokens.HeadingEnd()], | |||
wrap([Heading(wraptext("foobar"), 2)])), | |||
([tokens.HeadingStart(level=4), tokens.Text(text="spam"), | |||
tokens.Text(text="eggs"), tokens.HeadingEnd()], | |||
wrap([Heading(wraptext("spam", "eggs"), 4)])), | |||
] | |||
for test, valid in tests: | |||
self.assertWikicodeEqual(valid, self.builder.build(test)) | |||
def test_comment(self): | |||
"""tests for building Comment nodes""" | |||
tests = [ | |||
([tokens.CommentStart(), tokens.Text(text="foobar"), | |||
tokens.CommentEnd()], | |||
wrap([Comment("foobar")])), | |||
([tokens.CommentStart(), tokens.Text(text="spam"), | |||
tokens.Text(text="eggs"), tokens.CommentEnd()], | |||
wrap([Comment("spameggs")])), | |||
] | |||
for test, valid in tests: | |||
self.assertWikicodeEqual(valid, self.builder.build(test)) | |||
def test_tag(self): | |||
"""tests for building Tag nodes""" | |||
tests = [ | |||
# <ref></ref> | |||
([tokens.TagOpenOpen(), tokens.Text(text="ref"), | |||
tokens.TagCloseOpen(padding=""), tokens.TagOpenClose(), | |||
tokens.Text(text="ref"), tokens.TagCloseClose()], | |||
wrap([Tag(wraptext("ref"), wrap([]), | |||
closing_tag=wraptext("ref"))])), | |||
# <ref name></ref> | |||
([tokens.TagOpenOpen(), tokens.Text(text="ref"), | |||
tokens.TagAttrStart(pad_first=" ", pad_before_eq="", | |||
pad_after_eq=""), | |||
tokens.Text(text="name"), tokens.TagCloseOpen(padding=""), | |||
tokens.TagOpenClose(), tokens.Text(text="ref"), | |||
tokens.TagCloseClose()], | |||
wrap([Tag(wraptext("ref"), wrap([]), | |||
attrs=[Attribute(wraptext("name"))])])), | |||
# <ref name="abc" /> | |||
([tokens.TagOpenOpen(), tokens.Text(text="ref"), | |||
tokens.TagAttrStart(pad_first=" ", pad_before_eq="", | |||
pad_after_eq=""), | |||
tokens.Text(text="name"), tokens.TagAttrEquals(), | |||
tokens.TagAttrQuote(char='"'), tokens.Text(text="abc"), | |||
tokens.TagCloseSelfclose(padding=" ")], | |||
wrap([Tag(wraptext("ref"), | |||
attrs=[Attribute(wraptext("name"), wraptext("abc"))], | |||
self_closing=True, padding=" ")])), | |||
# <br/> | |||
([tokens.TagOpenOpen(), tokens.Text(text="br"), | |||
tokens.TagCloseSelfclose(padding="")], | |||
wrap([Tag(wraptext("br"), self_closing=True)])), | |||
# <li> | |||
([tokens.TagOpenOpen(), tokens.Text(text="li"), | |||
tokens.TagCloseSelfclose(padding="", implicit=True)], | |||
wrap([Tag(wraptext("li"), self_closing=True, implicit=True)])), | |||
# </br> | |||
([tokens.TagOpenOpen(invalid=True), tokens.Text(text="br"), | |||
tokens.TagCloseSelfclose(padding="", implicit=True)], | |||
wrap([Tag(wraptext("br"), self_closing=True, invalid=True, | |||
implicit=True)])), | |||
# </br/> | |||
([tokens.TagOpenOpen(invalid=True), tokens.Text(text="br"), | |||
tokens.TagCloseSelfclose(padding="")], | |||
wrap([Tag(wraptext("br"), self_closing=True, invalid=True)])), | |||
# <ref name={{abc}} foo="bar {{baz}}" abc={{de}}f ghi=j{{k}}{{l}} | |||
# mno = '{{p}} [[q]] {{r}}'>[[Source]]</ref> | |||
([tokens.TagOpenOpen(), tokens.Text(text="ref"), | |||
tokens.TagAttrStart(pad_first=" ", pad_before_eq="", | |||
pad_after_eq=""), | |||
tokens.Text(text="name"), tokens.TagAttrEquals(), | |||
tokens.TemplateOpen(), tokens.Text(text="abc"), | |||
tokens.TemplateClose(), | |||
tokens.TagAttrStart(pad_first=" ", pad_before_eq="", | |||
pad_after_eq=""), | |||
tokens.Text(text="foo"), tokens.TagAttrEquals(), | |||
tokens.TagAttrQuote(char='"'), tokens.Text(text="bar "), | |||
tokens.TemplateOpen(), tokens.Text(text="baz"), | |||
tokens.TemplateClose(), | |||
tokens.TagAttrStart(pad_first=" ", pad_before_eq="", | |||
pad_after_eq=""), | |||
tokens.Text(text="abc"), tokens.TagAttrEquals(), | |||
tokens.TemplateOpen(), tokens.Text(text="de"), | |||
tokens.TemplateClose(), tokens.Text(text="f"), | |||
tokens.TagAttrStart(pad_first=" ", pad_before_eq="", | |||
pad_after_eq=""), | |||
tokens.Text(text="ghi"), tokens.TagAttrEquals(), | |||
tokens.Text(text="j"), tokens.TemplateOpen(), | |||
tokens.Text(text="k"), tokens.TemplateClose(), | |||
tokens.TemplateOpen(), tokens.Text(text="l"), | |||
tokens.TemplateClose(), | |||
tokens.TagAttrStart(pad_first=" \n ", pad_before_eq=" ", | |||
pad_after_eq=" "), | |||
tokens.Text(text="mno"), tokens.TagAttrEquals(), | |||
tokens.TagAttrQuote(char="'"), tokens.TemplateOpen(), | |||
tokens.Text(text="p"), tokens.TemplateClose(), | |||
tokens.Text(text=" "), tokens.WikilinkOpen(), | |||
tokens.Text(text="q"), tokens.WikilinkClose(), | |||
tokens.Text(text=" "), tokens.TemplateOpen(), | |||
tokens.Text(text="r"), tokens.TemplateClose(), | |||
tokens.TagCloseOpen(padding=""), tokens.WikilinkOpen(), | |||
tokens.Text(text="Source"), tokens.WikilinkClose(), | |||
tokens.TagOpenClose(), tokens.Text(text="ref"), | |||
tokens.TagCloseClose()], | |||
wrap([Tag(wraptext("ref"), wrap([Wikilink(wraptext("Source"))]), [ | |||
Attribute(wraptext("name"), | |||
wrap([Template(wraptext("abc"))]), None), | |||
Attribute(wraptext("foo"), wrap([Text("bar "), | |||
Template(wraptext("baz"))]), pad_first=" "), | |||
Attribute(wraptext("abc"), wrap([Template(wraptext("de")), | |||
Text("f")]), None), | |||
Attribute(wraptext("ghi"), wrap([Text("j"), | |||
Template(wraptext("k")), | |||
Template(wraptext("l"))]), None), | |||
Attribute(wraptext("mno"), wrap([Template(wraptext("p")), | |||
Text(" "), Wikilink(wraptext("q")), Text(" "), | |||
Template(wraptext("r"))]), "'", " \n ", " ", | |||
" ")])])), | |||
# "''italic text''" | |||
([tokens.TagOpenOpen(wiki_markup="''"), tokens.Text(text="i"), | |||
tokens.TagCloseOpen(), tokens.Text(text="italic text"), | |||
tokens.TagOpenClose(), tokens.Text(text="i"), | |||
tokens.TagCloseClose()], | |||
wrap([Tag(wraptext("i"), wraptext("italic text"), | |||
wiki_markup="''")])), | |||
# * bullet | |||
([tokens.TagOpenOpen(wiki_markup="*"), tokens.Text(text="li"), | |||
tokens.TagCloseSelfclose(), tokens.Text(text=" bullet")], | |||
wrap([Tag(wraptext("li"), wiki_markup="*", self_closing=True), | |||
Text(" bullet")])), | |||
] | |||
for test, valid in tests: | |||
self.assertWikicodeEqual(valid, self.builder.build(test)) | |||
def test_integration(self): | |||
"""a test for building a combination of templates together""" | |||
# {{{{{{{{foo}}bar|baz=biz}}buzz}}usr|{{bin}}}} | |||
test = [tokens.TemplateOpen(), tokens.TemplateOpen(), | |||
tokens.TemplateOpen(), tokens.TemplateOpen(), | |||
tokens.Text(text="foo"), tokens.TemplateClose(), | |||
tokens.Text(text="bar"), tokens.TemplateParamSeparator(), | |||
tokens.Text(text="baz"), tokens.TemplateParamEquals(), | |||
tokens.Text(text="biz"), tokens.TemplateClose(), | |||
tokens.Text(text="buzz"), tokens.TemplateClose(), | |||
tokens.Text(text="usr"), tokens.TemplateParamSeparator(), | |||
tokens.TemplateOpen(), tokens.Text(text="bin"), | |||
tokens.TemplateClose(), tokens.TemplateClose()] | |||
valid = wrap( | |||
[Template(wrap([Template(wrap([Template(wrap([Template(wraptext( | |||
"foo")), Text("bar")]), params=[Parameter(wraptext("baz"), | |||
wraptext("biz"))]), Text("buzz")])), Text("usr")]), params=[ | |||
Parameter(wraptext("1"), wrap([Template(wraptext("bin"))]), | |||
showkey=False)])]) | |||
self.assertWikicodeEqual(valid, self.builder.build(test)) | |||
def test_integration2(self): | |||
"""an even more audacious test for building a horrible wikicode mess""" | |||
# {{a|b|{{c|[[d]]{{{e}}}}}}}[[f|{{{g}}}<!--h-->]]{{i|j= }} | |||
test = [tokens.TemplateOpen(), tokens.Text(text="a"), | |||
tokens.TemplateParamSeparator(), tokens.Text(text="b"), | |||
tokens.TemplateParamSeparator(), tokens.TemplateOpen(), | |||
tokens.Text(text="c"), tokens.TemplateParamSeparator(), | |||
tokens.WikilinkOpen(), tokens.Text(text="d"), | |||
tokens.WikilinkClose(), tokens.ArgumentOpen(), | |||
tokens.Text(text="e"), tokens.ArgumentClose(), | |||
tokens.TemplateClose(), tokens.TemplateClose(), | |||
tokens.WikilinkOpen(), tokens.Text(text="f"), | |||
tokens.WikilinkSeparator(), tokens.ArgumentOpen(), | |||
tokens.Text(text="g"), tokens.ArgumentClose(), | |||
tokens.CommentStart(), tokens.Text(text="h"), | |||
tokens.CommentEnd(), tokens.WikilinkClose(), | |||
tokens.TemplateOpen(), tokens.Text(text="i"), | |||
tokens.TemplateParamSeparator(), tokens.Text(text="j"), | |||
tokens.TemplateParamEquals(), tokens.HTMLEntityStart(), | |||
tokens.Text(text="nbsp"), tokens.HTMLEntityEnd(), | |||
tokens.TemplateClose()] | |||
valid = wrap( | |||
[Template(wraptext("a"), params=[Parameter(wraptext("1"), wraptext( | |||
"b"), showkey=False), Parameter(wraptext("2"), wrap([Template( | |||
wraptext("c"), params=[Parameter(wraptext("1"), wrap([Wikilink( | |||
wraptext("d")), Argument(wraptext("e"))]), showkey=False)])]), | |||
showkey=False)]), Wikilink(wraptext("f"), wrap([Argument(wraptext( | |||
"g")), Comment("h")])), Template(wraptext("i"), params=[ | |||
Parameter(wraptext("j"), wrap([HTMLEntity("nbsp", | |||
named=True)]))])]) | |||
self.assertWikicodeEqual(valid, self.builder.build(test)) | |||
def test_parser_errors(self): | |||
"""test whether ParserError gets thrown for bad input""" | |||
missing_closes = [ | |||
[tokens.TemplateOpen(), tokens.TemplateParamSeparator()], | |||
[tokens.TemplateOpen()], [tokens.ArgumentOpen()], | |||
[tokens.WikilinkOpen()], [tokens.ExternalLinkOpen()], | |||
[tokens.HeadingStart()], [tokens.CommentStart()], | |||
[tokens.TagOpenOpen(), tokens.TagAttrStart()], | |||
[tokens.TagOpenOpen()] | |||
] | |||
msg = r"_handle_token\(\) got unexpected TemplateClose" | |||
self.assertRaisesRegex(ParserError, msg, self.builder.build, [tokens.TemplateClose()]) | |||
for test in missing_closes: | |||
self.assertRaises(ParserError, self.builder.build, test) | |||
if __name__ == "__main__": | |||
unittest.main(verbosity=2) | |||
from .conftest import assert_wikicode_equal, wrap, wraptext | |||
@pytest.fixture() | |||
def builder(): | |||
return Builder() | |||
@pytest.mark.parametrize("test,valid", [ | |||
([tokens.Text(text="foobar")], wraptext("foobar")), | |||
([tokens.Text(text="fóóbar")], wraptext("fóóbar")), | |||
([tokens.Text(text="spam"), tokens.Text(text="eggs")], | |||
wraptext("spam", "eggs")), | |||
]) | |||
def test_text(builder, test, valid): | |||
"""tests for building Text nodes""" | |||
assert_wikicode_equal(valid, builder.build(test)) | |||
@pytest.mark.parametrize("test,valid", [ | |||
([tokens.TemplateOpen(), tokens.Text(text="foobar"), | |||
tokens.TemplateClose()], | |||
wrap([Template(wraptext("foobar"))])), | |||
([tokens.TemplateOpen(), tokens.Text(text="spam"), | |||
tokens.Text(text="eggs"), tokens.TemplateClose()], | |||
wrap([Template(wraptext("spam", "eggs"))])), | |||
([tokens.TemplateOpen(), tokens.Text(text="foo"), | |||
tokens.TemplateParamSeparator(), tokens.Text(text="bar"), | |||
tokens.TemplateClose()], | |||
wrap([Template(wraptext("foo"), params=[ | |||
Parameter(wraptext("1"), wraptext("bar"), showkey=False)])])), | |||
([tokens.TemplateOpen(), tokens.Text(text="foo"), | |||
tokens.TemplateParamSeparator(), tokens.Text(text="bar"), | |||
tokens.TemplateParamEquals(), tokens.Text(text="baz"), | |||
tokens.TemplateClose()], | |||
wrap([Template(wraptext("foo"), params=[ | |||
Parameter(wraptext("bar"), wraptext("baz"))])])), | |||
([tokens.TemplateOpen(), tokens.TemplateParamSeparator(), | |||
tokens.TemplateParamSeparator(), tokens.TemplateParamEquals(), | |||
tokens.TemplateParamSeparator(), tokens.TemplateClose()], | |||
wrap([Template(wrap([]), params=[ | |||
Parameter(wraptext("1"), wrap([]), showkey=False), | |||
Parameter(wrap([]), wrap([]), showkey=True), | |||
Parameter(wraptext("2"), wrap([]), showkey=False)])])), | |||
([tokens.TemplateOpen(), tokens.Text(text="foo"), | |||
tokens.TemplateParamSeparator(), tokens.Text(text="bar"), | |||
tokens.TemplateParamEquals(), tokens.Text(text="baz"), | |||
tokens.TemplateParamSeparator(), tokens.Text(text="biz"), | |||
tokens.TemplateParamSeparator(), tokens.Text(text="buzz"), | |||
tokens.TemplateParamSeparator(), tokens.Text(text="3"), | |||
tokens.TemplateParamEquals(), tokens.Text(text="buff"), | |||
tokens.TemplateParamSeparator(), tokens.Text(text="baff"), | |||
tokens.TemplateClose()], | |||
wrap([Template(wraptext("foo"), params=[ | |||
Parameter(wraptext("bar"), wraptext("baz")), | |||
Parameter(wraptext("1"), wraptext("biz"), showkey=False), | |||
Parameter(wraptext("2"), wraptext("buzz"), showkey=False), | |||
Parameter(wraptext("3"), wraptext("buff")), | |||
Parameter(wraptext("3"), wraptext("baff"), | |||
showkey=False)])])), | |||
]) | |||
def test_template(builder, test, valid): | |||
"""tests for building Template nodes""" | |||
assert_wikicode_equal(valid, builder.build(test)) | |||
@pytest.mark.parametrize("test,valid", [ | |||
([tokens.ArgumentOpen(), tokens.Text(text="foobar"), | |||
tokens.ArgumentClose()], | |||
wrap([Argument(wraptext("foobar"))])), | |||
([tokens.ArgumentOpen(), tokens.Text(text="spam"), | |||
tokens.Text(text="eggs"), tokens.ArgumentClose()], | |||
wrap([Argument(wraptext("spam", "eggs"))])), | |||
([tokens.ArgumentOpen(), tokens.Text(text="foo"), | |||
tokens.ArgumentSeparator(), tokens.Text(text="bar"), | |||
tokens.ArgumentClose()], | |||
wrap([Argument(wraptext("foo"), wraptext("bar"))])), | |||
([tokens.ArgumentOpen(), tokens.Text(text="foo"), | |||
tokens.Text(text="bar"), tokens.ArgumentSeparator(), | |||
tokens.Text(text="baz"), tokens.Text(text="biz"), | |||
tokens.ArgumentClose()], | |||
wrap([Argument(wraptext("foo", "bar"), wraptext("baz", "biz"))])), | |||
]) | |||
def test_argument(builder, test, valid): | |||
"""tests for building Argument nodes""" | |||
assert_wikicode_equal(valid, builder.build(test)) | |||
@pytest.mark.parametrize("test,valid", [ | |||
([tokens.WikilinkOpen(), tokens.Text(text="foobar"), | |||
tokens.WikilinkClose()], | |||
wrap([Wikilink(wraptext("foobar"))])), | |||
([tokens.WikilinkOpen(), tokens.Text(text="spam"), | |||
tokens.Text(text="eggs"), tokens.WikilinkClose()], | |||
wrap([Wikilink(wraptext("spam", "eggs"))])), | |||
([tokens.WikilinkOpen(), tokens.Text(text="foo"), | |||
tokens.WikilinkSeparator(), tokens.Text(text="bar"), | |||
tokens.WikilinkClose()], | |||
wrap([Wikilink(wraptext("foo"), wraptext("bar"))])), | |||
([tokens.WikilinkOpen(), tokens.Text(text="foo"), | |||
tokens.Text(text="bar"), tokens.WikilinkSeparator(), | |||
tokens.Text(text="baz"), tokens.Text(text="biz"), | |||
tokens.WikilinkClose()], | |||
wrap([Wikilink(wraptext("foo", "bar"), wraptext("baz", "biz"))])), | |||
]) | |||
def test_wikilink(builder, test, valid): | |||
"""tests for building Wikilink nodes""" | |||
assert_wikicode_equal(valid, builder.build(test)) | |||
@pytest.mark.parametrize("test,valid", [ | |||
([tokens.ExternalLinkOpen(brackets=False), | |||
tokens.Text(text="http://example.com/"), | |||
tokens.ExternalLinkClose()], | |||
wrap([ExternalLink(wraptext("http://example.com/"), | |||
brackets=False)])), | |||
([tokens.ExternalLinkOpen(brackets=True), | |||
tokens.Text(text="http://example.com/"), | |||
tokens.ExternalLinkClose()], | |||
wrap([ExternalLink(wraptext("http://example.com/"))])), | |||
([tokens.ExternalLinkOpen(brackets=True), | |||
tokens.Text(text="http://example.com/"), | |||
tokens.ExternalLinkSeparator(), tokens.ExternalLinkClose()], | |||
wrap([ExternalLink(wraptext("http://example.com/"), wrap([]))])), | |||
([tokens.ExternalLinkOpen(brackets=True), | |||
tokens.Text(text="http://example.com/"), | |||
tokens.ExternalLinkSeparator(), tokens.Text(text="Example"), | |||
tokens.ExternalLinkClose()], | |||
wrap([ExternalLink(wraptext("http://example.com/"), | |||
wraptext("Example"))])), | |||
([tokens.ExternalLinkOpen(brackets=False), | |||
tokens.Text(text="http://example"), tokens.Text(text=".com/foo"), | |||
tokens.ExternalLinkClose()], | |||
wrap([ExternalLink(wraptext("http://example", ".com/foo"), | |||
brackets=False)])), | |||
([tokens.ExternalLinkOpen(brackets=True), | |||
tokens.Text(text="http://example"), tokens.Text(text=".com/foo"), | |||
tokens.ExternalLinkSeparator(), tokens.Text(text="Example"), | |||
tokens.Text(text=" Web Page"), tokens.ExternalLinkClose()], | |||
wrap([ExternalLink(wraptext("http://example", ".com/foo"), | |||
wraptext("Example", " Web Page"))])), | |||
]) | |||
def test_external_link(builder, test, valid): | |||
"""tests for building ExternalLink nodes""" | |||
assert_wikicode_equal(valid, builder.build(test)) | |||
@pytest.mark.parametrize("test,valid", [ | |||
([tokens.HTMLEntityStart(), tokens.Text(text="nbsp"), | |||
tokens.HTMLEntityEnd()], | |||
wrap([HTMLEntity("nbsp", named=True, hexadecimal=False)])), | |||
([tokens.HTMLEntityStart(), tokens.HTMLEntityNumeric(), | |||
tokens.Text(text="107"), tokens.HTMLEntityEnd()], | |||
wrap([HTMLEntity("107", named=False, hexadecimal=False)])), | |||
([tokens.HTMLEntityStart(), tokens.HTMLEntityNumeric(), | |||
tokens.HTMLEntityHex(char="X"), tokens.Text(text="6B"), | |||
tokens.HTMLEntityEnd()], | |||
wrap([HTMLEntity("6B", named=False, hexadecimal=True, | |||
hex_char="X")])), | |||
]) | |||
def test_html_entity(builder, test, valid): | |||
"""tests for building HTMLEntity nodes""" | |||
assert_wikicode_equal(valid, builder.build(test)) | |||
@pytest.mark.parametrize("test,valid", [ | |||
([tokens.HeadingStart(level=2), tokens.Text(text="foobar"), | |||
tokens.HeadingEnd()], | |||
wrap([Heading(wraptext("foobar"), 2)])), | |||
([tokens.HeadingStart(level=4), tokens.Text(text="spam"), | |||
tokens.Text(text="eggs"), tokens.HeadingEnd()], | |||
wrap([Heading(wraptext("spam", "eggs"), 4)])), | |||
]) | |||
def test_heading(builder, test, valid): | |||
"""tests for building Heading nodes""" | |||
assert_wikicode_equal(valid, builder.build(test)) | |||
@pytest.mark.parametrize("test,valid", [ | |||
([tokens.CommentStart(), tokens.Text(text="foobar"), | |||
tokens.CommentEnd()], | |||
wrap([Comment("foobar")])), | |||
([tokens.CommentStart(), tokens.Text(text="spam"), | |||
tokens.Text(text="eggs"), tokens.CommentEnd()], | |||
wrap([Comment("spameggs")])), | |||
]) | |||
def test_comment(builder, test, valid): | |||
"""tests for building Comment nodes""" | |||
assert_wikicode_equal(valid, builder.build(test)) | |||
@pytest.mark.parametrize("test,valid", [ | |||
# <ref></ref> | |||
([tokens.TagOpenOpen(), tokens.Text(text="ref"), | |||
tokens.TagCloseOpen(padding=""), tokens.TagOpenClose(), | |||
tokens.Text(text="ref"), tokens.TagCloseClose()], | |||
wrap([Tag(wraptext("ref"), wrap([]), | |||
closing_tag=wraptext("ref"))])), | |||
# <ref name></ref> | |||
([tokens.TagOpenOpen(), tokens.Text(text="ref"), | |||
tokens.TagAttrStart(pad_first=" ", pad_before_eq="", | |||
pad_after_eq=""), | |||
tokens.Text(text="name"), tokens.TagCloseOpen(padding=""), | |||
tokens.TagOpenClose(), tokens.Text(text="ref"), | |||
tokens.TagCloseClose()], | |||
wrap([Tag(wraptext("ref"), wrap([]), | |||
attrs=[Attribute(wraptext("name"))])])), | |||
# <ref name="abc" /> | |||
([tokens.TagOpenOpen(), tokens.Text(text="ref"), | |||
tokens.TagAttrStart(pad_first=" ", pad_before_eq="", | |||
pad_after_eq=""), | |||
tokens.Text(text="name"), tokens.TagAttrEquals(), | |||
tokens.TagAttrQuote(char='"'), tokens.Text(text="abc"), | |||
tokens.TagCloseSelfclose(padding=" ")], | |||
wrap([Tag(wraptext("ref"), | |||
attrs=[Attribute(wraptext("name"), wraptext("abc"))], | |||
self_closing=True, padding=" ")])), | |||
# <br/> | |||
([tokens.TagOpenOpen(), tokens.Text(text="br"), | |||
tokens.TagCloseSelfclose(padding="")], | |||
wrap([Tag(wraptext("br"), self_closing=True)])), | |||
# <li> | |||
([tokens.TagOpenOpen(), tokens.Text(text="li"), | |||
tokens.TagCloseSelfclose(padding="", implicit=True)], | |||
wrap([Tag(wraptext("li"), self_closing=True, implicit=True)])), | |||
# </br> | |||
([tokens.TagOpenOpen(invalid=True), tokens.Text(text="br"), | |||
tokens.TagCloseSelfclose(padding="", implicit=True)], | |||
wrap([Tag(wraptext("br"), self_closing=True, invalid=True, | |||
implicit=True)])), | |||
# </br/> | |||
([tokens.TagOpenOpen(invalid=True), tokens.Text(text="br"), | |||
tokens.TagCloseSelfclose(padding="")], | |||
wrap([Tag(wraptext("br"), self_closing=True, invalid=True)])), | |||
# <ref name={{abc}} foo="bar {{baz}}" abc={{de}}f ghi=j{{k}}{{l}} | |||
# mno = '{{p}} [[q]] {{r}}'>[[Source]]</ref> | |||
([tokens.TagOpenOpen(), tokens.Text(text="ref"), | |||
tokens.TagAttrStart(pad_first=" ", pad_before_eq="", | |||
pad_after_eq=""), | |||
tokens.Text(text="name"), tokens.TagAttrEquals(), | |||
tokens.TemplateOpen(), tokens.Text(text="abc"), | |||
tokens.TemplateClose(), | |||
tokens.TagAttrStart(pad_first=" ", pad_before_eq="", | |||
pad_after_eq=""), | |||
tokens.Text(text="foo"), tokens.TagAttrEquals(), | |||
tokens.TagAttrQuote(char='"'), tokens.Text(text="bar "), | |||
tokens.TemplateOpen(), tokens.Text(text="baz"), | |||
tokens.TemplateClose(), | |||
tokens.TagAttrStart(pad_first=" ", pad_before_eq="", | |||
pad_after_eq=""), | |||
tokens.Text(text="abc"), tokens.TagAttrEquals(), | |||
tokens.TemplateOpen(), tokens.Text(text="de"), | |||
tokens.TemplateClose(), tokens.Text(text="f"), | |||
tokens.TagAttrStart(pad_first=" ", pad_before_eq="", | |||
pad_after_eq=""), | |||
tokens.Text(text="ghi"), tokens.TagAttrEquals(), | |||
tokens.Text(text="j"), tokens.TemplateOpen(), | |||
tokens.Text(text="k"), tokens.TemplateClose(), | |||
tokens.TemplateOpen(), tokens.Text(text="l"), | |||
tokens.TemplateClose(), | |||
tokens.TagAttrStart(pad_first=" \n ", pad_before_eq=" ", | |||
pad_after_eq=" "), | |||
tokens.Text(text="mno"), tokens.TagAttrEquals(), | |||
tokens.TagAttrQuote(char="'"), tokens.TemplateOpen(), | |||
tokens.Text(text="p"), tokens.TemplateClose(), | |||
tokens.Text(text=" "), tokens.WikilinkOpen(), | |||
tokens.Text(text="q"), tokens.WikilinkClose(), | |||
tokens.Text(text=" "), tokens.TemplateOpen(), | |||
tokens.Text(text="r"), tokens.TemplateClose(), | |||
tokens.TagCloseOpen(padding=""), tokens.WikilinkOpen(), | |||
tokens.Text(text="Source"), tokens.WikilinkClose(), | |||
tokens.TagOpenClose(), tokens.Text(text="ref"), | |||
tokens.TagCloseClose()], | |||
wrap([Tag(wraptext("ref"), wrap([Wikilink(wraptext("Source"))]), [ | |||
Attribute(wraptext("name"), | |||
wrap([Template(wraptext("abc"))]), None), | |||
Attribute(wraptext("foo"), wrap([Text("bar "), | |||
Template(wraptext("baz"))]), pad_first=" "), | |||
Attribute(wraptext("abc"), wrap([Template(wraptext("de")), | |||
Text("f")]), None), | |||
Attribute(wraptext("ghi"), wrap([Text("j"), | |||
Template(wraptext("k")), | |||
Template(wraptext("l"))]), None), | |||
Attribute(wraptext("mno"), wrap([Template(wraptext("p")), | |||
Text(" "), Wikilink(wraptext("q")), Text(" "), | |||
Template(wraptext("r"))]), "'", " \n ", " ", | |||
" ")])])), | |||
# "''italic text''" | |||
([tokens.TagOpenOpen(wiki_markup="''"), tokens.Text(text="i"), | |||
tokens.TagCloseOpen(), tokens.Text(text="italic text"), | |||
tokens.TagOpenClose(), tokens.Text(text="i"), | |||
tokens.TagCloseClose()], | |||
wrap([Tag(wraptext("i"), wraptext("italic text"), | |||
wiki_markup="''")])), | |||
# * bullet | |||
([tokens.TagOpenOpen(wiki_markup="*"), tokens.Text(text="li"), | |||
tokens.TagCloseSelfclose(), tokens.Text(text=" bullet")], | |||
wrap([Tag(wraptext("li"), wiki_markup="*", self_closing=True), | |||
Text(" bullet")])), | |||
]) | |||
def test_tag(builder, test, valid): | |||
"""tests for building Tag nodes""" | |||
assert_wikicode_equal(valid, builder.build(test)) | |||
def test_integration(builder): | |||
"""a test for building a combination of templates together""" | |||
# {{{{{{{{foo}}bar|baz=biz}}buzz}}usr|{{bin}}}} | |||
test = [tokens.TemplateOpen(), tokens.TemplateOpen(), | |||
tokens.TemplateOpen(), tokens.TemplateOpen(), | |||
tokens.Text(text="foo"), tokens.TemplateClose(), | |||
tokens.Text(text="bar"), tokens.TemplateParamSeparator(), | |||
tokens.Text(text="baz"), tokens.TemplateParamEquals(), | |||
tokens.Text(text="biz"), tokens.TemplateClose(), | |||
tokens.Text(text="buzz"), tokens.TemplateClose(), | |||
tokens.Text(text="usr"), tokens.TemplateParamSeparator(), | |||
tokens.TemplateOpen(), tokens.Text(text="bin"), | |||
tokens.TemplateClose(), tokens.TemplateClose()] | |||
valid = wrap( | |||
[Template(wrap([Template(wrap([Template(wrap([Template(wraptext( | |||
"foo")), Text("bar")]), params=[Parameter(wraptext("baz"), | |||
wraptext("biz"))]), Text("buzz")])), Text("usr")]), params=[ | |||
Parameter(wraptext("1"), wrap([Template(wraptext("bin"))]), | |||
showkey=False)])]) | |||
assert_wikicode_equal(valid, builder.build(test)) | |||
def test_integration2(builder): | |||
"""an even more audacious test for building a horrible wikicode mess""" | |||
# {{a|b|{{c|[[d]]{{{e}}}}}}}[[f|{{{g}}}<!--h-->]]{{i|j= }} | |||
test = [tokens.TemplateOpen(), tokens.Text(text="a"), | |||
tokens.TemplateParamSeparator(), tokens.Text(text="b"), | |||
tokens.TemplateParamSeparator(), tokens.TemplateOpen(), | |||
tokens.Text(text="c"), tokens.TemplateParamSeparator(), | |||
tokens.WikilinkOpen(), tokens.Text(text="d"), | |||
tokens.WikilinkClose(), tokens.ArgumentOpen(), | |||
tokens.Text(text="e"), tokens.ArgumentClose(), | |||
tokens.TemplateClose(), tokens.TemplateClose(), | |||
tokens.WikilinkOpen(), tokens.Text(text="f"), | |||
tokens.WikilinkSeparator(), tokens.ArgumentOpen(), | |||
tokens.Text(text="g"), tokens.ArgumentClose(), | |||
tokens.CommentStart(), tokens.Text(text="h"), | |||
tokens.CommentEnd(), tokens.WikilinkClose(), | |||
tokens.TemplateOpen(), tokens.Text(text="i"), | |||
tokens.TemplateParamSeparator(), tokens.Text(text="j"), | |||
tokens.TemplateParamEquals(), tokens.HTMLEntityStart(), | |||
tokens.Text(text="nbsp"), tokens.HTMLEntityEnd(), | |||
tokens.TemplateClose()] | |||
valid = wrap( | |||
[Template(wraptext("a"), params=[Parameter(wraptext("1"), wraptext( | |||
"b"), showkey=False), Parameter(wraptext("2"), wrap([Template( | |||
wraptext("c"), params=[Parameter(wraptext("1"), wrap([Wikilink( | |||
wraptext("d")), Argument(wraptext("e"))]), showkey=False)])]), | |||
showkey=False)]), Wikilink(wraptext("f"), wrap([Argument(wraptext( | |||
"g")), Comment("h")])), Template(wraptext("i"), params=[ | |||
Parameter(wraptext("j"), wrap([HTMLEntity("nbsp", | |||
named=True)]))])]) | |||
assert_wikicode_equal(valid, builder.build(test)) | |||
@pytest.mark.parametrize("tokens", [ | |||
[tokens.TemplateOpen(), tokens.TemplateParamSeparator()], | |||
[tokens.TemplateOpen()], [tokens.ArgumentOpen()], | |||
[tokens.WikilinkOpen()], [tokens.ExternalLinkOpen()], | |||
[tokens.HeadingStart()], [tokens.CommentStart()], | |||
[tokens.TagOpenOpen(), tokens.TagAttrStart()], | |||
[tokens.TagOpenOpen()] | |||
]) | |||
def test_parser_errors(builder, tokens): | |||
"""test whether ParserError gets thrown for bad input""" | |||
with pytest.raises(ParserError): | |||
builder.build(tokens) | |||
def test_parser_errors_templateclose(builder): | |||
with pytest.raises( | |||
ParserError, | |||
match=r"_handle_token\(\) got unexpected TemplateClose" | |||
): | |||
builder.build([tokens.TemplateClose()]) |
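The builder tests above all follow the same conversion pattern: a TestCase method that looped over a local tests list becomes a module-level function decorated with @pytest.mark.parametrize, and the shared Builder instance moves into a fixture. A minimal, self-contained sketch of that pattern (illustrative names, not code taken from this diff):

# Minimal sketch of the unittest -> pytest conversion pattern used above.
import pytest

from mwparserfromhell.parser import tokens
from mwparserfromhell.parser.builder import Builder


@pytest.fixture()
def builder():
    # One fresh Builder per test, replacing a TestCase attribute or setUp().
    return Builder()


@pytest.mark.parametrize("test,valid", [
    # Each (token stream, expected text) pair runs as its own test item.
    ([tokens.Text(text="foobar")], "foobar"),
])
def test_text_sketch(builder, test, valid):
    assert str(builder.build(test)) == valid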
@@ -18,44 +18,41 @@ | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
import unittest | |||
""" | |||
Test cases for the Comment node. | |||
""" | |||
import pytest | |||
from mwparserfromhell.nodes import Comment | |||
from ._test_tree_equality import TreeEqualityTestCase | |||
class TestComment(TreeEqualityTestCase): | |||
"""Test cases for the Comment node.""" | |||
def test_str(self): | |||
"""test Comment.__str__()""" | |||
node = Comment("foobar") | |||
self.assertEqual("<!--foobar-->", str(node)) | |||
def test_children(self): | |||
"""test Comment.__children__()""" | |||
node = Comment("foobar") | |||
gen = node.__children__() | |||
self.assertRaises(StopIteration, next, gen) | |||
def test_strip(self): | |||
"""test Comment.__strip__()""" | |||
node = Comment("foobar") | |||
self.assertIs(None, node.__strip__()) | |||
def test_showtree(self): | |||
"""test Comment.__showtree__()""" | |||
output = [] | |||
node = Comment("foobar") | |||
node.__showtree__(output.append, None, None) | |||
self.assertEqual(["<!--foobar-->"], output) | |||
def test_contents(self): | |||
"""test getter/setter for the contents attribute""" | |||
node = Comment("foobar") | |||
self.assertEqual("foobar", node.contents) | |||
node.contents = "barfoo" | |||
self.assertEqual("barfoo", node.contents) | |||
if __name__ == "__main__": | |||
unittest.main(verbosity=2) | |||
def test_str(): | |||
"""test Comment.__str__()""" | |||
node = Comment("foobar") | |||
assert "<!--foobar-->" == str(node) | |||
def test_children(): | |||
"""test Comment.__children__()""" | |||
node = Comment("foobar") | |||
gen = node.__children__() | |||
with pytest.raises(StopIteration): | |||
next(gen) | |||
def test_strip(): | |||
"""test Comment.__strip__()""" | |||
node = Comment("foobar") | |||
assert node.__strip__() is None | |||
def test_showtree(): | |||
"""test Comment.__showtree__()""" | |||
output = [] | |||
node = Comment("foobar") | |||
node.__showtree__(output.append, None, None) | |||
assert ["<!--foobar-->"] == output | |||
def test_contents(): | |||
"""test getter/setter for the contents attribute""" | |||
node = Comment("foobar") | |||
assert "foobar" == node.contents | |||
node.contents = "barfoo" | |||
assert "barfoo" == node.contents |
@@ -1,45 +0,0 @@ | |||
# Copyright (C) 2012-2016 Ben Kurtovic <ben.kurtovic@gmail.com> | |||
# | |||
# Permission is hereby granted, free of charge, to any person obtaining a copy | |||
# of this software and associated documentation files (the "Software"), to deal | |||
# in the Software without restriction, including without limitation the rights | |||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | |||
# copies of the Software, and to permit persons to whom the Software is | |||
# furnished to do so, subject to the following conditions: | |||
# | |||
# The above copyright notice and this permission notice shall be included in | |||
# all copies or substantial portions of the Software. | |||
# | |||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | |||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | |||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
import unittest | |||
try: | |||
from mwparserfromhell.parser._tokenizer import CTokenizer | |||
except ImportError: | |||
CTokenizer = None | |||
from ._test_tokenizer import TokenizerTestCase | |||
@unittest.skipUnless(CTokenizer, "C tokenizer not available") | |||
class TestCTokenizer(TokenizerTestCase, unittest.TestCase): | |||
"""Test cases for the C tokenizer.""" | |||
@classmethod | |||
def setUpClass(cls): | |||
cls.tokenizer = CTokenizer | |||
if not TokenizerTestCase.skip_others: | |||
def test_uses_c(self): | |||
"""make sure the C tokenizer identifies as using a C extension""" | |||
self.assertTrue(CTokenizer.USES_C) | |||
self.assertTrue(CTokenizer().USES_C) | |||
if __name__ == "__main__": | |||
unittest.main(verbosity=2) |
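The removed file above guarded its tests with @unittest.skipUnless(CTokenizer, ...) so they only ran when the C extension was built. This diff does not show where, or whether, equivalent coverage lands elsewhere; for reference only, the usual pytest idiom for skipping tests that depend on an optional C extension looks like this (illustrative, not part of this diff):

# Illustrative pytest-style guard for an optional C extension.
import pytest

# Skips the whole module at collection time if the extension cannot be imported.
_tokenizer = pytest.importorskip("mwparserfromhell.parser._tokenizer")


def test_uses_c_sketch():
    # The C tokenizer identifies itself as using a C extension.
    assert _tokenizer.CTokenizer.USES_C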
@@ -18,106 +18,104 @@ | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
""" | |||
Integration test cases for mwparserfromhell's documentation. | |||
""" | |||
import json | |||
from io import StringIO | |||
import os | |||
import unittest | |||
from urllib.parse import urlencode | |||
from urllib.request import urlopen | |||
import mwparserfromhell | |||
class TestDocs(unittest.TestCase): | |||
"""Integration test cases for mwparserfromhell's documentation.""" | |||
import pytest | |||
def assertPrint(self, value, output): | |||
"""Assertion check that *value*, when printed, produces *output*.""" | |||
buff = StringIO() | |||
print(value, end="", file=buff) | |||
buff.seek(0) | |||
self.assertEqual(output, buff.read()) | |||
import mwparserfromhell | |||
def test_readme_1(self): | |||
"""test a block of example code in the README""" | |||
text = "I has a template! {{foo|bar|baz|eggs=spam}} See it?" | |||
wikicode = mwparserfromhell.parse(text) | |||
self.assertPrint(wikicode, | |||
"I has a template! {{foo|bar|baz|eggs=spam}} See it?") | |||
templates = wikicode.filter_templates() | |||
self.assertPrint(templates, "['{{foo|bar|baz|eggs=spam}}']") | |||
template = templates[0] | |||
self.assertPrint(template.name, "foo") | |||
self.assertPrint(template.params, "['bar', 'baz', 'eggs=spam']") | |||
self.assertPrint(template.get(1).value, "bar") | |||
self.assertPrint(template.get("eggs").value, "spam") | |||
def assert_print(value, output): | |||
"""Assertion check that *value*, when printed, produces *output*.""" | |||
buff = StringIO() | |||
print(value, end="", file=buff) | |||
buff.seek(0) | |||
assert output == buff.read() | |||
def test_readme_2(self): | |||
"""test a block of example code in the README""" | |||
text = "{{foo|{{bar}}={{baz|{{spam}}}}}}" | |||
temps = mwparserfromhell.parse(text).filter_templates() | |||
res = "['{{foo|{{bar}}={{baz|{{spam}}}}}}', '{{bar}}', '{{baz|{{spam}}}}', '{{spam}}']" | |||
self.assertPrint(temps, res) | |||
def test_readme_1(): | |||
"""test a block of example code in the README""" | |||
text = "I has a template! {{foo|bar|baz|eggs=spam}} See it?" | |||
wikicode = mwparserfromhell.parse(text) | |||
assert_print(wikicode, "I has a template! {{foo|bar|baz|eggs=spam}} See it?") | |||
templates = wikicode.filter_templates() | |||
assert_print(templates, "['{{foo|bar|baz|eggs=spam}}']") | |||
template = templates[0] | |||
assert_print(template.name, "foo") | |||
assert_print(template.params, "['bar', 'baz', 'eggs=spam']") | |||
assert_print(template.get(1).value, "bar") | |||
assert_print(template.get("eggs").value, "spam") | |||
def test_readme_3(self): | |||
"""test a block of example code in the README""" | |||
code = mwparserfromhell.parse("{{foo|this {{includes a|template}}}}") | |||
self.assertPrint(code.filter_templates(recursive=False), | |||
"['{{foo|this {{includes a|template}}}}']") | |||
foo = code.filter_templates(recursive=False)[0] | |||
self.assertPrint(foo.get(1).value, "this {{includes a|template}}") | |||
self.assertPrint(foo.get(1).value.filter_templates()[0], | |||
"{{includes a|template}}") | |||
self.assertPrint(foo.get(1).value.filter_templates()[0].get(1).value, | |||
"template") | |||
def test_readme_2(): | |||
"""test a block of example code in the README""" | |||
text = "{{foo|{{bar}}={{baz|{{spam}}}}}}" | |||
temps = mwparserfromhell.parse(text).filter_templates() | |||
res = "['{{foo|{{bar}}={{baz|{{spam}}}}}}', '{{bar}}', '{{baz|{{spam}}}}', '{{spam}}']" | |||
assert_print(temps, res) | |||
def test_readme_4(self): | |||
"""test a block of example code in the README""" | |||
text = "{{cleanup}} '''Foo''' is a [[bar]]. {{uncategorized}}" | |||
code = mwparserfromhell.parse(text) | |||
for template in code.filter_templates(): | |||
if template.name.matches("Cleanup") and not template.has("date"): | |||
template.add("date", "July 2012") | |||
res = "{{cleanup|date=July 2012}} '''Foo''' is a [[bar]]. {{uncategorized}}" | |||
self.assertPrint(code, res) | |||
code.replace("{{uncategorized}}", "{{bar-stub}}") | |||
res = "{{cleanup|date=July 2012}} '''Foo''' is a [[bar]]. {{bar-stub}}" | |||
self.assertPrint(code, res) | |||
res = "['{{cleanup|date=July 2012}}', '{{bar-stub}}']" | |||
self.assertPrint(code.filter_templates(), res) | |||
text = str(code) | |||
res = "{{cleanup|date=July 2012}} '''Foo''' is a [[bar]]. {{bar-stub}}" | |||
self.assertPrint(text, res) | |||
self.assertEqual(text, code) | |||
def test_readme_3(): | |||
"""test a block of example code in the README""" | |||
code = mwparserfromhell.parse("{{foo|this {{includes a|template}}}}") | |||
assert_print(code.filter_templates(recursive=False), | |||
"['{{foo|this {{includes a|template}}}}']") | |||
foo = code.filter_templates(recursive=False)[0] | |||
assert_print(foo.get(1).value, "this {{includes a|template}}") | |||
assert_print(foo.get(1).value.filter_templates()[0], | |||
"{{includes a|template}}") | |||
assert_print(foo.get(1).value.filter_templates()[0].get(1).value, | |||
"template") | |||
@unittest.skipIf("NOWEB" in os.environ, "web test disabled by environ var") | |||
def test_readme_5(self): | |||
"""test a block of example code in the README; includes a web call""" | |||
url1 = "https://en.wikipedia.org/w/api.php" | |||
url2 = "https://en.wikipedia.org/w/index.php?title={0}&action=raw" | |||
title = "Test" | |||
data = { | |||
"action": "query", | |||
"prop": "revisions", | |||
"rvprop": "content", | |||
"rvslots": "main", | |||
"rvlimit": 1, | |||
"titles": title, | |||
"format": "json", | |||
"formatversion": "2", | |||
} | |||
try: | |||
raw = urlopen(url1, urlencode(data).encode("utf8")).read() | |||
except OSError: | |||
self.skipTest("cannot continue because of unsuccessful web call") | |||
res = json.loads(raw.decode("utf8")) | |||
revision = res["query"]["pages"][0]["revisions"][0] | |||
text = revision["slots"]["main"]["content"] | |||
try: | |||
expected = urlopen(url2.format(title)).read().decode("utf8") | |||
except OSError: | |||
self.skipTest("cannot continue because of unsuccessful web call") | |||
actual = mwparserfromhell.parse(text) | |||
self.assertEqual(expected, actual) | |||
def test_readme_4(): | |||
"""test a block of example code in the README""" | |||
text = "{{cleanup}} '''Foo''' is a [[bar]]. {{uncategorized}}" | |||
code = mwparserfromhell.parse(text) | |||
for template in code.filter_templates(): | |||
if template.name.matches("Cleanup") and not template.has("date"): | |||
template.add("date", "July 2012") | |||
res = "{{cleanup|date=July 2012}} '''Foo''' is a [[bar]]. {{uncategorized}}" | |||
assert_print(code, res) | |||
code.replace("{{uncategorized}}", "{{bar-stub}}") | |||
res = "{{cleanup|date=July 2012}} '''Foo''' is a [[bar]]. {{bar-stub}}" | |||
assert_print(code, res) | |||
res = "['{{cleanup|date=July 2012}}', '{{bar-stub}}']" | |||
assert_print(code.filter_templates(), res) | |||
text = str(code) | |||
res = "{{cleanup|date=July 2012}} '''Foo''' is a [[bar]]. {{bar-stub}}" | |||
assert_print(text, res) | |||
assert text == code | |||
if __name__ == "__main__": | |||
unittest.main(verbosity=2) | |||
@pytest.mark.skipif("NOWEB" in os.environ, reason="web test disabled by environ var") | |||
def test_readme_5(): | |||
"""test a block of example code in the README; includes a web call""" | |||
url1 = "https://en.wikipedia.org/w/api.php" | |||
url2 = "https://en.wikipedia.org/w/index.php?title={0}&action=raw" | |||
title = "Test" | |||
data = { | |||
"action": "query", | |||
"prop": "revisions", | |||
"rvprop": "content", | |||
"rvslots": "main", | |||
"rvlimit": 1, | |||
"titles": title, | |||
"format": "json", | |||
"formatversion": "2", | |||
} | |||
try: | |||
raw = urlopen(url1, urlencode(data).encode("utf8")).read() | |||
except OSError: | |||
pytest.skip("cannot continue because of unsuccessful web call") | |||
res = json.loads(raw.decode("utf8")) | |||
revision = res["query"]["pages"][0]["revisions"][0] | |||
text = revision["slots"]["main"]["content"] | |||
try: | |||
expected = urlopen(url2.format(title)).read().decode("utf8") | |||
except OSError: | |||
pytest.skip("cannot continue because of unsuccessful web call") | |||
actual = mwparserfromhell.parse(text) | |||
assert expected == actual |
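The final comparison in test_readme_4 (assert text == code) works because Wikicode objects behave like strings in comparisons, so a parsed tree compares equal to its string form. A quick illustration of that behavior, included only as an aside:

# Wikicode compares equal to the equivalent wikitext string.
import mwparserfromhell


def test_wikicode_string_equality_sketch():
    code = mwparserfromhell.parse("{{foo}}")
    assert code == "{{foo}}"
    assert str(code) == "{{foo}}"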
@@ -18,103 +18,102 @@ | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
import unittest | |||
""" | |||
Test cases for the ExternalLink node. | |||
""" | |||
from mwparserfromhell.nodes import ExternalLink, Text | |||
from ._test_tree_equality import TreeEqualityTestCase, wrap, wraptext | |||
import pytest | |||
class TestExternalLink(TreeEqualityTestCase): | |||
"""Test cases for the ExternalLink node.""" | |||
def test_str(self): | |||
"""test ExternalLink.__str__()""" | |||
node = ExternalLink(wraptext("http://example.com/"), brackets=False) | |||
self.assertEqual("http://example.com/", str(node)) | |||
node2 = ExternalLink(wraptext("http://example.com/")) | |||
self.assertEqual("[http://example.com/]", str(node2)) | |||
node3 = ExternalLink(wraptext("http://example.com/"), wrap([])) | |||
self.assertEqual("[http://example.com/ ]", str(node3)) | |||
node4 = ExternalLink(wraptext("http://example.com/"), | |||
wraptext("Example Web Page")) | |||
self.assertEqual("[http://example.com/ Example Web Page]", str(node4)) | |||
from mwparserfromhell.nodes import ExternalLink, Text | |||
from .conftest import assert_wikicode_equal, wrap, wraptext | |||
def test_children(self): | |||
"""test ExternalLink.__children__()""" | |||
node1 = ExternalLink(wraptext("http://example.com/"), brackets=False) | |||
node2 = ExternalLink(wraptext("http://example.com/"), | |||
wrap([Text("Example"), Text("Page")])) | |||
gen1 = node1.__children__() | |||
gen2 = node2.__children__() | |||
self.assertEqual(node1.url, next(gen1)) | |||
self.assertEqual(node2.url, next(gen2)) | |||
self.assertEqual(node2.title, next(gen2)) | |||
self.assertRaises(StopIteration, next, gen1) | |||
self.assertRaises(StopIteration, next, gen2) | |||
def test_str(): | |||
"""test ExternalLink.__str__()""" | |||
node = ExternalLink(wraptext("http://example.com/"), brackets=False) | |||
assert "http://example.com/" == str(node) | |||
node2 = ExternalLink(wraptext("http://example.com/")) | |||
assert "[http://example.com/]" == str(node2) | |||
node3 = ExternalLink(wraptext("http://example.com/"), wrap([])) | |||
assert "[http://example.com/ ]" == str(node3) | |||
node4 = ExternalLink(wraptext("http://example.com/"), | |||
wraptext("Example Web Page")) | |||
assert "[http://example.com/ Example Web Page]" == str(node4) | |||
def test_strip(self): | |||
"""test ExternalLink.__strip__()""" | |||
node1 = ExternalLink(wraptext("http://example.com"), brackets=False) | |||
node2 = ExternalLink(wraptext("http://example.com")) | |||
node3 = ExternalLink(wraptext("http://example.com"), wrap([])) | |||
node4 = ExternalLink(wraptext("http://example.com"), wraptext("Link")) | |||
def test_children(): | |||
"""test ExternalLink.__children__()""" | |||
node1 = ExternalLink(wraptext("http://example.com/"), brackets=False) | |||
node2 = ExternalLink(wraptext("http://example.com/"), | |||
wrap([Text("Example"), Text("Page")])) | |||
gen1 = node1.__children__() | |||
gen2 = node2.__children__() | |||
assert node1.url == next(gen1) | |||
assert node2.url == next(gen2) | |||
assert node2.title == next(gen2) | |||
with pytest.raises(StopIteration): | |||
next(gen1) | |||
with pytest.raises(StopIteration): | |||
next(gen2) | |||
self.assertEqual("http://example.com", node1.__strip__()) | |||
self.assertEqual(None, node2.__strip__()) | |||
self.assertEqual(None, node3.__strip__()) | |||
self.assertEqual("Link", node4.__strip__()) | |||
def test_strip(): | |||
"""test ExternalLink.__strip__()""" | |||
node1 = ExternalLink(wraptext("http://example.com"), brackets=False) | |||
node2 = ExternalLink(wraptext("http://example.com")) | |||
node3 = ExternalLink(wraptext("http://example.com"), wrap([])) | |||
node4 = ExternalLink(wraptext("http://example.com"), wraptext("Link")) | |||
def test_showtree(self): | |||
"""test ExternalLink.__showtree__()""" | |||
output = [] | |||
getter, marker = object(), object() | |||
get = lambda code: output.append((getter, code)) | |||
mark = lambda: output.append(marker) | |||
node1 = ExternalLink(wraptext("http://example.com"), brackets=False) | |||
node2 = ExternalLink(wraptext("http://example.com"), wraptext("Link")) | |||
node1.__showtree__(output.append, get, mark) | |||
node2.__showtree__(output.append, get, mark) | |||
valid = [ | |||
(getter, node1.url), "[", (getter, node2.url), | |||
(getter, node2.title), "]"] | |||
self.assertEqual(valid, output) | |||
assert "http://example.com" == node1.__strip__() | |||
assert node2.__strip__() is None | |||
assert node3.__strip__() is None | |||
assert "Link" == node4.__strip__() | |||
def test_url(self): | |||
"""test getter/setter for the url attribute""" | |||
url = wraptext("http://example.com/") | |||
node1 = ExternalLink(url, brackets=False) | |||
node2 = ExternalLink(url, wraptext("Example")) | |||
self.assertIs(url, node1.url) | |||
self.assertIs(url, node2.url) | |||
node1.url = "mailto:héhehé@spam.com" | |||
node2.url = "mailto:héhehé@spam.com" | |||
self.assertWikicodeEqual(wraptext("mailto:héhehé@spam.com"), node1.url) | |||
self.assertWikicodeEqual(wraptext("mailto:héhehé@spam.com"), node2.url) | |||
def test_showtree(): | |||
"""test ExternalLink.__showtree__()""" | |||
output = [] | |||
getter, marker = object(), object() | |||
get = lambda code: output.append((getter, code)) | |||
mark = lambda: output.append(marker) | |||
node1 = ExternalLink(wraptext("http://example.com"), brackets=False) | |||
node2 = ExternalLink(wraptext("http://example.com"), wraptext("Link")) | |||
node1.__showtree__(output.append, get, mark) | |||
node2.__showtree__(output.append, get, mark) | |||
valid = [ | |||
(getter, node1.url), "[", (getter, node2.url), | |||
(getter, node2.title), "]"] | |||
assert valid == output | |||
def test_title(self): | |||
"""test getter/setter for the title attribute""" | |||
title = wraptext("Example!") | |||
node1 = ExternalLink(wraptext("http://example.com/"), brackets=False) | |||
node2 = ExternalLink(wraptext("http://example.com/"), title) | |||
self.assertIs(None, node1.title) | |||
self.assertIs(title, node2.title) | |||
node2.title = None | |||
self.assertIs(None, node2.title) | |||
node2.title = "My Website" | |||
self.assertWikicodeEqual(wraptext("My Website"), node2.title) | |||
def test_url(): | |||
"""test getter/setter for the url attribute""" | |||
url = wraptext("http://example.com/") | |||
node1 = ExternalLink(url, brackets=False) | |||
node2 = ExternalLink(url, wraptext("Example")) | |||
assert url is node1.url | |||
assert url is node2.url | |||
node1.url = "mailto:héhehé@spam.com" | |||
node2.url = "mailto:héhehé@spam.com" | |||
assert_wikicode_equal(wraptext("mailto:héhehé@spam.com"), node1.url) | |||
assert_wikicode_equal(wraptext("mailto:héhehé@spam.com"), node2.url) | |||
def test_brackets(self): | |||
"""test getter/setter for the brackets attribute""" | |||
node1 = ExternalLink(wraptext("http://example.com/"), brackets=False) | |||
node2 = ExternalLink(wraptext("http://example.com/"), wraptext("Link")) | |||
self.assertFalse(node1.brackets) | |||
self.assertTrue(node2.brackets) | |||
node1.brackets = True | |||
node2.brackets = False | |||
self.assertTrue(node1.brackets) | |||
self.assertFalse(node2.brackets) | |||
self.assertEqual("[http://example.com/]", str(node1)) | |||
self.assertEqual("http://example.com/", str(node2)) | |||
def test_title(): | |||
"""test getter/setter for the title attribute""" | |||
title = wraptext("Example!") | |||
node1 = ExternalLink(wraptext("http://example.com/"), brackets=False) | |||
node2 = ExternalLink(wraptext("http://example.com/"), title) | |||
assert None is node1.title | |||
assert title is node2.title | |||
node2.title = None | |||
assert None is node2.title | |||
node2.title = "My Website" | |||
assert_wikicode_equal(wraptext("My Website"), node2.title) | |||
if __name__ == "__main__": | |||
unittest.main(verbosity=2) | |||
def test_brackets(): | |||
"""test getter/setter for the brackets attribute""" | |||
node1 = ExternalLink(wraptext("http://example.com/"), brackets=False) | |||
node2 = ExternalLink(wraptext("http://example.com/"), wraptext("Link")) | |||
assert node1.brackets is False | |||
assert node2.brackets is True | |||
node1.brackets = True | |||
node2.brackets = False | |||
assert node1.brackets is True | |||
assert node2.brackets is False | |||
assert "[http://example.com/]" == str(node1) | |||
assert "http://example.com/" == str(node2) |
@@ -18,65 +18,67 @@ | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
import unittest | |||
""" | |||
Test cases for the Heading node. | |||
""" | |||
from mwparserfromhell.nodes import Heading, Text | |||
from ._test_tree_equality import TreeEqualityTestCase, wrap, wraptext | |||
import pytest | |||
class TestHeading(TreeEqualityTestCase): | |||
"""Test cases for the Heading node.""" | |||
def test_str(self): | |||
"""test Heading.__str__()""" | |||
node = Heading(wraptext("foobar"), 2) | |||
self.assertEqual("==foobar==", str(node)) | |||
node2 = Heading(wraptext(" zzz "), 5) | |||
self.assertEqual("===== zzz =====", str(node2)) | |||
from mwparserfromhell.nodes import Heading, Text | |||
from .conftest import assert_wikicode_equal, wrap, wraptext | |||
def test_children(self): | |||
"""test Heading.__children__()""" | |||
node = Heading(wrap([Text("foo"), Text("bar")]), 3) | |||
gen = node.__children__() | |||
self.assertEqual(node.title, next(gen)) | |||
self.assertRaises(StopIteration, next, gen) | |||
def test_str(): | |||
"""test Heading.__str__()""" | |||
node = Heading(wraptext("foobar"), 2) | |||
assert "==foobar==" == str(node) | |||
node2 = Heading(wraptext(" zzz "), 5) | |||
assert "===== zzz =====" == str(node2) | |||
def test_strip(self): | |||
"""test Heading.__strip__()""" | |||
node = Heading(wraptext("foobar"), 3) | |||
self.assertEqual("foobar", node.__strip__()) | |||
def test_children(): | |||
"""test Heading.__children__()""" | |||
node = Heading(wrap([Text("foo"), Text("bar")]), 3) | |||
gen = node.__children__() | |||
assert node.title == next(gen) | |||
with pytest.raises(StopIteration): | |||
next(gen) | |||
def test_showtree(self): | |||
"""test Heading.__showtree__()""" | |||
output = [] | |||
getter = object() | |||
get = lambda code: output.append((getter, code)) | |||
node1 = Heading(wraptext("foobar"), 3) | |||
node2 = Heading(wraptext(" baz "), 4) | |||
node1.__showtree__(output.append, get, None) | |||
node2.__showtree__(output.append, get, None) | |||
valid = ["===", (getter, node1.title), "===", | |||
"====", (getter, node2.title), "===="] | |||
self.assertEqual(valid, output) | |||
def test_strip(): | |||
"""test Heading.__strip__()""" | |||
node = Heading(wraptext("foobar"), 3) | |||
assert "foobar" == node.__strip__() | |||
def test_title(self): | |||
"""test getter/setter for the title attribute""" | |||
title = wraptext("foobar") | |||
node = Heading(title, 3) | |||
self.assertIs(title, node.title) | |||
node.title = "héhehé" | |||
self.assertWikicodeEqual(wraptext("héhehé"), node.title) | |||
def test_showtree(): | |||
"""test Heading.__showtree__()""" | |||
output = [] | |||
getter = object() | |||
get = lambda code: output.append((getter, code)) | |||
node1 = Heading(wraptext("foobar"), 3) | |||
node2 = Heading(wraptext(" baz "), 4) | |||
node1.__showtree__(output.append, get, None) | |||
node2.__showtree__(output.append, get, None) | |||
valid = ["===", (getter, node1.title), "===", | |||
"====", (getter, node2.title), "===="] | |||
assert valid == output | |||
def test_level(self): | |||
"""test getter/setter for the level attribute""" | |||
node = Heading(wraptext("foobar"), 3) | |||
self.assertEqual(3, node.level) | |||
node.level = 5 | |||
self.assertEqual(5, node.level) | |||
self.assertRaises(ValueError, setattr, node, "level", 0) | |||
self.assertRaises(ValueError, setattr, node, "level", 7) | |||
self.assertRaises(ValueError, setattr, node, "level", "abc") | |||
self.assertRaises(ValueError, setattr, node, "level", False) | |||
def test_title(): | |||
"""test getter/setter for the title attribute""" | |||
title = wraptext("foobar") | |||
node = Heading(title, 3) | |||
assert title is node.title | |||
node.title = "héhehé" | |||
assert_wikicode_equal(wraptext("héhehé"), node.title) | |||
if __name__ == "__main__": | |||
unittest.main(verbosity=2) | |||
def test_level(): | |||
"""test getter/setter for the level attribute""" | |||
node = Heading(wraptext("foobar"), 3) | |||
assert 3 == node.level | |||
node.level = 5 | |||
assert 5 == node.level | |||
with pytest.raises(ValueError): | |||
node.__setattr__("level", 0) | |||
with pytest.raises(ValueError): | |||
node.__setattr__("level", 7) | |||
with pytest.raises(ValueError): | |||
node.__setattr__("level", "abc") | |||
with pytest.raises(ValueError): | |||
node.__setattr__("level", False) |
@@ -18,152 +18,165 @@ | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
import unittest | |||
""" | |||
Test cases for the HTMLEntity node. | |||
""" | |||
from mwparserfromhell.nodes import HTMLEntity | |||
from ._test_tree_equality import TreeEqualityTestCase | |||
class TestHTMLEntity(TreeEqualityTestCase): | |||
"""Test cases for the HTMLEntity node.""" | |||
def test_str(self): | |||
"""test HTMLEntity.__str__()""" | |||
node1 = HTMLEntity("nbsp", named=True, hexadecimal=False) | |||
node2 = HTMLEntity("107", named=False, hexadecimal=False) | |||
node3 = HTMLEntity("6b", named=False, hexadecimal=True) | |||
node4 = HTMLEntity("6C", named=False, hexadecimal=True, hex_char="X") | |||
self.assertEqual(" ", str(node1)) | |||
self.assertEqual("k", str(node2)) | |||
self.assertEqual("k", str(node3)) | |||
self.assertEqual("l", str(node4)) | |||
def test_children(self): | |||
"""test HTMLEntity.__children__()""" | |||
node = HTMLEntity("nbsp", named=True, hexadecimal=False) | |||
gen = node.__children__() | |||
self.assertRaises(StopIteration, next, gen) | |||
def test_strip(self): | |||
"""test HTMLEntity.__strip__()""" | |||
node1 = HTMLEntity("nbsp", named=True, hexadecimal=False) | |||
node2 = HTMLEntity("107", named=False, hexadecimal=False) | |||
node3 = HTMLEntity("e9", named=False, hexadecimal=True) | |||
self.assertEqual("\xa0", node1.__strip__(normalize=True)) | |||
self.assertEqual(" ", node1.__strip__(normalize=False)) | |||
self.assertEqual("k", node2.__strip__(normalize=True)) | |||
self.assertEqual("k", node2.__strip__(normalize=False)) | |||
self.assertEqual("é", node3.__strip__(normalize=True)) | |||
self.assertEqual("é", node3.__strip__(normalize=False)) | |||
def test_showtree(self): | |||
"""test HTMLEntity.__showtree__()""" | |||
output = [] | |||
node1 = HTMLEntity("nbsp", named=True, hexadecimal=False) | |||
node2 = HTMLEntity("107", named=False, hexadecimal=False) | |||
node3 = HTMLEntity("e9", named=False, hexadecimal=True) | |||
node1.__showtree__(output.append, None, None) | |||
node2.__showtree__(output.append, None, None) | |||
node3.__showtree__(output.append, None, None) | |||
res = [" ", "k", "é"] | |||
self.assertEqual(res, output) | |||
def test_value(self): | |||
"""test getter/setter for the value attribute""" | |||
node1 = HTMLEntity("nbsp") | |||
node2 = HTMLEntity("107") | |||
node3 = HTMLEntity("e9") | |||
self.assertEqual("nbsp", node1.value) | |||
self.assertEqual("107", node2.value) | |||
self.assertEqual("e9", node3.value) | |||
node1.value = "ffa4" | |||
node2.value = 72 | |||
node3.value = "Sigma" | |||
self.assertEqual("ffa4", node1.value) | |||
self.assertFalse(node1.named) | |||
self.assertTrue(node1.hexadecimal) | |||
self.assertEqual("72", node2.value) | |||
self.assertFalse(node2.named) | |||
self.assertFalse(node2.hexadecimal) | |||
self.assertEqual("Sigma", node3.value) | |||
self.assertTrue(node3.named) | |||
self.assertFalse(node3.hexadecimal) | |||
node1.value = "10FFFF" | |||
node2.value = 110000 | |||
node2.value = 1114111 | |||
self.assertRaises(ValueError, setattr, node3, "value", "") | |||
self.assertRaises(ValueError, setattr, node3, "value", "foobar") | |||
self.assertRaises(ValueError, setattr, node3, "value", True) | |||
self.assertRaises(ValueError, setattr, node3, "value", -1) | |||
self.assertRaises(ValueError, setattr, node1, "value", 110000) | |||
self.assertRaises(ValueError, setattr, node1, "value", "1114112") | |||
self.assertRaises(ValueError, setattr, node1, "value", "12FFFF") | |||
def test_named(self): | |||
"""test getter/setter for the named attribute""" | |||
node1 = HTMLEntity("nbsp") | |||
node2 = HTMLEntity("107") | |||
node3 = HTMLEntity("e9") | |||
self.assertTrue(node1.named) | |||
self.assertFalse(node2.named) | |||
self.assertFalse(node3.named) | |||
node1.named = 1 | |||
node2.named = 0 | |||
node3.named = 0 | |||
self.assertTrue(node1.named) | |||
self.assertFalse(node2.named) | |||
self.assertFalse(node3.named) | |||
self.assertRaises(ValueError, setattr, node1, "named", False) | |||
self.assertRaises(ValueError, setattr, node2, "named", True) | |||
self.assertRaises(ValueError, setattr, node3, "named", True) | |||
def test_hexadecimal(self): | |||
"""test getter/setter for the hexadecimal attribute""" | |||
node1 = HTMLEntity("nbsp") | |||
node2 = HTMLEntity("107") | |||
node3 = HTMLEntity("e9") | |||
self.assertFalse(node1.hexadecimal) | |||
self.assertFalse(node2.hexadecimal) | |||
self.assertTrue(node3.hexadecimal) | |||
node1.hexadecimal = False | |||
node2.hexadecimal = True | |||
node3.hexadecimal = False | |||
self.assertFalse(node1.hexadecimal) | |||
self.assertTrue(node2.hexadecimal) | |||
self.assertFalse(node3.hexadecimal) | |||
self.assertRaises(ValueError, setattr, node1, "hexadecimal", True) | |||
def test_hex_char(self): | |||
"""test getter/setter for the hex_char attribute""" | |||
node1 = HTMLEntity("e9") | |||
node2 = HTMLEntity("e9", hex_char="X") | |||
self.assertEqual("x", node1.hex_char) | |||
self.assertEqual("X", node2.hex_char) | |||
node1.hex_char = "X" | |||
node2.hex_char = "x" | |||
self.assertEqual("X", node1.hex_char) | |||
self.assertEqual("x", node2.hex_char) | |||
self.assertRaises(ValueError, setattr, node1, "hex_char", 123) | |||
self.assertRaises(ValueError, setattr, node1, "hex_char", "foobar") | |||
self.assertRaises(ValueError, setattr, node1, "hex_char", True) | |||
def test_normalize(self): | |||
"""test getter/setter for the normalize attribute""" | |||
node1 = HTMLEntity("nbsp") | |||
node2 = HTMLEntity("107") | |||
node3 = HTMLEntity("e9") | |||
node4 = HTMLEntity("1f648") | |||
node5 = HTMLEntity("-2") | |||
node6 = HTMLEntity("110000", named=False, hexadecimal=True) | |||
self.assertEqual("\xa0", node1.normalize()) | |||
self.assertEqual("k", node2.normalize()) | |||
self.assertEqual("é", node3.normalize()) | |||
self.assertEqual("\U0001F648", node4.normalize()) | |||
self.assertRaises(ValueError, node5.normalize) | |||
self.assertRaises(ValueError, node6.normalize) | |||
if __name__ == "__main__":
unittest.main(verbosity=2)
import pytest
from mwparserfromhell.nodes import HTMLEntity
def test_str(): | |||
"""test HTMLEntity.__str__()""" | |||
node1 = HTMLEntity("nbsp", named=True, hexadecimal=False) | |||
node2 = HTMLEntity("107", named=False, hexadecimal=False) | |||
node3 = HTMLEntity("6b", named=False, hexadecimal=True) | |||
node4 = HTMLEntity("6C", named=False, hexadecimal=True, hex_char="X") | |||
assert " " == str(node1) | |||
assert "k" == str(node2) | |||
assert "k" == str(node3) | |||
assert "l" == str(node4) | |||
def test_children(): | |||
"""test HTMLEntity.__children__()""" | |||
node = HTMLEntity("nbsp", named=True, hexadecimal=False) | |||
gen = node.__children__() | |||
with pytest.raises(StopIteration): | |||
next(gen) | |||
def test_strip(): | |||
"""test HTMLEntity.__strip__()""" | |||
node1 = HTMLEntity("nbsp", named=True, hexadecimal=False) | |||
node2 = HTMLEntity("107", named=False, hexadecimal=False) | |||
node3 = HTMLEntity("e9", named=False, hexadecimal=True) | |||
assert "\xa0" == node1.__strip__(normalize=True) | |||
assert " " == node1.__strip__(normalize=False) | |||
assert "k" == node2.__strip__(normalize=True) | |||
assert "k" == node2.__strip__(normalize=False) | |||
assert "é" == node3.__strip__(normalize=True) | |||
assert "é" == node3.__strip__(normalize=False) | |||
def test_showtree(): | |||
"""test HTMLEntity.__showtree__()""" | |||
output = [] | |||
node1 = HTMLEntity("nbsp", named=True, hexadecimal=False) | |||
node2 = HTMLEntity("107", named=False, hexadecimal=False) | |||
node3 = HTMLEntity("e9", named=False, hexadecimal=True) | |||
node1.__showtree__(output.append, None, None) | |||
node2.__showtree__(output.append, None, None) | |||
node3.__showtree__(output.append, None, None) | |||
res = ["&nbsp;", "&#107;", "&#xe9;"]
assert res == output | |||
def test_value(): | |||
"""test getter/setter for the value attribute""" | |||
node1 = HTMLEntity("nbsp") | |||
node2 = HTMLEntity("107") | |||
node3 = HTMLEntity("e9") | |||
assert "nbsp" == node1.value | |||
assert "107" == node2.value | |||
assert "e9" == node3.value | |||
node1.value = "ffa4" | |||
node2.value = 72 | |||
node3.value = "Sigma" | |||
assert "ffa4" == node1.value | |||
assert node1.named is False | |||
assert node1.hexadecimal is True | |||
assert "72" == node2.value | |||
assert node2.named is False | |||
assert node2.hexadecimal is False | |||
assert "Sigma" == node3.value | |||
assert node3.named is True | |||
assert node3.hexadecimal is False | |||
node1.value = "10FFFF" | |||
node2.value = 110000 | |||
node2.value = 1114111 | |||
with pytest.raises(ValueError): | |||
node3.__setattr__("value", "") | |||
with pytest.raises(ValueError): | |||
node3.__setattr__("value", "foobar") | |||
with pytest.raises(ValueError): | |||
node3.__setattr__("value", True) | |||
with pytest.raises(ValueError): | |||
node3.__setattr__("value", -1) | |||
with pytest.raises(ValueError): | |||
node1.__setattr__("value", 110000) | |||
with pytest.raises(ValueError): | |||
node1.__setattr__("value", "1114112") | |||
with pytest.raises(ValueError): | |||
node1.__setattr__("value", "12FFFF") | |||
def test_named(): | |||
"""test getter/setter for the named attribute""" | |||
node1 = HTMLEntity("nbsp") | |||
node2 = HTMLEntity("107") | |||
node3 = HTMLEntity("e9") | |||
assert node1.named is True | |||
assert node2.named is False | |||
assert node3.named is False | |||
node1.named = 1 | |||
node2.named = 0 | |||
node3.named = 0 | |||
assert node1.named is True | |||
assert node2.named is False | |||
assert node3.named is False | |||
with pytest.raises(ValueError): | |||
node1.__setattr__("named", False) | |||
with pytest.raises(ValueError): | |||
node2.__setattr__("named", True) | |||
with pytest.raises(ValueError): | |||
node3.__setattr__("named", True) | |||
def test_hexadecimal(): | |||
"""test getter/setter for the hexadecimal attribute""" | |||
node1 = HTMLEntity("nbsp") | |||
node2 = HTMLEntity("107") | |||
node3 = HTMLEntity("e9") | |||
assert node1.hexadecimal is False | |||
assert node2.hexadecimal is False | |||
assert node3.hexadecimal is True | |||
node1.hexadecimal = False | |||
node2.hexadecimal = True | |||
node3.hexadecimal = False | |||
assert node1.hexadecimal is False | |||
assert node2.hexadecimal is True | |||
assert node3.hexadecimal is False | |||
with pytest.raises(ValueError): | |||
node1.__setattr__("hexadecimal", True) | |||
def test_hex_char(): | |||
"""test getter/setter for the hex_char attribute""" | |||
node1 = HTMLEntity("e9") | |||
node2 = HTMLEntity("e9", hex_char="X") | |||
assert "x" == node1.hex_char | |||
assert "X" == node2.hex_char | |||
node1.hex_char = "X" | |||
node2.hex_char = "x" | |||
assert "X" == node1.hex_char | |||
assert "x" == node2.hex_char | |||
with pytest.raises(ValueError): | |||
node1.__setattr__("hex_char", 123) | |||
with pytest.raises(ValueError): | |||
node1.__setattr__("hex_char", "foobar") | |||
with pytest.raises(ValueError): | |||
node1.__setattr__("hex_char", True) | |||
def test_normalize(): | |||
"""test getter/setter for the normalize attribute""" | |||
node1 = HTMLEntity("nbsp") | |||
node2 = HTMLEntity("107") | |||
node3 = HTMLEntity("e9") | |||
node4 = HTMLEntity("1f648") | |||
node5 = HTMLEntity("-2") | |||
node6 = HTMLEntity("110000", named=False, hexadecimal=True) | |||
assert "\xa0" == node1.normalize() | |||
assert "k" == node2.normalize() | |||
assert "é" == node3.normalize() | |||
assert "\U0001F648" == node4.normalize() | |||
with pytest.raises(ValueError): | |||
node5.normalize() | |||
with pytest.raises(ValueError): | |||
node6.normalize() |
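The assertions above pin down the split between an entity's wikitext form and its resolved character: str() re-emits the markup ("&nbsp;", "&#107;", "&#x6b;"), while normalize() and __strip__(normalize=True) return the decoded character, and out-of-range code points raise ValueError. A minimal sketch of that behaviour, with the expected values taken from the assertions above:

    from mwparserfromhell.nodes import HTMLEntity

    ent = HTMLEntity("nbsp", named=True, hexadecimal=False)
    print(str(ent))          # "&nbsp;" -- the wikitext markup, unchanged
    print(ent.normalize())   # "\xa0"   -- the resolved character

    ent = HTMLEntity("6b", named=False, hexadecimal=True)
    print(str(ent))          # "&#x6b;"
    print(ent.normalize())   # "k"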
@@ -18,54 +18,52 @@ | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
import unittest

from mwparserfromhell.nodes.extras import Parameter
from ._test_tree_equality import TreeEqualityTestCase, wraptext

class TestParameter(TreeEqualityTestCase):
    """Test cases for the Parameter node extra."""

    def test_str(self):
        """test Parameter.__str__()"""
        node = Parameter(wraptext("1"), wraptext("foo"), showkey=False)
        self.assertEqual("foo", str(node))
        node2 = Parameter(wraptext("foo"), wraptext("bar"))
        self.assertEqual("foo=bar", str(node2))

    def test_name(self):
        """test getter/setter for the name attribute"""
        name1 = wraptext("1")
        name2 = wraptext("foobar")
        node1 = Parameter(name1, wraptext("foobar"), showkey=False)
        node2 = Parameter(name2, wraptext("baz"))
        self.assertIs(name1, node1.name)
        self.assertIs(name2, node2.name)
        node1.name = "héhehé"
        node2.name = "héhehé"
        self.assertWikicodeEqual(wraptext("héhehé"), node1.name)
        self.assertWikicodeEqual(wraptext("héhehé"), node2.name)

    def test_value(self):
        """test getter/setter for the value attribute"""
        value = wraptext("bar")
        node = Parameter(wraptext("foo"), value)
        self.assertIs(value, node.value)
        node.value = "héhehé"
        self.assertWikicodeEqual(wraptext("héhehé"), node.value)

    def test_showkey(self):
        """test getter/setter for the showkey attribute"""
        node1 = Parameter(wraptext("1"), wraptext("foo"), showkey=False)
        node2 = Parameter(wraptext("foo"), wraptext("bar"))
        self.assertFalse(node1.showkey)
        self.assertTrue(node2.showkey)
        node1.showkey = True
        self.assertTrue(node1.showkey)
        node1.showkey = ""
        self.assertFalse(node1.showkey)
        self.assertRaises(ValueError, setattr, node2, "showkey", False)

if __name__ == "__main__":
    unittest.main(verbosity=2)

"""
Test cases for the Parameter node extra.
"""

import pytest

from mwparserfromhell.nodes.extras import Parameter
from .conftest import assert_wikicode_equal, wraptext

def test_str():
    """test Parameter.__str__()"""
    node = Parameter(wraptext("1"), wraptext("foo"), showkey=False)
    assert "foo" == str(node)
    node2 = Parameter(wraptext("foo"), wraptext("bar"))
    assert "foo=bar" == str(node2)

def test_name():
    """test getter/setter for the name attribute"""
    name1 = wraptext("1")
    name2 = wraptext("foobar")
    node1 = Parameter(name1, wraptext("foobar"), showkey=False)
    node2 = Parameter(name2, wraptext("baz"))
    assert name1 is node1.name
    assert name2 is node2.name
    node1.name = "héhehé"
    node2.name = "héhehé"
    assert_wikicode_equal(wraptext("héhehé"), node1.name)
    assert_wikicode_equal(wraptext("héhehé"), node2.name)

def test_value():
    """test getter/setter for the value attribute"""
    value = wraptext("bar")
    node = Parameter(wraptext("foo"), value)
    assert value is node.value
    node.value = "héhehé"
    assert_wikicode_equal(wraptext("héhehé"), node.value)

def test_showkey():
    """test getter/setter for the showkey attribute"""
    node1 = Parameter(wraptext("1"), wraptext("foo"), showkey=False)
    node2 = Parameter(wraptext("foo"), wraptext("bar"))
    assert node1.showkey is False
    assert node2.showkey is True
    node1.showkey = True
    assert node1.showkey is True
    node1.showkey = ""
    assert node1.showkey is False
    with pytest.raises(ValueError):
        node2.__setattr__("showkey", False)
@@ -18,68 +18,60 @@ | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
import unittest

from mwparserfromhell import parser
from mwparserfromhell.nodes import Tag, Template, Text, Wikilink
from mwparserfromhell.nodes.extras import Parameter
from ._test_tree_equality import TreeEqualityTestCase, wrap, wraptext

class TestParser(TreeEqualityTestCase):
    """Tests for the Parser class itself, which tokenizes and builds nodes."""

    def test_use_c(self):
        """make sure the correct tokenizer is used"""
        restore = parser.use_c
        if parser.use_c:
            self.assertTrue(parser.Parser()._tokenizer.USES_C)
            parser.use_c = False
            self.assertFalse(parser.Parser()._tokenizer.USES_C)
        parser.use_c = restore

    def test_parsing(self):
        """integration test for parsing overall"""
        text = "this is text; {{this|is=a|template={{with|[[links]]|in}}it}}"
        expected = wrap([
            Text("this is text; "),
            Template(wraptext("this"), [
                Parameter(wraptext("is"), wraptext("a")),
                Parameter(wraptext("template"), wrap([
                    Template(wraptext("with"), [
                        Parameter(wraptext("1"),
                                  wrap([Wikilink(wraptext("links"))]),
                                  showkey=False),
                        Parameter(wraptext("2"),
                                  wraptext("in"), showkey=False)
                    ]),
                    Text("it")
                ]))
            ])
        ])
        actual = parser.Parser().parse(text)
        self.assertWikicodeEqual(expected, actual)

    def test_skip_style_tags(self):
        """test Parser.parse(skip_style_tags=True)"""
        def test():
            with_style = parser.Parser().parse(text, skip_style_tags=False)
            without_style = parser.Parser().parse(text, skip_style_tags=True)
            self.assertWikicodeEqual(a, with_style)
            self.assertWikicodeEqual(b, without_style)

        text = "This is an example with ''italics''!"
        a = wrap([Text("This is an example with "),
                  Tag(wraptext("i"), wraptext("italics"), wiki_markup="''"),
                  Text("!")])
        b = wraptext("This is an example with ''italics''!")

        restore = parser.use_c
        if parser.use_c:
            test()
            parser.use_c = False
        test()
        parser.use_c = restore

if __name__ == "__main__":
    unittest.main(verbosity=2)

"""
Tests for the Parser class itself, which tokenizes and builds nodes.
"""

import pytest

from mwparserfromhell import parser
from mwparserfromhell.nodes import Tag, Template, Text, Wikilink
from mwparserfromhell.nodes.extras import Parameter
from .conftest import assert_wikicode_equal, wrap, wraptext

@pytest.fixture()
def pyparser():
    """make sure the correct tokenizer is used"""
    restore = parser.use_c
    if parser.use_c:
        parser.use_c = False
    yield
    parser.use_c = restore

def test_use_c(pyparser):
    assert parser.Parser()._tokenizer.USES_C is False

def test_parsing(pyparser):
    """integration test for parsing overall"""
    text = "this is text; {{this|is=a|template={{with|[[links]]|in}}it}}"
    expected = wrap([
        Text("this is text; "),
        Template(wraptext("this"), [
            Parameter(wraptext("is"), wraptext("a")),
            Parameter(wraptext("template"), wrap([
                Template(wraptext("with"), [
                    Parameter(wraptext("1"),
                              wrap([Wikilink(wraptext("links"))]),
                              showkey=False),
                    Parameter(wraptext("2"),
                              wraptext("in"), showkey=False)
                ]),
                Text("it")
            ]))
        ])
    ])
    actual = parser.Parser().parse(text)
    assert_wikicode_equal(expected, actual)

def test_skip_style_tags(pyparser):
    """test Parser.parse(skip_style_tags=True)"""
    text = "This is an example with ''italics''!"
    a = wrap([Text("This is an example with "),
              Tag(wraptext("i"), wraptext("italics"), wiki_markup="''"),
              Text("!")])
    b = wraptext("This is an example with ''italics''!")

    with_style = parser.Parser().parse(text, skip_style_tags=False)
    without_style = parser.Parser().parse(text, skip_style_tags=True)
    assert_wikicode_equal(a, with_style)
    assert_wikicode_equal(b, without_style)
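The pyparser fixture above replaces the save/restore boilerplate that each unittest method carried: everything before the yield runs as setup (forcing the pure-Python tokenizer by clearing parser.use_c), and everything after it runs as teardown, so the flag is restored even if a test fails. pytest injects the fixture by matching the parameter name, which is why test_use_c(pyparser) takes an argument it never touches. A minimal standalone sketch of the same yield-fixture pattern; the class and flag names here are illustrative, not part of mwparserfromhell:

    import pytest

    class FakeConfig:
        use_c = True  # stand-in for a module-level feature flag

    @pytest.fixture()
    def pure_python():
        restore = FakeConfig.use_c
        FakeConfig.use_c = False    # setup: force the pure-Python path
        yield
        FakeConfig.use_c = restore  # teardown: runs even if the test fails

    def test_flag_is_off(pure_python):
        assert FakeConfig.use_c is False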
@@ -1,47 +0,0 @@ | |||
# Copyright (C) 2012-2019 Ben Kurtovic <ben.kurtovic@gmail.com> | |||
# | |||
# Permission is hereby granted, free of charge, to any person obtaining a copy | |||
# of this software and associated documentation files (the "Software"), to deal | |||
# in the Software without restriction, including without limitation the rights | |||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | |||
# copies of the Software, and to permit persons to whom the Software is | |||
# furnished to do so, subject to the following conditions: | |||
# | |||
# The above copyright notice and this permission notice shall be included in | |||
# all copies or substantial portions of the Software. | |||
# | |||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | |||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | |||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
import unittest | |||
from mwparserfromhell.parser import contexts | |||
from mwparserfromhell.parser.tokenizer import Tokenizer | |||
from ._test_tokenizer import TokenizerTestCase | |||
class TestPyTokenizer(TokenizerTestCase, unittest.TestCase): | |||
"""Test cases for the Python tokenizer.""" | |||
@classmethod | |||
def setUpClass(cls): | |||
cls.tokenizer = Tokenizer | |||
if not TokenizerTestCase.skip_others: | |||
def test_uses_c(self): | |||
"""make sure the Python tokenizer identifies as not using C""" | |||
self.assertFalse(Tokenizer.USES_C) | |||
self.assertFalse(Tokenizer().USES_C) | |||
def test_describe_context(self): | |||
self.assertEqual("", contexts.describe(0)) | |||
ctx = contexts.describe(contexts.TEMPLATE_PARAM_KEY|contexts.HAS_TEXT) | |||
self.assertEqual("TEMPLATE_PARAM_KEY|HAS_TEXT", ctx) | |||
if __name__ == "__main__": | |||
unittest.main(verbosity=2) |
@@ -1,34 +0,0 @@ | |||
# Copyright (C) 2012-2016 Ben Kurtovic <ben.kurtovic@gmail.com> | |||
# | |||
# Permission is hereby granted, free of charge, to any person obtaining a copy | |||
# of this software and associated documentation files (the "Software"), to deal | |||
# in the Software without restriction, including without limitation the rights | |||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | |||
# copies of the Software, and to permit persons to whom the Software is | |||
# furnished to do so, subject to the following conditions: | |||
# | |||
# The above copyright notice and this permission notice shall be included in | |||
# all copies or substantial portions of the Software. | |||
# | |||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | |||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | |||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
import unittest | |||
from ._test_tokenizer import TokenizerTestCase | |||
class TestRoundtripping(TokenizerTestCase, unittest.TestCase): | |||
"""Test cases for roundtripping tokens back to wikitext.""" | |||
@classmethod | |||
def setUpClass(cls): | |||
cls.roundtrip = True | |||
if __name__ == "__main__": | |||
unittest.main(verbosity=2) |
@@ -18,392 +18,392 @@ | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
import unittest | |||
""" | |||
Test cases for the SmartList class and its child, ListProxy. | |||
""" | |||
import pytest | |||
from mwparserfromhell.smart_list import SmartList | |||
from mwparserfromhell.smart_list.list_proxy import ListProxy | |||
class TestSmartList(unittest.TestCase): | |||
"""Test cases for the SmartList class and its child, ListProxy.""" | |||
def _test_get_set_del_item(self, builder): | |||
"""Run tests on __get/set/delitem__ of a list built with *builder*.""" | |||
def assign(L, s1, s2, s3, val): | |||
L[s1:s2:s3] = val | |||
def delete(L, s1): | |||
del L[s1] | |||
list1 = builder([0, 1, 2, 3, "one", "two"]) | |||
list2 = builder(list(range(10))) | |||
self.assertEqual(1, list1[1]) | |||
self.assertEqual("one", list1[-2]) | |||
self.assertEqual([2, 3], list1[2:4]) | |||
self.assertRaises(IndexError, lambda: list1[6]) | |||
self.assertRaises(IndexError, lambda: list1[-7]) | |||
self.assertEqual([0, 1, 2], list1[:3]) | |||
self.assertEqual([0, 1, 2, 3, "one", "two"], list1[:]) | |||
self.assertEqual([3, "one", "two"], list1[3:]) | |||
self.assertEqual([3, "one", "two"], list1[3:100]) | |||
self.assertEqual(["one", "two"], list1[-2:]) | |||
self.assertEqual([0, 1], list1[:-4]) | |||
self.assertEqual([], list1[6:]) | |||
self.assertEqual([], list1[4:2]) | |||
self.assertEqual([0, 2, "one"], list1[0:5:2]) | |||
self.assertEqual([0, 2], list1[0:-3:2]) | |||
self.assertEqual([0, 1, 2, 3, "one", "two"], list1[::]) | |||
self.assertEqual([2, 3, "one", "two"], list1[2::]) | |||
self.assertEqual([0, 1, 2, 3], list1[:4:]) | |||
self.assertEqual([2, 3], list1[2:4:]) | |||
self.assertEqual([0, 2, 4, 6, 8], list2[::2]) | |||
self.assertEqual([2, 5, 8], list2[2::3]) | |||
self.assertEqual([0, 3], list2[:6:3]) | |||
self.assertEqual([2, 5, 8], list2[-8:9:3]) | |||
self.assertEqual([], list2[100000:1000:-100]) | |||
list1[3] = 100 | |||
self.assertEqual(100, list1[3]) | |||
list1[-3] = 101 | |||
self.assertEqual([0, 1, 2, 101, "one", "two"], list1) | |||
list1[5:] = [6, 7, 8] | |||
self.assertEqual([6, 7, 8], list1[5:]) | |||
self.assertEqual([0, 1, 2, 101, "one", 6, 7, 8], list1) | |||
list1[2:4] = [-1, -2, -3, -4, -5] | |||
self.assertEqual([0, 1, -1, -2, -3, -4, -5, "one", 6, 7, 8], list1) | |||
list1[0:-3] = [99] | |||
self.assertEqual([99, 6, 7, 8], list1) | |||
list2[0:6:2] = [100, 102, 104] | |||
self.assertEqual([100, 1, 102, 3, 104, 5, 6, 7, 8, 9], list2) | |||
list2[::3] = [200, 203, 206, 209] | |||
self.assertEqual([200, 1, 102, 203, 104, 5, 206, 7, 8, 209], list2) | |||
list2[::] = range(7) | |||
self.assertEqual([0, 1, 2, 3, 4, 5, 6], list2) | |||
self.assertRaises(ValueError, assign, list2, 0, 5, 2, | |||
[100, 102, 104, 106]) | |||
with self.assertRaises(IndexError): | |||
list2[7] = "foo" | |||
with self.assertRaises(IndexError): | |||
list2[-8] = "foo" | |||
del list2[2] | |||
self.assertEqual([0, 1, 3, 4, 5, 6], list2) | |||
del list2[-3] | |||
self.assertEqual([0, 1, 3, 5, 6], list2) | |||
self.assertRaises(IndexError, delete, list2, 100) | |||
self.assertRaises(IndexError, delete, list2, -6) | |||
list2[:] = range(10) | |||
del list2[3:6] | |||
self.assertEqual([0, 1, 2, 6, 7, 8, 9], list2) | |||
del list2[-2:] | |||
self.assertEqual([0, 1, 2, 6, 7], list2) | |||
del list2[:2] | |||
self.assertEqual([2, 6, 7], list2) | |||
list2[:] = range(10) | |||
del list2[2:8:2] | |||
self.assertEqual([0, 1, 3, 5, 7, 8, 9], list2) | |||
def _test_add_radd_iadd(self, builder): | |||
"""Run tests on __r/i/add__ of a list built with *builder*.""" | |||
list1 = builder(range(5)) | |||
list2 = builder(range(5, 10)) | |||
self.assertEqual([0, 1, 2, 3, 4, 5, 6], list1 + [5, 6]) | |||
self.assertEqual([0, 1, 2, 3, 4], list1) | |||
self.assertEqual(list(range(10)), list1 + list2) | |||
self.assertEqual([-2, -1, 0, 1, 2, 3, 4], [-2, -1] + list1) | |||
self.assertEqual([0, 1, 2, 3, 4], list1) | |||
list1 += ["foo", "bar", "baz"] | |||
self.assertEqual([0, 1, 2, 3, 4, "foo", "bar", "baz"], list1) | |||
def _test_other_magic_methods(self, builder): | |||
"""Run tests on other magic methods of a list built with *builder*.""" | |||
list1 = builder([0, 1, 2, 3, "one", "two"]) | |||
list2 = builder([]) | |||
list3 = builder([0, 2, 3, 4]) | |||
list4 = builder([0, 1, 2]) | |||
self.assertEqual("[0, 1, 2, 3, 'one', 'two']", str(list1)) | |||
self.assertEqual(b"\x00\x01\x02", bytes(list4)) | |||
self.assertEqual("[0, 1, 2, 3, 'one', 'two']", repr(list1)) | |||
self.assertLess(list1, list3) | |||
self.assertLessEqual(list1, list3) | |||
self.assertNotEqual(list1, list3) | |||
self.assertNotEqual(list1, list3) | |||
self.assertLessEqual(list1, list3) | |||
self.assertLess(list1, list3) | |||
other1 = [0, 2, 3, 4] | |||
self.assertLess(list1, other1) | |||
self.assertLessEqual(list1, other1) | |||
self.assertNotEqual(list1, other1) | |||
self.assertNotEqual(list1, other1) | |||
self.assertLessEqual(list1, other1) | |||
self.assertLess(list1, other1) | |||
other2 = [0, 0, 1, 2] | |||
self.assertGreaterEqual(list1, other2) | |||
self.assertGreater(list1, other2) | |||
self.assertNotEqual(list1, other2) | |||
self.assertNotEqual(list1, other2) | |||
self.assertGreater(list1, other2) | |||
self.assertGreaterEqual(list1, other2) | |||
other3 = [0, 1, 2, 3, "one", "two"] | |||
self.assertGreaterEqual(list1, other3) | |||
self.assertLessEqual(list1, other3) | |||
self.assertEqual(list1, other3) | |||
self.assertEqual(list1, other3) | |||
self.assertLessEqual(list1, other3) | |||
self.assertGreaterEqual(list1, other3) | |||
self.assertTrue(bool(list1)) | |||
self.assertFalse(bool(list2)) | |||
self.assertEqual(6, len(list1)) | |||
self.assertEqual(0, len(list2)) | |||
out = [] | |||
for obj in list1: | |||
out.append(obj) | |||
self.assertEqual([0, 1, 2, 3, "one", "two"], out) | |||
out = [] | |||
for ch in list2: | |||
out.append(ch) | |||
self.assertEqual([], out) | |||
gen1 = iter(list1) | |||
out = [] | |||
for _ in range(len(list1)): | |||
out.append(next(gen1)) | |||
self.assertRaises(StopIteration, next, gen1) | |||
self.assertEqual([0, 1, 2, 3, "one", "two"], out) | |||
gen2 = iter(list2) | |||
self.assertRaises(StopIteration, next, gen2) | |||
self.assertEqual(["two", "one", 3, 2, 1, 0], list(reversed(list1))) | |||
self.assertEqual([], list(reversed(list2))) | |||
self.assertIn("one", list1) | |||
self.assertIn(3, list1) | |||
self.assertNotIn(10, list1) | |||
self.assertNotIn(0, list2) | |||
self.assertEqual([], list2 * 5) | |||
self.assertEqual([], 5 * list2) | |||
self.assertEqual([0, 1, 2, 0, 1, 2, 0, 1, 2], list4 * 3) | |||
self.assertEqual([0, 1, 2, 0, 1, 2, 0, 1, 2], 3 * list4) | |||
list4 *= 2 | |||
self.assertEqual([0, 1, 2, 0, 1, 2], list4) | |||
def _test_list_methods(self, builder): | |||
"""Run tests on the public methods of a list built with *builder*.""" | |||
list1 = builder(range(5)) | |||
list2 = builder(["foo"]) | |||
list3 = builder([("a", 5), ("d", 2), ("b", 8), ("c", 3)]) | |||
list1.append(5) | |||
list1.append(1) | |||
list1.append(2) | |||
self.assertEqual([0, 1, 2, 3, 4, 5, 1, 2], list1) | |||
self.assertEqual(0, list1.count(6)) | |||
self.assertEqual(2, list1.count(1)) | |||
list1.extend(range(5, 8)) | |||
self.assertEqual([0, 1, 2, 3, 4, 5, 1, 2, 5, 6, 7], list1) | |||
self.assertEqual(1, list1.index(1)) | |||
self.assertEqual(6, list1.index(1, 3)) | |||
self.assertEqual(6, list1.index(1, 3, 7)) | |||
self.assertRaises(ValueError, list1.index, 1, 3, 5) | |||
list1.insert(0, -1) | |||
self.assertEqual([-1, 0, 1, 2, 3, 4, 5, 1, 2, 5, 6, 7], list1) | |||
list1.insert(-1, 6.5) | |||
self.assertEqual([-1, 0, 1, 2, 3, 4, 5, 1, 2, 5, 6, 6.5, 7], list1) | |||
list1.insert(13, 8) | |||
self.assertEqual([-1, 0, 1, 2, 3, 4, 5, 1, 2, 5, 6, 6.5, 7, 8], list1) | |||
self.assertEqual(8, list1.pop()) | |||
self.assertEqual(7, list1.pop()) | |||
self.assertEqual([-1, 0, 1, 2, 3, 4, 5, 1, 2, 5, 6, 6.5], list1) | |||
self.assertEqual(-1, list1.pop(0)) | |||
self.assertEqual(5, list1.pop(5)) | |||
self.assertEqual(6.5, list1.pop(-1)) | |||
self.assertEqual([0, 1, 2, 3, 4, 1, 2, 5, 6], list1) | |||
self.assertEqual("foo", list2.pop()) | |||
self.assertRaises(IndexError, list2.pop) | |||
self.assertEqual([], list2) | |||
list1.remove(6) | |||
self.assertEqual([0, 1, 2, 3, 4, 1, 2, 5], list1) | |||
def _test_get_set_del_item(builder): | |||
"""Run tests on __get/set/delitem__ of a list built with *builder*.""" | |||
list1 = builder([0, 1, 2, 3, "one", "two"]) | |||
list2 = builder(list(range(10))) | |||
assert 1 == list1[1] | |||
assert "one" == list1[-2] | |||
assert [2, 3] == list1[2:4] | |||
with pytest.raises(IndexError): | |||
list1[6] | |||
with pytest.raises(IndexError): | |||
list1[-7] | |||
assert [0, 1, 2] == list1[:3] | |||
assert [0, 1, 2, 3, "one", "two"] == list1[:] | |||
assert [3, "one", "two"] == list1[3:] | |||
assert [3, "one", "two"] == list1[3:100] | |||
assert ["one", "two"] == list1[-2:] | |||
assert [0, 1] == list1[:-4] | |||
assert [] == list1[6:] | |||
assert [] == list1[4:2] | |||
assert [0, 2, "one"] == list1[0:5:2] | |||
assert [0, 2] == list1[0:-3:2] | |||
assert [0, 1, 2, 3, "one", "two"] == list1[::] | |||
assert [2, 3, "one", "two"] == list1[2::] | |||
assert [0, 1, 2, 3] == list1[:4:] | |||
assert [2, 3] == list1[2:4:] | |||
assert [0, 2, 4, 6, 8] == list2[::2] | |||
assert [2, 5, 8] == list2[2::3] | |||
assert [0, 3] == list2[:6:3] | |||
assert [2, 5, 8] == list2[-8:9:3] | |||
assert [] == list2[100000:1000:-100] | |||
list1[3] = 100 | |||
assert 100 == list1[3] | |||
list1[-3] = 101 | |||
assert [0, 1, 2, 101, "one", "two"] == list1 | |||
list1[5:] = [6, 7, 8] | |||
assert [6, 7, 8] == list1[5:] | |||
assert [0, 1, 2, 101, "one", 6, 7, 8] == list1 | |||
list1[2:4] = [-1, -2, -3, -4, -5] | |||
assert [0, 1, -1, -2, -3, -4, -5, "one", 6, 7, 8] == list1 | |||
list1[0:-3] = [99] | |||
assert [99, 6, 7, 8] == list1 | |||
list2[0:6:2] = [100, 102, 104] | |||
assert [100, 1, 102, 3, 104, 5, 6, 7, 8, 9] == list2 | |||
list2[::3] = [200, 203, 206, 209] | |||
assert [200, 1, 102, 203, 104, 5, 206, 7, 8, 209] == list2 | |||
list2[::] = range(7) | |||
assert [0, 1, 2, 3, 4, 5, 6] == list2 | |||
with pytest.raises(ValueError): | |||
list2[0:5:2] = [100, 102, 104, 106] | |||
with pytest.raises(IndexError): | |||
list2[7] = "foo" | |||
with pytest.raises(IndexError): | |||
list2[-8] = "foo" | |||
del list2[2] | |||
assert [0, 1, 3, 4, 5, 6] == list2 | |||
del list2[-3] | |||
assert [0, 1, 3, 5, 6] == list2 | |||
with pytest.raises(IndexError): | |||
del list2[100] | |||
with pytest.raises(IndexError): | |||
del list2[-6] | |||
list2[:] = range(10) | |||
del list2[3:6] | |||
assert [0, 1, 2, 6, 7, 8, 9] == list2 | |||
del list2[-2:] | |||
assert [0, 1, 2, 6, 7] == list2 | |||
del list2[:2] | |||
assert [2, 6, 7] == list2 | |||
list2[:] = range(10) | |||
del list2[2:8:2] | |||
assert [0, 1, 3, 5, 7, 8, 9] == list2 | |||
def _test_add_radd_iadd(builder): | |||
"""Run tests on __r/i/add__ of a list built with *builder*.""" | |||
list1 = builder(range(5)) | |||
list2 = builder(range(5, 10)) | |||
assert [0, 1, 2, 3, 4, 5, 6] == list1 + [5, 6] | |||
assert [0, 1, 2, 3, 4] == list1 | |||
assert list(range(10)) == list1 + list2 | |||
assert [-2, -1, 0, 1, 2, 3, 4] == [-2, -1] + list1
assert [0, 1, 2, 3, 4] == list1 | |||
list1 += ["foo", "bar", "baz"] | |||
assert [0, 1, 2, 3, 4, "foo", "bar", "baz"] == list1 | |||
def _test_other_magic_methods(builder): | |||
"""Run tests on other magic methods of a list built with *builder*.""" | |||
list1 = builder([0, 1, 2, 3, "one", "two"]) | |||
list2 = builder([]) | |||
list3 = builder([0, 2, 3, 4]) | |||
list4 = builder([0, 1, 2]) | |||
assert "[0, 1, 2, 3, 'one', 'two']" == str(list1) | |||
assert b"\x00\x01\x02" == bytes(list4) | |||
assert "[0, 1, 2, 3, 'one', 'two']" == repr(list1) | |||
assert list1 < list3 | |||
assert list1 <= list3 | |||
assert list1 != list3 | |||
assert list1 != list3 | |||
assert list1 <= list3 | |||
assert list1 < list3 | |||
other1 = [0, 2, 3, 4] | |||
assert list1 < other1 | |||
assert list1 <= other1 | |||
assert list1 != other1 | |||
assert list1 != other1 | |||
assert list1 <= other1 | |||
assert list1 < other1 | |||
other2 = [0, 0, 1, 2] | |||
assert list1 >= other2 | |||
assert list1 > other2 | |||
assert list1 != other2 | |||
assert list1 != other2 | |||
assert list1 > other2 | |||
assert list1 >= other2 | |||
other3 = [0, 1, 2, 3, "one", "two"] | |||
assert list1 >= other3 | |||
assert list1 <= other3 | |||
assert list1 == other3 | |||
assert list1 == other3 | |||
assert list1 <= other3 | |||
assert list1 >= other3 | |||
assert bool(list1) is True | |||
assert bool(list2) is False | |||
assert 6 == len(list1) | |||
assert 0 == len(list2) | |||
out = [] | |||
for obj in list1: | |||
out.append(obj) | |||
assert [0, 1, 2, 3, "one", "two"] == out | |||
out = [] | |||
for ch in list2: | |||
out.append(ch) | |||
assert [] == out | |||
gen1 = iter(list1) | |||
out = [] | |||
for _ in range(len(list1)): | |||
out.append(next(gen1)) | |||
with pytest.raises(StopIteration): | |||
next(gen1) | |||
assert [0, 1, 2, 3, "one", "two"] == out | |||
gen2 = iter(list2) | |||
with pytest.raises(StopIteration): | |||
next(gen2) | |||
assert ["two", "one", 3, 2, 1, 0] == list(reversed(list1)) | |||
assert [] == list(reversed(list2)) | |||
assert "one" in list1 | |||
assert 3 in list1 | |||
assert 10 not in list1 | |||
assert 0 not in list2 | |||
assert [] == list2 * 5 | |||
assert [] == 5 * list2 | |||
assert [0, 1, 2, 0, 1, 2, 0, 1, 2] == list4 * 3 | |||
assert [0, 1, 2, 0, 1, 2, 0, 1, 2] == 3 * list4 | |||
list4 *= 2 | |||
assert [0, 1, 2, 0, 1, 2] == list4 | |||
def _test_list_methods(builder): | |||
"""Run tests on the public methods of a list built with *builder*.""" | |||
list1 = builder(range(5)) | |||
list2 = builder(["foo"]) | |||
list3 = builder([("a", 5), ("d", 2), ("b", 8), ("c", 3)]) | |||
list1.append(5) | |||
list1.append(1) | |||
list1.append(2) | |||
assert [0, 1, 2, 3, 4, 5, 1, 2] == list1 | |||
assert 0 == list1.count(6) | |||
assert 2 == list1.count(1) | |||
list1.extend(range(5, 8)) | |||
assert [0, 1, 2, 3, 4, 5, 1, 2, 5, 6, 7] == list1 | |||
assert 1 == list1.index(1) | |||
assert 6 == list1.index(1, 3) | |||
assert 6 == list1.index(1, 3, 7) | |||
with pytest.raises(ValueError): | |||
list1.index(1, 3, 5) | |||
list1.insert(0, -1) | |||
assert [-1, 0, 1, 2, 3, 4, 5, 1, 2, 5, 6, 7] == list1 | |||
list1.insert(-1, 6.5) | |||
assert [-1, 0, 1, 2, 3, 4, 5, 1, 2, 5, 6, 6.5, 7] == list1 | |||
list1.insert(13, 8) | |||
assert [-1, 0, 1, 2, 3, 4, 5, 1, 2, 5, 6, 6.5, 7, 8] == list1 | |||
assert 8 == list1.pop() | |||
assert 7 == list1.pop() | |||
assert [-1, 0, 1, 2, 3, 4, 5, 1, 2, 5, 6, 6.5] == list1 | |||
assert -1 == list1.pop(0) | |||
assert 5 == list1.pop(5) | |||
assert 6.5 == list1.pop(-1) | |||
assert [0, 1, 2, 3, 4, 1, 2, 5, 6] == list1 | |||
assert "foo" == list2.pop() | |||
with pytest.raises(IndexError): | |||
list2.pop() | |||
assert [] == list2 | |||
list1.remove(6) | |||
assert [0, 1, 2, 3, 4, 1, 2, 5] == list1 | |||
list1.remove(1) | |||
assert [0, 2, 3, 4, 1, 2, 5] == list1 | |||
list1.remove(1) | |||
assert [0, 2, 3, 4, 2, 5] == list1 | |||
with pytest.raises(ValueError): | |||
list1.remove(1) | |||
self.assertEqual([0, 2, 3, 4, 1, 2, 5], list1) | |||
list1.remove(1) | |||
self.assertEqual([0, 2, 3, 4, 2, 5], list1) | |||
self.assertRaises(ValueError, list1.remove, 1) | |||
list1.reverse() | |||
self.assertEqual([5, 2, 4, 3, 2, 0], list1) | |||
list1.sort() | |||
self.assertEqual([0, 2, 2, 3, 4, 5], list1) | |||
list1.sort(reverse=True) | |||
self.assertEqual([5, 4, 3, 2, 2, 0], list1) | |||
list3.sort(key=lambda i: i[1]) | |||
self.assertEqual([("d", 2), ("c", 3), ("a", 5), ("b", 8)], list3) | |||
list3.sort(key=lambda i: i[1], reverse=True) | |||
self.assertEqual([("b", 8), ("a", 5), ("c", 3), ("d", 2)], list3) | |||
@staticmethod | |||
def _dispatch_test_for_children(meth): | |||
"""Run a test method on various different types of children.""" | |||
meth(lambda L: SmartList(list(L))[:]) | |||
meth(lambda L: SmartList([999] + list(L))[1:]) | |||
meth(lambda L: SmartList(list(L) + [999])[:-1]) | |||
meth(lambda L: SmartList([101, 102] + list(L) + [201, 202])[2:-2]) | |||
def test_docs(self): | |||
"""make sure the methods of SmartList/ListProxy have docstrings""" | |||
methods = ["append", "count", "extend", "index", "insert", "pop", | |||
"remove", "reverse", "sort"] | |||
for meth in methods: | |||
expected = getattr(list, meth).__doc__ | |||
smartlist_doc = getattr(SmartList, meth).__doc__ | |||
listproxy_doc = getattr(ListProxy, meth).__doc__ | |||
self.assertEqual(expected, smartlist_doc) | |||
self.assertEqual(expected, listproxy_doc) | |||
def test_doctest(self): | |||
"""make sure the test embedded in SmartList's docstring passes""" | |||
parent = SmartList([0, 1, 2, 3]) | |||
self.assertEqual([0, 1, 2, 3], parent) | |||
child = parent[2:] | |||
self.assertEqual([2, 3], child) | |||
child.append(4) | |||
self.assertEqual([2, 3, 4], child) | |||
self.assertEqual([0, 1, 2, 3, 4], parent) | |||
def test_parent_get_set_del(self): | |||
"""make sure SmartList's getitem/setitem/delitem work""" | |||
self._test_get_set_del_item(SmartList) | |||
def test_parent_add(self): | |||
"""make sure SmartList's add/radd/iadd work""" | |||
self._test_add_radd_iadd(SmartList) | |||
def test_parent_other_magics(self): | |||
"""make sure SmartList's other magically implemented features work""" | |||
self._test_other_magic_methods(SmartList) | |||
def test_parent_methods(self): | |||
"""make sure SmartList's non-magic methods work, like append()""" | |||
self._test_list_methods(SmartList) | |||
def test_child_get_set_del(self): | |||
"""make sure ListProxy's getitem/setitem/delitem work""" | |||
self._dispatch_test_for_children(self._test_get_set_del_item) | |||
def test_child_add(self): | |||
"""make sure ListProxy's add/radd/iadd work""" | |||
self._dispatch_test_for_children(self._test_add_radd_iadd) | |||
def test_child_other_magics(self): | |||
"""make sure ListProxy's other magically implemented features work""" | |||
self._dispatch_test_for_children(self._test_other_magic_methods) | |||
def test_child_methods(self): | |||
"""make sure ListProxy's non-magic methods work, like append()""" | |||
self._dispatch_test_for_children(self._test_list_methods) | |||
def test_influence(self): | |||
"""make sure changes are propagated from parents to children""" | |||
parent = SmartList([0, 1, 2, 3, 4, 5]) | |||
child1 = parent[2:] | |||
child2 = parent[2:5] | |||
self.assertEqual([0, 1, 2, 3, 4, 5], parent) | |||
self.assertEqual([2, 3, 4, 5], child1) | |||
self.assertEqual([2, 3, 4], child2) | |||
self.assertEqual(2, len(parent._children)) | |||
parent.append(6) | |||
child1.append(7) | |||
child2.append(4.5) | |||
self.assertEqual([0, 1, 2, 3, 4, 4.5, 5, 6, 7], parent) | |||
self.assertEqual([2, 3, 4, 4.5, 5, 6, 7], child1) | |||
self.assertEqual([2, 3, 4, 4.5], child2) | |||
parent.insert(0, -1) | |||
parent.insert(4, 2.5) | |||
parent.insert(10, 6.5) | |||
self.assertEqual([-1, 0, 1, 2, 2.5, 3, 4, 4.5, 5, 6, 6.5, 7], parent) | |||
self.assertEqual([2, 2.5, 3, 4, 4.5, 5, 6, 6.5, 7], child1) | |||
self.assertEqual([2, 2.5, 3, 4, 4.5], child2) | |||
self.assertEqual(7, parent.pop()) | |||
self.assertEqual(6.5, child1.pop()) | |||
self.assertEqual(4.5, child2.pop()) | |||
self.assertEqual([-1, 0, 1, 2, 2.5, 3, 4, 5, 6], parent) | |||
self.assertEqual([2, 2.5, 3, 4, 5, 6], child1) | |||
self.assertEqual([2, 2.5, 3, 4], child2) | |||
parent.remove(-1) | |||
child1.remove(2.5) | |||
self.assertEqual([0, 1, 2, 3, 4, 5, 6], parent) | |||
self.assertEqual([2, 3, 4, 5, 6], child1) | |||
self.assertEqual([2, 3, 4], child2) | |||
self.assertEqual(0, parent.pop(0)) | |||
self.assertEqual([1, 2, 3, 4, 5, 6], parent) | |||
self.assertEqual([2, 3, 4, 5, 6], child1) | |||
self.assertEqual([2, 3, 4], child2) | |||
child2.reverse() | |||
self.assertEqual([1, 4, 3, 2, 5, 6], parent) | |||
self.assertEqual([4, 3, 2, 5, 6], child1) | |||
self.assertEqual([4, 3, 2], child2) | |||
parent.extend([7, 8]) | |||
child1.extend([8.1, 8.2]) | |||
child2.extend([1.9, 1.8]) | |||
self.assertEqual([1, 4, 3, 2, 1.9, 1.8, 5, 6, 7, 8, 8.1, 8.2], parent) | |||
self.assertEqual([4, 3, 2, 1.9, 1.8, 5, 6, 7, 8, 8.1, 8.2], child1) | |||
self.assertEqual([4, 3, 2, 1.9, 1.8], child2) | |||
child3 = parent[9:] | |||
self.assertEqual([8, 8.1, 8.2], child3) | |||
del parent[8:] | |||
self.assertEqual([1, 4, 3, 2, 1.9, 1.8, 5, 6], parent) | |||
self.assertEqual([4, 3, 2, 1.9, 1.8, 5, 6], child1) | |||
self.assertEqual([4, 3, 2, 1.9, 1.8], child2) | |||
self.assertEqual([], child3) | |||
self.assertEqual(0, len(child3)) | |||
del child1 | |||
self.assertEqual([1, 4, 3, 2, 1.9, 1.8, 5, 6], parent) | |||
self.assertEqual([4, 3, 2, 1.9, 1.8], child2) | |||
self.assertEqual([], child3) | |||
self.assertEqual(2, len(parent._children)) | |||
del child3 | |||
self.assertEqual([1, 4, 3, 2, 1.9, 1.8, 5, 6], parent) | |||
self.assertEqual([4, 3, 2, 1.9, 1.8], child2) | |||
self.assertEqual(1, len(parent._children)) | |||
parent.remove(1.9) | |||
parent.remove(1.8) | |||
self.assertEqual([1, 4, 3, 2, 5, 6], parent) | |||
self.assertEqual([4, 3, 2], child2) | |||
parent.reverse() | |||
self.assertEqual([6, 5, 2, 3, 4, 1], parent) | |||
self.assertEqual([4, 3, 2], child2) | |||
self.assertEqual(0, len(parent._children)) | |||
if __name__ == "__main__": | |||
unittest.main(verbosity=2) | |||
list1.reverse() | |||
assert [5, 2, 4, 3, 2, 0] == list1 | |||
list1.sort() | |||
assert [0, 2, 2, 3, 4, 5] == list1 | |||
list1.sort(reverse=True) | |||
assert [5, 4, 3, 2, 2, 0] == list1 | |||
list3.sort(key=lambda i: i[1]) | |||
assert [("d", 2), ("c", 3), ("a", 5), ("b", 8)] == list3 | |||
list3.sort(key=lambda i: i[1], reverse=True) | |||
assert [("b", 8), ("a", 5), ("c", 3), ("d", 2)] == list3 | |||
def _dispatch_test_for_children(meth): | |||
"""Run a test method on various different types of children.""" | |||
meth(lambda L: SmartList(list(L))[:]) | |||
meth(lambda L: SmartList([999] + list(L))[1:]) | |||
meth(lambda L: SmartList(list(L) + [999])[:-1]) | |||
meth(lambda L: SmartList([101, 102] + list(L) + [201, 202])[2:-2]) | |||
def test_docs(): | |||
"""make sure the methods of SmartList/ListProxy have docstrings""" | |||
methods = ["append", "count", "extend", "index", "insert", "pop", | |||
"remove", "reverse", "sort"] | |||
for meth in methods: | |||
expected = getattr(list, meth).__doc__ | |||
smartlist_doc = getattr(SmartList, meth).__doc__ | |||
listproxy_doc = getattr(ListProxy, meth).__doc__ | |||
assert expected == smartlist_doc | |||
assert expected == listproxy_doc | |||
def test_doctest(): | |||
"""make sure the test embedded in SmartList's docstring passes""" | |||
parent = SmartList([0, 1, 2, 3]) | |||
assert [0, 1, 2, 3] == parent | |||
child = parent[2:] | |||
assert [2, 3] == child | |||
child.append(4) | |||
assert [2, 3, 4] == child | |||
assert [0, 1, 2, 3, 4] == parent | |||
def test_parent_get_set_del(): | |||
"""make sure SmartList's getitem/setitem/delitem work""" | |||
_test_get_set_del_item(SmartList) | |||
def test_parent_add(): | |||
"""make sure SmartList's add/radd/iadd work""" | |||
_test_add_radd_iadd(SmartList) | |||
def test_parent_other_magics(): | |||
"""make sure SmartList's other magically implemented features work""" | |||
_test_other_magic_methods(SmartList) | |||
def test_parent_methods(): | |||
"""make sure SmartList's non-magic methods work, like append()""" | |||
_test_list_methods(SmartList) | |||
def test_child_get_set_del(): | |||
"""make sure ListProxy's getitem/setitem/delitem work""" | |||
_dispatch_test_for_children(_test_get_set_del_item) | |||
def test_child_add(): | |||
"""make sure ListProxy's add/radd/iadd work""" | |||
_dispatch_test_for_children(_test_add_radd_iadd) | |||
def test_child_other_magics(): | |||
"""make sure ListProxy's other magically implemented features work""" | |||
_dispatch_test_for_children(_test_other_magic_methods) | |||
def test_child_methods(): | |||
"""make sure ListProxy's non-magic methods work, like append()""" | |||
_dispatch_test_for_children(_test_list_methods) | |||
def test_influence(): | |||
"""make sure changes are propagated from parents to children""" | |||
parent = SmartList([0, 1, 2, 3, 4, 5]) | |||
child1 = parent[2:] | |||
child2 = parent[2:5] | |||
assert [0, 1, 2, 3, 4, 5] == parent | |||
assert [2, 3, 4, 5] == child1 | |||
assert [2, 3, 4] == child2 | |||
assert 2 == len(parent._children) | |||
parent.append(6) | |||
child1.append(7) | |||
child2.append(4.5) | |||
assert [0, 1, 2, 3, 4, 4.5, 5, 6, 7] == parent | |||
assert [2, 3, 4, 4.5, 5, 6, 7] == child1 | |||
assert [2, 3, 4, 4.5] == child2 | |||
parent.insert(0, -1) | |||
parent.insert(4, 2.5) | |||
parent.insert(10, 6.5) | |||
assert [-1, 0, 1, 2, 2.5, 3, 4, 4.5, 5, 6, 6.5, 7] == parent | |||
assert [2, 2.5, 3, 4, 4.5, 5, 6, 6.5, 7] == child1 | |||
assert [2, 2.5, 3, 4, 4.5] == child2 | |||
assert 7 == parent.pop() | |||
assert 6.5 == child1.pop() | |||
assert 4.5 == child2.pop() | |||
assert [-1, 0, 1, 2, 2.5, 3, 4, 5, 6] == parent | |||
assert [2, 2.5, 3, 4, 5, 6] == child1 | |||
assert [2, 2.5, 3, 4] == child2 | |||
parent.remove(-1) | |||
child1.remove(2.5) | |||
assert [0, 1, 2, 3, 4, 5, 6] == parent | |||
assert [2, 3, 4, 5, 6] == child1 | |||
assert [2, 3, 4] == child2 | |||
assert 0 == parent.pop(0) | |||
assert [1, 2, 3, 4, 5, 6] == parent | |||
assert [2, 3, 4, 5, 6] == child1 | |||
assert [2, 3, 4] == child2 | |||
child2.reverse() | |||
assert [1, 4, 3, 2, 5, 6] == parent | |||
assert [4, 3, 2, 5, 6] == child1 | |||
assert [4, 3, 2] == child2 | |||
parent.extend([7, 8]) | |||
child1.extend([8.1, 8.2]) | |||
child2.extend([1.9, 1.8]) | |||
assert [1, 4, 3, 2, 1.9, 1.8, 5, 6, 7, 8, 8.1, 8.2] == parent | |||
assert [4, 3, 2, 1.9, 1.8, 5, 6, 7, 8, 8.1, 8.2] == child1 | |||
assert [4, 3, 2, 1.9, 1.8] == child2 | |||
child3 = parent[9:] | |||
assert [8, 8.1, 8.2] == child3 | |||
del parent[8:] | |||
assert [1, 4, 3, 2, 1.9, 1.8, 5, 6] == parent | |||
assert [4, 3, 2, 1.9, 1.8, 5, 6] == child1 | |||
assert [4, 3, 2, 1.9, 1.8] == child2 | |||
assert [] == child3 | |||
assert 0 == len(child3) | |||
del child1 | |||
assert [1, 4, 3, 2, 1.9, 1.8, 5, 6] == parent | |||
assert [4, 3, 2, 1.9, 1.8] == child2 | |||
assert [] == child3 | |||
assert 2 == len(parent._children) | |||
del child3 | |||
assert [1, 4, 3, 2, 1.9, 1.8, 5, 6] == parent | |||
assert [4, 3, 2, 1.9, 1.8] == child2 | |||
assert 1 == len(parent._children) | |||
parent.remove(1.9) | |||
parent.remove(1.8) | |||
assert [1, 4, 3, 2, 5, 6] == parent | |||
assert [4, 3, 2] == child2 | |||
parent.reverse() | |||
assert [6, 5, 2, 3, 4, 1] == parent | |||
assert [4, 3, 2] == child2 | |||
assert 0 == len(parent._children) |
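What test_doctest and test_influence above exercise is the defining behaviour of SmartList: slicing a SmartList returns a live ListProxy view rather than a copy, so edits propagate in both directions between parent and child, and the parent tracks its live views in _children until they are garbage-collected. A short usage sketch, with the expected values taken from the assertions above:

    from mwparserfromhell.smart_list import SmartList

    parent = SmartList([0, 1, 2, 3])
    child = parent[2:]      # a live ListProxy view, not a copy
    child.append(4)
    print(child)            # [2, 3, 4]
    print(parent)           # [0, 1, 2, 3, 4] -- the append propagated upward

The reverse also holds: mutating the parent inside the slice's range is visible through the child, which is what test_influence checks alongside the _children bookkeeping.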
@@ -18,9 +18,14 @@ | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
from sys import getdefaultencoding | |||
""" | |||
Test cases for the StringMixIn class. | |||
""" | |||
import sys | |||
from types import GeneratorType | |||
import unittest | |||
import pytest | |||
from mwparserfromhell.string_mixin import StringMixIn | |||
@@ -31,373 +36,378 @@ class _FakeString(StringMixIn): | |||
def __str__(self): | |||
return self._data | |||
class TestStringMixIn(unittest.TestCase): | |||
"""Test cases for the StringMixIn class.""" | |||
def test_docs(self): | |||
"""make sure the various methods of StringMixIn have docstrings""" | |||
methods = [ | |||
"capitalize", "casefold", "center", "count", "encode", "endswith", | |||
"expandtabs", "find", "format", "format_map", "index", "isalnum", | |||
"isalpha", "isdecimal", "isdigit", "isidentifier", "islower", | |||
"isnumeric", "isprintable", "isspace", "istitle", "isupper", | |||
"join", "ljust", "lower", "lstrip", "maketrans", "partition", | |||
"replace", "rfind", "rindex", "rjust", "rpartition", "rsplit", | |||
"rstrip", "split", "splitlines", "startswith", "strip", "swapcase", | |||
"title", "translate", "upper", "zfill" | |||
] | |||
for meth in methods: | |||
expected = getattr("foo", meth).__doc__ | |||
actual = getattr(_FakeString("foo"), meth).__doc__ | |||
self.assertEqual(expected, actual) | |||
def test_types(self): | |||
"""make sure StringMixIns convert to different types correctly""" | |||
fstr = _FakeString("fake string") | |||
self.assertEqual(str(fstr), "fake string") | |||
self.assertEqual(bytes(fstr), b"fake string") | |||
self.assertEqual(repr(fstr), "'fake string'") | |||
self.assertIsInstance(str(fstr), str) | |||
self.assertIsInstance(bytes(fstr), bytes) | |||
self.assertIsInstance(repr(fstr), str) | |||
def test_comparisons(self): | |||
"""make sure comparison operators work""" | |||
str1 = _FakeString("this is a fake string") | |||
str2 = _FakeString("this is a fake string") | |||
str3 = _FakeString("fake string, this is") | |||
str4 = "this is a fake string" | |||
str5 = "fake string, this is" | |||
self.assertLessEqual(str1, str2) | |||
self.assertGreaterEqual(str1, str2) | |||
self.assertEqual(str1, str2) | |||
self.assertEqual(str1, str2) | |||
self.assertGreaterEqual(str1, str2) | |||
self.assertLessEqual(str1, str2) | |||
self.assertGreater(str1, str3) | |||
self.assertGreaterEqual(str1, str3) | |||
self.assertNotEqual(str1, str3) | |||
self.assertNotEqual(str1, str3) | |||
self.assertGreaterEqual(str1, str3) | |||
self.assertGreater(str1, str3) | |||
self.assertLessEqual(str1, str4) | |||
self.assertGreaterEqual(str1, str4) | |||
self.assertEqual(str1, str4) | |||
self.assertEqual(str1, str4) | |||
self.assertGreaterEqual(str1, str4) | |||
self.assertLessEqual(str1, str4) | |||
self.assertLessEqual(str5, str1) | |||
self.assertLess(str5, str1) | |||
self.assertNotEqual(str5, str1) | |||
self.assertNotEqual(str5, str1) | |||
self.assertLess(str5, str1) | |||
self.assertLessEqual(str5, str1) | |||
def test_other_magics(self): | |||
"""test other magically implemented features, like len() and iter()""" | |||
str1 = _FakeString("fake string") | |||
str2 = _FakeString("") | |||
expected = ["f", "a", "k", "e", " ", "s", "t", "r", "i", "n", "g"] | |||
self.assertTrue(str1) | |||
self.assertFalse(str2) | |||
self.assertEqual(11, len(str1)) | |||
self.assertEqual(0, len(str2)) | |||
out = [] | |||
for ch in str1: | |||
out.append(ch) | |||
self.assertEqual(expected, out) | |||
out = [] | |||
for ch in str2: | |||
out.append(ch) | |||
self.assertEqual([], out) | |||
gen1 = iter(str1) | |||
gen2 = iter(str2) | |||
self.assertIsInstance(gen1, GeneratorType) | |||
self.assertIsInstance(gen2, GeneratorType) | |||
out = [] | |||
for _ in range(len(str1)): | |||
out.append(next(gen1)) | |||
self.assertRaises(StopIteration, next, gen1) | |||
self.assertEqual(expected, out) | |||
self.assertRaises(StopIteration, next, gen2) | |||
self.assertEqual("gnirts ekaf", "".join(list(reversed(str1)))) | |||
self.assertEqual([], list(reversed(str2))) | |||
self.assertEqual("f", str1[0]) | |||
self.assertEqual(" ", str1[4]) | |||
self.assertEqual("g", str1[10]) | |||
self.assertEqual("n", str1[-2]) | |||
self.assertRaises(IndexError, lambda: str1[11]) | |||
self.assertRaises(IndexError, lambda: str2[0]) | |||
self.assertIn("k", str1) | |||
self.assertIn("fake", str1) | |||
self.assertIn("str", str1) | |||
self.assertIn("", str1) | |||
self.assertIn("", str2) | |||
self.assertNotIn("real", str1) | |||
self.assertNotIn("s", str2) | |||
def test_other_methods(self): | |||
"""test the remaining non-magic methods of StringMixIn""" | |||
str1 = _FakeString("fake string") | |||
self.assertEqual("Fake string", str1.capitalize()) | |||
self.assertEqual(" fake string ", str1.center(15)) | |||
self.assertEqual(" fake string ", str1.center(16)) | |||
self.assertEqual("qqfake stringqq", str1.center(15, "q")) | |||
self.assertEqual(1, str1.count("e")) | |||
self.assertEqual(0, str1.count("z")) | |||
self.assertEqual(1, str1.count("r", 7)) | |||
self.assertEqual(0, str1.count("r", 8)) | |||
self.assertEqual(1, str1.count("r", 5, 9)) | |||
self.assertEqual(0, str1.count("r", 5, 7)) | |||
str3 = _FakeString("𐌲𐌿𐍄") | |||
actual = b"\xF0\x90\x8C\xB2\xF0\x90\x8C\xBF\xF0\x90\x8D\x84" | |||
self.assertEqual(b"fake string", str1.encode()) | |||
self.assertEqual(actual, str3.encode("utf-8")) | |||
self.assertEqual(actual, str3.encode(encoding="utf-8")) | |||
if getdefaultencoding() == "ascii": | |||
self.assertRaises(UnicodeEncodeError, str3.encode) | |||
elif getdefaultencoding() == "utf-8": | |||
self.assertEqual(actual, str3.encode()) | |||
self.assertRaises(UnicodeEncodeError, str3.encode, "ascii") | |||
self.assertRaises(UnicodeEncodeError, str3.encode, "ascii", "strict") | |||
if getdefaultencoding() == "ascii": | |||
self.assertRaises(UnicodeEncodeError, str3.encode, errors="strict") | |||
elif getdefaultencoding() == "utf-8": | |||
self.assertEqual(actual, str3.encode(errors="strict")) | |||
self.assertEqual(b"", str3.encode("ascii", "ignore")) | |||
if getdefaultencoding() == "ascii": | |||
self.assertEqual(b"", str3.encode(errors="ignore")) | |||
elif getdefaultencoding() == "utf-8": | |||
self.assertEqual(actual, str3.encode(errors="ignore")) | |||
self.assertTrue(str1.endswith("ing")) | |||
self.assertFalse(str1.endswith("ingh")) | |||
str4 = _FakeString("\tfoobar") | |||
self.assertEqual("fake string", str1) | |||
self.assertEqual(" foobar", str4.expandtabs()) | |||
self.assertEqual(" foobar", str4.expandtabs(4)) | |||
self.assertEqual(3, str1.find("e")) | |||
self.assertEqual(-1, str1.find("z")) | |||
self.assertEqual(7, str1.find("r", 7)) | |||
self.assertEqual(-1, str1.find("r", 8)) | |||
self.assertEqual(7, str1.find("r", 5, 9)) | |||
self.assertEqual(-1, str1.find("r", 5, 7)) | |||
str5 = _FakeString("foo{0}baz") | |||
str6 = _FakeString("foo{abc}baz") | |||
str7 = _FakeString("foo{0}{abc}buzz") | |||
str8 = _FakeString("{0}{1}") | |||
self.assertEqual("fake string", str1.format()) | |||
self.assertEqual("foobarbaz", str5.format("bar")) | |||
self.assertEqual("foobarbaz", str6.format(abc="bar")) | |||
self.assertEqual("foobarbazbuzz", str7.format("bar", abc="baz")) | |||
self.assertRaises(IndexError, str8.format, "abc") | |||
self.assertEqual("fake string", str1.format_map({})) | |||
self.assertEqual("foobarbaz", str6.format_map({"abc": "bar"})) | |||
self.assertRaises(ValueError, str5.format_map, {0: "abc"}) | |||
self.assertEqual(3, str1.index("e")) | |||
self.assertRaises(ValueError, str1.index, "z") | |||
self.assertEqual(7, str1.index("r", 7)) | |||
self.assertRaises(ValueError, str1.index, "r", 8) | |||
self.assertEqual(7, str1.index("r", 5, 9)) | |||
self.assertRaises(ValueError, str1.index, "r", 5, 7) | |||
str9 = _FakeString("foobar") | |||
str10 = _FakeString("foobar123") | |||
str11 = _FakeString("foo bar") | |||
self.assertTrue(str9.isalnum()) | |||
self.assertTrue(str10.isalnum()) | |||
self.assertFalse(str11.isalnum()) | |||
self.assertTrue(str9.isalpha()) | |||
self.assertFalse(str10.isalpha()) | |||
self.assertFalse(str11.isalpha()) | |||
str12 = _FakeString("123") | |||
str13 = _FakeString("\u2155") | |||
str14 = _FakeString("\u00B2") | |||
self.assertFalse(str9.isdecimal()) | |||
self.assertTrue(str12.isdecimal()) | |||
self.assertFalse(str13.isdecimal()) | |||
self.assertFalse(str14.isdecimal()) | |||
self.assertFalse(str9.isdigit()) | |||
self.assertTrue(str12.isdigit()) | |||
self.assertFalse(str13.isdigit()) | |||
self.assertTrue(str14.isdigit()) | |||
self.assertTrue(str9.isidentifier()) | |||
self.assertTrue(str10.isidentifier()) | |||
self.assertFalse(str11.isidentifier()) | |||
self.assertFalse(str12.isidentifier()) | |||
str15 = _FakeString("") | |||
str16 = _FakeString("FooBar") | |||
self.assertTrue(str9.islower()) | |||
self.assertFalse(str15.islower()) | |||
self.assertFalse(str16.islower()) | |||
self.assertFalse(str9.isnumeric()) | |||
self.assertTrue(str12.isnumeric()) | |||
self.assertTrue(str13.isnumeric()) | |||
self.assertTrue(str14.isnumeric()) | |||
str16B = _FakeString("\x01\x02") | |||
self.assertTrue(str9.isprintable()) | |||
self.assertTrue(str13.isprintable()) | |||
self.assertTrue(str14.isprintable()) | |||
self.assertTrue(str15.isprintable()) | |||
self.assertFalse(str16B.isprintable()) | |||
str17 = _FakeString(" ") | |||
str18 = _FakeString("\t \t \r\n") | |||
self.assertFalse(str1.isspace()) | |||
self.assertFalse(str9.isspace()) | |||
self.assertTrue(str17.isspace()) | |||
self.assertTrue(str18.isspace()) | |||
str19 = _FakeString("This Sentence Looks Like A Title") | |||
str20 = _FakeString("This sentence doesn't LookLikeATitle") | |||
self.assertFalse(str15.istitle()) | |||
self.assertTrue(str19.istitle()) | |||
self.assertFalse(str20.istitle()) | |||
str21 = _FakeString("FOOBAR") | |||
self.assertFalse(str9.isupper()) | |||
self.assertFalse(str15.isupper()) | |||
self.assertTrue(str21.isupper()) | |||
self.assertEqual("foobar", str15.join(["foo", "bar"])) | |||
self.assertEqual("foo123bar123baz", str12.join(("foo", "bar", "baz"))) | |||
self.assertEqual("fake string ", str1.ljust(15)) | |||
self.assertEqual("fake string ", str1.ljust(16)) | |||
self.assertEqual("fake stringqqqq", str1.ljust(15, "q")) | |||
str22 = _FakeString("ß") | |||
self.assertEqual("", str15.lower()) | |||
self.assertEqual("foobar", str16.lower()) | |||
self.assertEqual("ß", str22.lower()) | |||
self.assertEqual("", str15.casefold()) | |||
self.assertEqual("foobar", str16.casefold()) | |||
self.assertEqual("ss", str22.casefold()) | |||
str23 = _FakeString(" fake string ") | |||
self.assertEqual("fake string", str1.lstrip()) | |||
self.assertEqual("fake string ", str23.lstrip()) | |||
self.assertEqual("ke string", str1.lstrip("abcdef")) | |||
self.assertEqual(("fa", "ke", " string"), str1.partition("ke")) | |||
self.assertEqual(("fake string", "", ""), str1.partition("asdf")) | |||
str24 = _FakeString("boo foo moo") | |||
self.assertEqual("real string", str1.replace("fake", "real")) | |||
self.assertEqual("bu fu moo", str24.replace("oo", "u", 2)) | |||
self.assertEqual(3, str1.rfind("e")) | |||
self.assertEqual(-1, str1.rfind("z")) | |||
self.assertEqual(7, str1.rfind("r", 7)) | |||
self.assertEqual(-1, str1.rfind("r", 8)) | |||
self.assertEqual(7, str1.rfind("r", 5, 9)) | |||
self.assertEqual(-1, str1.rfind("r", 5, 7)) | |||
self.assertEqual(3, str1.rindex("e")) | |||
self.assertRaises(ValueError, str1.rindex, "z") | |||
self.assertEqual(7, str1.rindex("r", 7)) | |||
self.assertRaises(ValueError, str1.rindex, "r", 8) | |||
self.assertEqual(7, str1.rindex("r", 5, 9)) | |||
self.assertRaises(ValueError, str1.rindex, "r", 5, 7) | |||
self.assertEqual(" fake string", str1.rjust(15)) | |||
self.assertEqual(" fake string", str1.rjust(16)) | |||
self.assertEqual("qqqqfake string", str1.rjust(15, "q")) | |||
self.assertEqual(("fa", "ke", " string"), str1.rpartition("ke")) | |||
self.assertEqual(("", "", "fake string"), str1.rpartition("asdf")) | |||
str25 = _FakeString(" this is a sentence with whitespace ") | |||
actual = ["this", "is", "a", "sentence", "with", "whitespace"] | |||
self.assertEqual(actual, str25.rsplit()) | |||
self.assertEqual(actual, str25.rsplit(None)) | |||
actual = ["", "", "", "this", "is", "a", "", "", "sentence", "with", | |||
"", "whitespace", ""] | |||
self.assertEqual(actual, str25.rsplit(" ")) | |||
actual = [" this is a", "sentence", "with", "whitespace"] | |||
self.assertEqual(actual, str25.rsplit(None, 3)) | |||
actual = [" this is a sentence with", "", "whitespace", ""] | |||
self.assertEqual(actual, str25.rsplit(" ", 3)) | |||
actual = [" this is a", "sentence", "with", "whitespace"] | |||
self.assertEqual(actual, str25.rsplit(maxsplit=3)) | |||
self.assertEqual("fake string", str1.rstrip()) | |||
self.assertEqual(" fake string", str23.rstrip()) | |||
self.assertEqual("fake stri", str1.rstrip("ngr")) | |||
actual = ["this", "is", "a", "sentence", "with", "whitespace"] | |||
self.assertEqual(actual, str25.split()) | |||
self.assertEqual(actual, str25.split(None)) | |||
actual = ["", "", "", "this", "is", "a", "", "", "sentence", "with", | |||
"", "whitespace", ""] | |||
self.assertEqual(actual, str25.split(" ")) | |||
actual = ["this", "is", "a", "sentence with whitespace "] | |||
self.assertEqual(actual, str25.split(None, 3)) | |||
actual = ["", "", "", "this is a sentence with whitespace "] | |||
self.assertEqual(actual, str25.split(" ", 3)) | |||
actual = ["this", "is", "a", "sentence with whitespace "] | |||
self.assertEqual(actual, str25.split(maxsplit=3)) | |||
str26 = _FakeString("lines\nof\ntext\r\nare\r\npresented\nhere") | |||
self.assertEqual(["lines", "of", "text", "are", "presented", "here"], | |||
str26.splitlines()) | |||
self.assertEqual(["lines\n", "of\n", "text\r\n", "are\r\n", | |||
"presented\n", "here"], str26.splitlines(True)) | |||
self.assertTrue(str1.startswith("fake")) | |||
self.assertFalse(str1.startswith("faker")) | |||
self.assertEqual("fake string", str1.strip()) | |||
self.assertEqual("fake string", str23.strip()) | |||
self.assertEqual("ke stri", str1.strip("abcdefngr")) | |||
self.assertEqual("fOObAR", str16.swapcase()) | |||
self.assertEqual("Fake String", str1.title()) | |||
table1 = StringMixIn.maketrans({97: "1", 101: "2", 105: "3", | |||
111: "4", 117: "5"}) | |||
table2 = StringMixIn.maketrans("aeiou", "12345") | |||
table3 = StringMixIn.maketrans("aeiou", "12345", "rts") | |||
self.assertEqual("f1k2 str3ng", str1.translate(table1)) | |||
self.assertEqual("f1k2 str3ng", str1.translate(table2)) | |||
self.assertEqual("f1k2 3ng", str1.translate(table3)) | |||
self.assertEqual("", str15.upper()) | |||
self.assertEqual("FOOBAR", str16.upper()) | |||
self.assertEqual("123", str12.zfill(3)) | |||
self.assertEqual("000123", str12.zfill(6)) | |||
if __name__ == "__main__": | |||
unittest.main(verbosity=2) | |||
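# Parametrizing over each StringMixIn method name turns the docstring check into
# one test case per method, so a failure names the exact method missing its docstring.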
@pytest.mark.parametrize('method', [ | |||
"capitalize", "casefold", "center", "count", "encode", "endswith", | |||
"expandtabs", "find", "format", "format_map", "index", "isalnum", | |||
"isalpha", "isdecimal", "isdigit", "isidentifier", "islower", | |||
"isnumeric", "isprintable", "isspace", "istitle", "isupper", | |||
"join", "ljust", "lower", "lstrip", "maketrans", "partition", | |||
"replace", "rfind", "rindex", "rjust", "rpartition", "rsplit", | |||
"rstrip", "split", "splitlines", "startswith", "strip", "swapcase", | |||
"title", "translate", "upper", "zfill" | |||
]) | |||
def test_docs(method): | |||
"""make sure the various methods of StringMixIn have docstrings""" | |||
expected = getattr("foo", method).__doc__ | |||
actual = getattr(_FakeString("foo"), method).__doc__ | |||
assert expected == actual | |||
def test_types(): | |||
"""make sure StringMixIns convert to different types correctly""" | |||
fstr = _FakeString("fake string") | |||
assert str(fstr) == "fake string" | |||
assert bytes(fstr) == b"fake string" | |||
assert repr(fstr) == "'fake string'" | |||
assert isinstance(str(fstr), str) | |||
assert isinstance(bytes(fstr), bytes) | |||
assert isinstance(repr(fstr), str) | |||
def test_comparisons(): | |||
"""make sure comparison operators work""" | |||
str1 = _FakeString("this is a fake string") | |||
str2 = _FakeString("this is a fake string") | |||
str3 = _FakeString("fake string, this is") | |||
str4 = "this is a fake string" | |||
str5 = "fake string, this is" | |||
assert str1 <= str2 | |||
assert str1 >= str2 | |||
assert str1 == str2 | |||
assert str1 == str2 | |||
assert str1 >= str2 | |||
assert str1 <= str2 | |||
assert str1 > str3 | |||
assert str1 >= str3 | |||
assert str1 != str3 | |||
assert str1 != str3 | |||
assert str1 >= str3 | |||
assert str1 > str3 | |||
assert str1 <= str4 | |||
assert str1 >= str4 | |||
assert str1 == str4 | |||
assert str1 == str4 | |||
assert str1 >= str4 | |||
assert str1 <= str4 | |||
assert str5 <= str1 | |||
assert str5 < str1 | |||
assert str5 != str1 | |||
assert str5 != str1 | |||
assert str5 < str1 | |||
assert str5 <= str1 | |||
def test_other_magics(): | |||
"""test other magically implemented features, like len() and iter()""" | |||
str1 = _FakeString("fake string") | |||
str2 = _FakeString("") | |||
expected = ["f", "a", "k", "e", " ", "s", "t", "r", "i", "n", "g"] | |||
assert bool(str1) is True | |||
assert bool(str2) is False | |||
assert 11 == len(str1) | |||
assert 0 == len(str2) | |||
out = [] | |||
for ch in str1: | |||
out.append(ch) | |||
assert expected == out | |||
out = [] | |||
for ch in str2: | |||
out.append(ch) | |||
assert [] == out | |||
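# StringMixIn.__iter__ should hand back generator objects that yield one character
# at a time and then raise StopIteration when exhausted.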
gen1 = iter(str1) | |||
gen2 = iter(str2) | |||
assert isinstance(gen1, GeneratorType) | |||
assert isinstance(gen2, GeneratorType) | |||
out = [] | |||
for _ in range(len(str1)): | |||
out.append(next(gen1)) | |||
with pytest.raises(StopIteration): | |||
next(gen1) | |||
assert expected == out | |||
with pytest.raises(StopIteration): | |||
next(gen2) | |||
assert "gnirts ekaf" == "".join(list(reversed(str1))) | |||
assert [] == list(reversed(str2)) | |||
assert "f" == str1[0] | |||
assert " " == str1[4] | |||
assert "g" == str1[10] | |||
assert "n" == str1[-2] | |||
with pytest.raises(IndexError): | |||
str1[11] | |||
with pytest.raises(IndexError): | |||
str2[0] | |||
assert "k" in str1 | |||
assert "fake" in str1 | |||
assert "str" in str1 | |||
assert "" in str1 | |||
assert "" in str2 | |||
assert "real" not in str1 | |||
assert "s" not in str2 | |||
def test_other_methods(): | |||
"""test the remaining non-magic methods of StringMixIn""" | |||
str1 = _FakeString("fake string") | |||
assert "Fake string" == str1.capitalize() | |||
assert " fake string " == str1.center(15) | |||
assert " fake string " == str1.center(16) | |||
assert "qqfake stringqq" == str1.center(15, "q") | |||
assert 1 == str1.count("e") | |||
assert 0 == str1.count("z") | |||
assert 1 == str1.count("r", 7) | |||
assert 0 == str1.count("r", 8) | |||
assert 1 == str1.count("r", 5, 9) | |||
assert 0 == str1.count("r", 5, 7) | |||
str3 = _FakeString("𐌲𐌿𐍄") | |||
actual = b"\xF0\x90\x8C\xB2\xF0\x90\x8C\xBF\xF0\x90\x8D\x84" | |||
assert b"fake string" == str1.encode() | |||
assert actual == str3.encode("utf-8") | |||
assert actual == str3.encode(encoding="utf-8") | |||
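# Plain str.encode() falls back to the interpreter's default encoding, so the
# expected result depends on whether that default is ASCII or UTF-8.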
if sys.getdefaultencoding() == "ascii": | |||
with pytest.raises(UnicodeEncodeError): | |||
str3.encode() | |||
elif sys.getdefaultencoding() == "utf-8": | |||
assert actual == str3.encode() | |||
with pytest.raises(UnicodeEncodeError): | |||
str3.encode("ascii") | |||
with pytest.raises(UnicodeEncodeError): | |||
str3.encode("ascii", "strict") | |||
if sys.getdefaultencoding() == "ascii": | |||
with pytest.raises(UnicodeEncodeError): | |||
str3.encode("ascii", errors="strict") | |||
elif sys.getdefaultencoding() == "utf-8": | |||
assert actual == str3.encode(errors="strict") | |||
assert b"" == str3.encode("ascii", "ignore") | |||
if sys.getdefaultencoding() == "ascii": | |||
assert b"" == str3.encode(errors="ignore") | |||
elif sys.getdefaultencoding() == "utf-8": | |||
assert actual == str3.encode(errors="ignore") | |||
assert str1.endswith("ing") is True | |||
assert str1.endswith("ingh") is False | |||
str4 = _FakeString("\tfoobar") | |||
assert "fake string" == str1 | |||
assert " foobar" == str4.expandtabs() | |||
assert " foobar" == str4.expandtabs(4) | |||
assert 3 == str1.find("e") | |||
assert -1 == str1.find("z") | |||
assert 7 == str1.find("r", 7) | |||
assert -1 == str1.find("r", 8) | |||
assert 7 == str1.find("r", 5, 9) | |||
assert -1 == str1.find("r", 5, 7) | |||
str5 = _FakeString("foo{0}baz") | |||
str6 = _FakeString("foo{abc}baz") | |||
str7 = _FakeString("foo{0}{abc}buzz") | |||
str8 = _FakeString("{0}{1}") | |||
assert "fake string" == str1.format() | |||
assert "foobarbaz" == str5.format("bar") | |||
assert "foobarbaz" == str6.format(abc="bar") | |||
assert "foobarbazbuzz" == str7.format("bar", abc="baz") | |||
with pytest.raises(IndexError): | |||
str8.format("abc") | |||
assert "fake string" == str1.format_map({}) | |||
assert "foobarbaz" == str6.format_map({"abc": "bar"}) | |||
with pytest.raises(ValueError): | |||
str5.format_map({0: "abc"}) | |||
assert 3 == str1.index("e") | |||
with pytest.raises(ValueError): | |||
str1.index("z") | |||
assert 7 == str1.index("r", 7) | |||
with pytest.raises(ValueError): | |||
str1.index("r", 8) | |||
assert 7 == str1.index("r", 5, 9) | |||
with pytest.raises(ValueError): | |||
str1.index("r", 5, 7) | |||
str9 = _FakeString("foobar") | |||
str10 = _FakeString("foobar123") | |||
str11 = _FakeString("foo bar") | |||
assert str9.isalnum() is True | |||
assert str10.isalnum() is True | |||
assert str11.isalnum() is False | |||
assert str9.isalpha() is True | |||
assert str10.isalpha() is False | |||
assert str11.isalpha() is False | |||
str12 = _FakeString("123") | |||
str13 = _FakeString("\u2155") | |||
str14 = _FakeString("\u00B2") | |||
assert str9.isdecimal() is False | |||
assert str12.isdecimal() is True | |||
assert str13.isdecimal() is False | |||
assert str14.isdecimal() is False | |||
assert str9.isdigit() is False | |||
assert str12.isdigit() is True | |||
assert str13.isdigit() is False | |||
assert str14.isdigit() is True | |||
assert str9.isidentifier() is True | |||
assert str10.isidentifier() is True | |||
assert str11.isidentifier() is False | |||
assert str12.isidentifier() is False | |||
str15 = _FakeString("") | |||
str16 = _FakeString("FooBar") | |||
assert str9.islower() is True | |||
assert str15.islower() is False | |||
assert str16.islower() is False | |||
assert str9.isnumeric() is False | |||
assert str12.isnumeric() is True | |||
assert str13.isnumeric() is True | |||
assert str14.isnumeric() is True | |||
str16B = _FakeString("\x01\x02") | |||
assert str9.isprintable() is True | |||
assert str13.isprintable() is True | |||
assert str14.isprintable() is True | |||
assert str15.isprintable() is True | |||
assert str16B.isprintable() is False | |||
str17 = _FakeString(" ") | |||
str18 = _FakeString("\t \t \r\n") | |||
assert str1.isspace() is False | |||
assert str9.isspace() is False | |||
assert str17.isspace() is True | |||
assert str18.isspace() is True | |||
str19 = _FakeString("This Sentence Looks Like A Title") | |||
str20 = _FakeString("This sentence doesn't LookLikeATitle") | |||
assert str15.istitle() is False | |||
assert str19.istitle() is True | |||
assert str20.istitle() is False | |||
str21 = _FakeString("FOOBAR") | |||
assert str9.isupper() is False | |||
assert str15.isupper() is False | |||
assert str21.isupper() is True | |||
assert "foobar" == str15.join(["foo", "bar"]) | |||
assert "foo123bar123baz" == str12.join(("foo", "bar", "baz")) | |||
assert "fake string " == str1.ljust(15) | |||
assert "fake string " == str1.ljust(16) | |||
assert "fake stringqqqq" == str1.ljust(15, "q") | |||
str22 = _FakeString("ß") | |||
assert "" == str15.lower() | |||
assert "foobar" == str16.lower() | |||
assert "ß" == str22.lower() | |||
assert "" == str15.casefold() | |||
assert "foobar" == str16.casefold() | |||
assert "ss" == str22.casefold() | |||
str23 = _FakeString(" fake string ") | |||
assert "fake string" == str1.lstrip() | |||
assert "fake string " == str23.lstrip() | |||
assert "ke string" == str1.lstrip("abcdef") | |||
assert ("fa", "ke", " string") == str1.partition("ke") | |||
assert ("fake string", "", "") == str1.partition("asdf") | |||
str24 = _FakeString("boo foo moo") | |||
assert "real string" == str1.replace("fake", "real") | |||
assert "bu fu moo" == str24.replace("oo", "u", 2) | |||
assert 3 == str1.rfind("e") | |||
assert -1 == str1.rfind("z") | |||
assert 7 == str1.rfind("r", 7) | |||
assert -1 == str1.rfind("r", 8) | |||
assert 7 == str1.rfind("r", 5, 9) | |||
assert -1 == str1.rfind("r", 5, 7) | |||
assert 3 == str1.rindex("e") | |||
with pytest.raises(ValueError): | |||
str1.rindex("z") | |||
assert 7 == str1.rindex("r", 7) | |||
with pytest.raises(ValueError): | |||
str1.rindex("r", 8) | |||
assert 7 == str1.rindex("r", 5, 9) | |||
with pytest.raises(ValueError): | |||
str1.rindex("r", 5, 7) | |||
assert " fake string" == str1.rjust(15) | |||
assert " fake string" == str1.rjust(16) | |||
assert "qqqqfake string" == str1.rjust(15, "q") | |||
assert ("fa", "ke", " string") == str1.rpartition("ke") | |||
assert ("", "", "fake string") == str1.rpartition("asdf") | |||
str25 = _FakeString(" this is a sentence with whitespace ") | |||
actual = ["this", "is", "a", "sentence", "with", "whitespace"] | |||
assert actual == str25.rsplit() | |||
assert actual == str25.rsplit(None) | |||
actual = ["", "", "", "this", "is", "a", "", "", "sentence", "with", | |||
"", "whitespace", ""] | |||
assert actual == str25.rsplit(" ") | |||
actual = [" this is a", "sentence", "with", "whitespace"] | |||
assert actual == str25.rsplit(None, 3) | |||
actual = [" this is a sentence with", "", "whitespace", ""] | |||
assert actual == str25.rsplit(" ", 3) | |||
actual = [" this is a", "sentence", "with", "whitespace"] | |||
assert actual == str25.rsplit(maxsplit=3) | |||
assert "fake string" == str1.rstrip() | |||
assert " fake string" == str23.rstrip() | |||
assert "fake stri" == str1.rstrip("ngr") | |||
actual = ["this", "is", "a", "sentence", "with", "whitespace"] | |||
assert actual == str25.split() | |||
assert actual == str25.split(None) | |||
actual = ["", "", "", "this", "is", "a", "", "", "sentence", "with", | |||
"", "whitespace", ""] | |||
assert actual == str25.split(" ") | |||
actual = ["this", "is", "a", "sentence with whitespace "] | |||
assert actual == str25.split(None, 3) | |||
actual = ["", "", "", "this is a sentence with whitespace "] | |||
assert actual == str25.split(" ", 3) | |||
actual = ["this", "is", "a", "sentence with whitespace "] | |||
assert actual == str25.split(maxsplit=3) | |||
str26 = _FakeString("lines\nof\ntext\r\nare\r\npresented\nhere") | |||
assert ["lines", "of", "text", "are", "presented", "here"] \ | |||
== str26.splitlines() | |||
assert ["lines\n", "of\n", "text\r\n", "are\r\n", "presented\n", "here"] \ | |||
== str26.splitlines(True) | |||
assert str1.startswith("fake") is True | |||
assert str1.startswith("faker") is False | |||
assert "fake string" == str1.strip() | |||
assert "fake string" == str23.strip() | |||
assert "ke stri" == str1.strip("abcdefngr") | |||
assert "fOObAR" == str16.swapcase() | |||
assert "Fake String" == str1.title() | |||
table1 = StringMixIn.maketrans({97: "1", 101: "2", 105: "3", | |||
111: "4", 117: "5"}) | |||
table2 = StringMixIn.maketrans("aeiou", "12345") | |||
table3 = StringMixIn.maketrans("aeiou", "12345", "rts") | |||
assert "f1k2 str3ng" == str1.translate(table1) | |||
assert "f1k2 str3ng" == str1.translate(table2) | |||
assert "f1k2 3ng" == str1.translate(table3) | |||
assert "" == str15.upper() | |||
assert "FOOBAR" == str16.upper() | |||
assert "123" == str12.zfill(3) | |||
assert "000123" == str12.zfill(6) |
@@ -18,11 +18,15 @@ | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
import unittest | |||
""" | |||
Test cases for the Tag node. | |||
""" | |||
import pytest | |||
from mwparserfromhell.nodes import Tag, Template, Text | |||
from mwparserfromhell.nodes.extras import Attribute | |||
from ._test_tree_equality import TreeEqualityTestCase, wrap, wraptext | |||
from .conftest import assert_wikicode_equal, wrap, wraptext | |||
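# Shorthand factories for Attribute nodes used throughout these tests:
# agen builds a name/value pair, agennv a name with no value.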
agen = lambda name, value: Attribute(wraptext(name), wraptext(value)) | |||
agennv = lambda name: Attribute(wraptext(name)) | |||
@@ -30,315 +34,321 @@ agennq = lambda name, value: Attribute(wraptext(name), wraptext(value), None) | |||
agenp = lambda name, v, a, b, c: Attribute(wraptext(name), v, '"', a, b, c) | |||
agenpnv = lambda name, a, b, c: Attribute(wraptext(name), None, '"', a, b, c) | |||
class TestTag(TreeEqualityTestCase): | |||
"""Test cases for the Tag node.""" | |||
def test_str(self): | |||
"""test Tag.__str__()""" | |||
node1 = Tag(wraptext("ref")) | |||
node2 = Tag(wraptext("span"), wraptext("foo"), | |||
[agen("style", "color: red;")]) | |||
node3 = Tag(wraptext("ref"), | |||
attrs=[agennq("name", "foo"), | |||
agenpnv("some_attr", " ", "", "")], | |||
self_closing=True) | |||
node4 = Tag(wraptext("br"), self_closing=True, padding=" ") | |||
node5 = Tag(wraptext("br"), self_closing=True, implicit=True) | |||
node6 = Tag(wraptext("br"), self_closing=True, invalid=True, | |||
implicit=True) | |||
node7 = Tag(wraptext("br"), self_closing=True, invalid=True, | |||
padding=" ") | |||
node8 = Tag(wraptext("hr"), wiki_markup="----", self_closing=True) | |||
node9 = Tag(wraptext("i"), wraptext("italics!"), wiki_markup="''") | |||
def test_str(): | |||
"""test Tag.__str__()""" | |||
node1 = Tag(wraptext("ref")) | |||
node2 = Tag(wraptext("span"), wraptext("foo"), | |||
[agen("style", "color: red;")]) | |||
node3 = Tag(wraptext("ref"), | |||
attrs=[agennq("name", "foo"), | |||
agenpnv("some_attr", " ", "", "")], | |||
self_closing=True) | |||
node4 = Tag(wraptext("br"), self_closing=True, padding=" ") | |||
node5 = Tag(wraptext("br"), self_closing=True, implicit=True) | |||
node6 = Tag(wraptext("br"), self_closing=True, invalid=True, | |||
implicit=True) | |||
node7 = Tag(wraptext("br"), self_closing=True, invalid=True, | |||
padding=" ") | |||
node8 = Tag(wraptext("hr"), wiki_markup="----", self_closing=True) | |||
node9 = Tag(wraptext("i"), wraptext("italics!"), wiki_markup="''") | |||
self.assertEqual("<ref></ref>", str(node1)) | |||
self.assertEqual('<span style="color: red;">foo</span>', str(node2)) | |||
self.assertEqual("<ref name=foo some_attr/>", str(node3)) | |||
self.assertEqual("<br />", str(node4)) | |||
self.assertEqual("<br>", str(node5)) | |||
self.assertEqual("</br>", str(node6)) | |||
self.assertEqual("</br />", str(node7)) | |||
self.assertEqual("----", str(node8)) | |||
self.assertEqual("''italics!''", str(node9)) | |||
assert "<ref></ref>" == str(node1) | |||
assert '<span style="color: red;">foo</span>' == str(node2) | |||
assert "<ref name=foo some_attr/>" == str(node3) | |||
assert "<br />" == str(node4) | |||
assert "<br>" == str(node5) | |||
assert "</br>" == str(node6) | |||
assert "</br />" == str(node7) | |||
assert "----" == str(node8) | |||
assert "''italics!''" == str(node9) | |||
def test_children(self): | |||
"""test Tag.__children__()""" | |||
# <ref>foobar</ref> | |||
node1 = Tag(wraptext("ref"), wraptext("foobar")) | |||
# '''bold text''' | |||
node2 = Tag(wraptext("b"), wraptext("bold text"), wiki_markup="'''") | |||
# <img id="foo" class="bar" selected /> | |||
node3 = Tag(wraptext("img"), | |||
attrs=[agen("id", "foo"), agen("class", "bar"), | |||
agennv("selected")], | |||
self_closing=True, padding=" ") | |||
def test_children(): | |||
"""test Tag.__children__()""" | |||
# <ref>foobar</ref> | |||
node1 = Tag(wraptext("ref"), wraptext("foobar")) | |||
# '''bold text''' | |||
node2 = Tag(wraptext("b"), wraptext("bold text"), wiki_markup="'''") | |||
# <img id="foo" class="bar" selected /> | |||
node3 = Tag(wraptext("img"), | |||
attrs=[agen("id", "foo"), agen("class", "bar"), | |||
agennv("selected")], | |||
self_closing=True, padding=" ") | |||
gen1 = node1.__children__() | |||
gen2 = node2.__children__() | |||
gen3 = node3.__children__() | |||
self.assertEqual(node1.tag, next(gen1)) | |||
self.assertEqual(node3.tag, next(gen3)) | |||
self.assertEqual(node3.attributes[0].name, next(gen3)) | |||
self.assertEqual(node3.attributes[0].value, next(gen3)) | |||
self.assertEqual(node3.attributes[1].name, next(gen3)) | |||
self.assertEqual(node3.attributes[1].value, next(gen3)) | |||
self.assertEqual(node3.attributes[2].name, next(gen3)) | |||
self.assertEqual(node1.contents, next(gen1)) | |||
self.assertEqual(node2.contents, next(gen2)) | |||
self.assertEqual(node1.closing_tag, next(gen1)) | |||
self.assertRaises(StopIteration, next, gen1) | |||
self.assertRaises(StopIteration, next, gen2) | |||
self.assertRaises(StopIteration, next, gen3) | |||
gen1 = node1.__children__() | |||
gen2 = node2.__children__() | |||
gen3 = node3.__children__() | |||
assert node1.tag == next(gen1) | |||
assert node3.tag == next(gen3) | |||
assert node3.attributes[0].name == next(gen3) | |||
assert node3.attributes[0].value == next(gen3) | |||
assert node3.attributes[1].name == next(gen3) | |||
assert node3.attributes[1].value == next(gen3) | |||
assert node3.attributes[2].name == next(gen3) | |||
assert node1.contents == next(gen1) | |||
assert node2.contents == next(gen2) | |||
assert node1.closing_tag == next(gen1) | |||
with pytest.raises(StopIteration): | |||
next(gen1) | |||
with pytest.raises(StopIteration): | |||
next(gen2) | |||
with pytest.raises(StopIteration): | |||
next(gen3) | |||
def test_strip(self): | |||
"""test Tag.__strip__()""" | |||
node1 = Tag(wraptext("i"), wraptext("foobar")) | |||
node2 = Tag(wraptext("math"), wraptext("foobar")) | |||
node3 = Tag(wraptext("br"), self_closing=True) | |||
def test_strip(): | |||
"""test Tag.__strip__()""" | |||
node1 = Tag(wraptext("i"), wraptext("foobar")) | |||
node2 = Tag(wraptext("math"), wraptext("foobar")) | |||
node3 = Tag(wraptext("br"), self_closing=True) | |||
self.assertEqual("foobar", node1.__strip__()) | |||
self.assertEqual(None, node2.__strip__()) | |||
self.assertEqual(None, node3.__strip__()) | |||
assert "foobar" == node1.__strip__() | |||
assert node2.__strip__() is None | |||
assert node3.__strip__() is None | |||
def test_showtree(self): | |||
"""test Tag.__showtree__()""" | |||
output = [] | |||
getter, marker = object(), object() | |||
get = lambda code: output.append((getter, code)) | |||
mark = lambda: output.append(marker) | |||
node1 = Tag(wraptext("ref"), wraptext("text"), | |||
[agen("name", "foo"), agennv("selected")]) | |||
node2 = Tag(wraptext("br"), self_closing=True, padding=" ") | |||
node3 = Tag(wraptext("br"), self_closing=True, invalid=True, | |||
implicit=True, padding=" ") | |||
node1.__showtree__(output.append, get, mark) | |||
node2.__showtree__(output.append, get, mark) | |||
node3.__showtree__(output.append, get, mark) | |||
valid = [ | |||
"<", (getter, node1.tag), (getter, node1.attributes[0].name), | |||
" = ", marker, (getter, node1.attributes[0].value), | |||
(getter, node1.attributes[1].name), ">", (getter, node1.contents), | |||
"</", (getter, node1.closing_tag), ">", "<", (getter, node2.tag), | |||
"/>", "</", (getter, node3.tag), ">"] | |||
self.assertEqual(valid, output) | |||
def test_showtree(): | |||
"""test Tag.__showtree__()""" | |||
output = [] | |||
getter, marker = object(), object() | |||
get = lambda code: output.append((getter, code)) | |||
mark = lambda: output.append(marker) | |||
node1 = Tag(wraptext("ref"), wraptext("text"), | |||
[agen("name", "foo"), agennv("selected")]) | |||
node2 = Tag(wraptext("br"), self_closing=True, padding=" ") | |||
node3 = Tag(wraptext("br"), self_closing=True, invalid=True, | |||
implicit=True, padding=" ") | |||
node1.__showtree__(output.append, get, mark) | |||
node2.__showtree__(output.append, get, mark) | |||
node3.__showtree__(output.append, get, mark) | |||
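# __showtree__ writes literal markup through the first callback, routes nested code
# objects through get(), and signals via mark(), so the expected output interleaves
# plain strings, (getter, node) tuples, and the marker object.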
valid = [ | |||
"<", (getter, node1.tag), (getter, node1.attributes[0].name), | |||
" = ", marker, (getter, node1.attributes[0].value), | |||
(getter, node1.attributes[1].name), ">", (getter, node1.contents), | |||
"</", (getter, node1.closing_tag), ">", "<", (getter, node2.tag), | |||
"/>", "</", (getter, node3.tag), ">"] | |||
assert valid == output | |||
def test_tag(self): | |||
"""test getter/setter for the tag attribute""" | |||
tag = wraptext("ref") | |||
node = Tag(tag, wraptext("text")) | |||
self.assertIs(tag, node.tag) | |||
self.assertIs(tag, node.closing_tag) | |||
node.tag = "span" | |||
self.assertWikicodeEqual(wraptext("span"), node.tag) | |||
self.assertWikicodeEqual(wraptext("span"), node.closing_tag) | |||
self.assertEqual("<span>text</span>", node) | |||
def test_tag(): | |||
"""test getter/setter for the tag attribute""" | |||
tag = wraptext("ref") | |||
node = Tag(tag, wraptext("text")) | |||
assert tag is node.tag | |||
assert tag is node.closing_tag | |||
node.tag = "span" | |||
assert_wikicode_equal(wraptext("span"), node.tag) | |||
assert_wikicode_equal(wraptext("span"), node.closing_tag) | |||
assert "<span>text</span>" == node | |||
def test_contents(self): | |||
"""test getter/setter for the contents attribute""" | |||
contents = wraptext("text") | |||
node = Tag(wraptext("ref"), contents) | |||
self.assertIs(contents, node.contents) | |||
node.contents = "text and a {{template}}" | |||
parsed = wrap([Text("text and a "), Template(wraptext("template"))]) | |||
self.assertWikicodeEqual(parsed, node.contents) | |||
self.assertEqual("<ref>text and a {{template}}</ref>", node) | |||
def test_contents(): | |||
"""test getter/setter for the contents attribute""" | |||
contents = wraptext("text") | |||
node = Tag(wraptext("ref"), contents) | |||
assert contents is node.contents | |||
node.contents = "text and a {{template}}" | |||
parsed = wrap([Text("text and a "), Template(wraptext("template"))]) | |||
assert_wikicode_equal(parsed, node.contents) | |||
assert "<ref>text and a {{template}}</ref>" == node | |||
def test_attributes(self): | |||
"""test getter for the attributes attribute""" | |||
attrs = [agen("name", "bar")] | |||
node1 = Tag(wraptext("ref"), wraptext("foo")) | |||
node2 = Tag(wraptext("ref"), wraptext("foo"), attrs) | |||
self.assertEqual([], node1.attributes) | |||
self.assertIs(attrs, node2.attributes) | |||
def test_attributes(): | |||
"""test getter for the attributes attribute""" | |||
attrs = [agen("name", "bar")] | |||
node1 = Tag(wraptext("ref"), wraptext("foo")) | |||
node2 = Tag(wraptext("ref"), wraptext("foo"), attrs) | |||
assert [] == node1.attributes | |||
assert attrs is node2.attributes | |||
def test_wiki_markup(self): | |||
"""test getter/setter for the wiki_markup attribute""" | |||
node = Tag(wraptext("i"), wraptext("italic text")) | |||
self.assertIs(None, node.wiki_markup) | |||
node.wiki_markup = "''" | |||
self.assertEqual("''", node.wiki_markup) | |||
self.assertEqual("''italic text''", node) | |||
node.wiki_markup = False | |||
self.assertFalse(node.wiki_markup) | |||
self.assertEqual("<i>italic text</i>", node) | |||
def test_wiki_markup(): | |||
"""test getter/setter for the wiki_markup attribute""" | |||
node = Tag(wraptext("i"), wraptext("italic text")) | |||
assert None is node.wiki_markup | |||
node.wiki_markup = "''" | |||
assert "''" == node.wiki_markup | |||
assert "''italic text''" == node | |||
node.wiki_markup = False | |||
assert node.wiki_markup is None | |||
assert "<i>italic text</i>" == node | |||
def test_self_closing(self): | |||
"""test getter/setter for the self_closing attribute""" | |||
node = Tag(wraptext("ref"), wraptext("foobar")) | |||
self.assertFalse(node.self_closing) | |||
node.self_closing = True | |||
self.assertTrue(node.self_closing) | |||
self.assertEqual("<ref/>", node) | |||
node.self_closing = 0 | |||
self.assertFalse(node.self_closing) | |||
self.assertEqual("<ref>foobar</ref>", node) | |||
def test_self_closing(): | |||
"""test getter/setter for the self_closing attribute""" | |||
node = Tag(wraptext("ref"), wraptext("foobar")) | |||
assert node.self_closing is False | |||
node.self_closing = True | |||
assert node.self_closing is True | |||
assert "<ref/>" == node | |||
node.self_closing = 0 | |||
assert node.self_closing is False | |||
assert "<ref>foobar</ref>" == node | |||
def test_invalid(self): | |||
"""test getter/setter for the invalid attribute""" | |||
node = Tag(wraptext("br"), self_closing=True, implicit=True) | |||
self.assertFalse(node.invalid) | |||
node.invalid = True | |||
self.assertTrue(node.invalid) | |||
self.assertEqual("</br>", node) | |||
node.invalid = 0 | |||
self.assertFalse(node.invalid) | |||
self.assertEqual("<br>", node) | |||
def test_invalid(): | |||
"""test getter/setter for the invalid attribute""" | |||
node = Tag(wraptext("br"), self_closing=True, implicit=True) | |||
assert node.invalid is False | |||
node.invalid = True | |||
assert node.invalid is True | |||
assert "</br>" == node | |||
node.invalid = 0 | |||
assert node.invalid is False | |||
assert "<br>" == node | |||
def test_implicit(self): | |||
"""test getter/setter for the implicit attribute""" | |||
node = Tag(wraptext("br"), self_closing=True) | |||
self.assertFalse(node.implicit) | |||
node.implicit = True | |||
self.assertTrue(node.implicit) | |||
self.assertEqual("<br>", node) | |||
node.implicit = 0 | |||
self.assertFalse(node.implicit) | |||
self.assertEqual("<br/>", node) | |||
def test_implicit(): | |||
"""test getter/setter for the implicit attribute""" | |||
node = Tag(wraptext("br"), self_closing=True) | |||
assert node.implicit is False | |||
node.implicit = True | |||
assert node.implicit is True | |||
assert "<br>" == node | |||
node.implicit = 0 | |||
assert node.implicit is False | |||
assert "<br/>" == node | |||
def test_padding(self): | |||
"""test getter/setter for the padding attribute""" | |||
node = Tag(wraptext("ref"), wraptext("foobar")) | |||
self.assertEqual("", node.padding) | |||
node.padding = " " | |||
self.assertEqual(" ", node.padding) | |||
self.assertEqual("<ref >foobar</ref>", node) | |||
node.padding = None | |||
self.assertEqual("", node.padding) | |||
self.assertEqual("<ref>foobar</ref>", node) | |||
self.assertRaises(ValueError, setattr, node, "padding", True) | |||
def test_padding(): | |||
"""test getter/setter for the padding attribute""" | |||
node = Tag(wraptext("ref"), wraptext("foobar")) | |||
assert "" == node.padding | |||
node.padding = " " | |||
assert " " == node.padding | |||
assert "<ref >foobar</ref>" == node | |||
node.padding = None | |||
assert "" == node.padding | |||
assert "<ref>foobar</ref>" == node | |||
with pytest.raises(ValueError): | |||
node.__setattr__("padding", True) | |||
def test_closing_tag(self): | |||
"""test getter/setter for the closing_tag attribute""" | |||
tag = wraptext("ref") | |||
node = Tag(tag, wraptext("foobar")) | |||
self.assertIs(tag, node.closing_tag) | |||
node.closing_tag = "ref {{ignore me}}" | |||
parsed = wrap([Text("ref "), Template(wraptext("ignore me"))]) | |||
self.assertWikicodeEqual(parsed, node.closing_tag) | |||
self.assertEqual("<ref>foobar</ref {{ignore me}}>", node) | |||
def test_closing_tag(): | |||
"""test getter/setter for the closing_tag attribute""" | |||
tag = wraptext("ref") | |||
node = Tag(tag, wraptext("foobar")) | |||
assert tag is node.closing_tag | |||
node.closing_tag = "ref {{ignore me}}" | |||
parsed = wrap([Text("ref "), Template(wraptext("ignore me"))]) | |||
assert_wikicode_equal(parsed, node.closing_tag) | |||
assert "<ref>foobar</ref {{ignore me}}>" == node | |||
def test_wiki_style_separator(self): | |||
"""test getter/setter for wiki_style_separator attribute""" | |||
node = Tag(wraptext("table"), wraptext("\n")) | |||
self.assertIs(None, node.wiki_style_separator) | |||
node.wiki_style_separator = "|" | |||
self.assertEqual("|", node.wiki_style_separator) | |||
node.wiki_markup = "{" | |||
self.assertEqual("{|\n{", node) | |||
node2 = Tag(wraptext("table"), wraptext("\n"), wiki_style_separator="|") | |||
self.assertEqual("|", node2.wiki_style_separator) | |||
def test_wiki_style_separator(): | |||
"""test getter/setter for wiki_style_separator attribute""" | |||
node = Tag(wraptext("table"), wraptext("\n")) | |||
assert None is node.wiki_style_separator | |||
node.wiki_style_separator = "|" | |||
assert "|" == node.wiki_style_separator | |||
node.wiki_markup = "{" | |||
assert "{|\n{" == node | |||
node2 = Tag(wraptext("table"), wraptext("\n"), wiki_style_separator="|") | |||
assert "|" == node2.wiki_style_separator | |||
def test_closing_wiki_markup(self): | |||
"""test getter/setter for closing_wiki_markup attribute""" | |||
node = Tag(wraptext("table"), wraptext("\n")) | |||
self.assertIs(None, node.closing_wiki_markup) | |||
node.wiki_markup = "{|" | |||
self.assertEqual("{|", node.closing_wiki_markup) | |||
node.closing_wiki_markup = "|}" | |||
self.assertEqual("|}", node.closing_wiki_markup) | |||
self.assertEqual("{|\n|}", node) | |||
node.wiki_markup = "!!" | |||
self.assertEqual("|}", node.closing_wiki_markup) | |||
self.assertEqual("!!\n|}", node) | |||
node.wiki_markup = False | |||
self.assertFalse(node.closing_wiki_markup) | |||
self.assertEqual("<table>\n</table>", node) | |||
node2 = Tag(wraptext("table"), wraptext("\n"), | |||
attrs=[agen("id", "foo")], wiki_markup="{|", | |||
closing_wiki_markup="|}") | |||
self.assertEqual("|}", node2.closing_wiki_markup) | |||
self.assertEqual('{| id="foo"\n|}', node2) | |||
def test_closing_wiki_markup(): | |||
"""test getter/setter for closing_wiki_markup attribute""" | |||
node = Tag(wraptext("table"), wraptext("\n")) | |||
assert None is node.closing_wiki_markup | |||
node.wiki_markup = "{|" | |||
assert "{|" == node.closing_wiki_markup | |||
node.closing_wiki_markup = "|}" | |||
assert "|}" == node.closing_wiki_markup | |||
assert "{|\n|}" == node | |||
node.wiki_markup = "!!" | |||
assert "|}" == node.closing_wiki_markup | |||
assert "!!\n|}" == node | |||
node.wiki_markup = False | |||
assert node.closing_wiki_markup is None | |||
assert "<table>\n</table>" == node | |||
node2 = Tag(wraptext("table"), wraptext("\n"), | |||
attrs=[agen("id", "foo")], wiki_markup="{|", | |||
closing_wiki_markup="|}") | |||
assert "|}" == node2.closing_wiki_markup | |||
assert '{| id="foo"\n|}' == node2 | |||
def test_has(self): | |||
"""test Tag.has()""" | |||
node = Tag(wraptext("ref"), wraptext("cite"), [agen("name", "foo")]) | |||
self.assertTrue(node.has("name")) | |||
self.assertTrue(node.has(" name ")) | |||
self.assertTrue(node.has(wraptext("name"))) | |||
self.assertFalse(node.has("Name")) | |||
self.assertFalse(node.has("foo")) | |||
def test_has(): | |||
"""test Tag.has()""" | |||
node = Tag(wraptext("ref"), wraptext("cite"), [agen("name", "foo")]) | |||
assert node.has("name") is True | |||
assert node.has(" name ") is True | |||
assert node.has(wraptext("name")) is True | |||
assert node.has("Name") is False | |||
assert node.has("foo") is False | |||
attrs = [agen("id", "foo"), agenp("class", "bar", " ", "\n", "\n"), | |||
agen("foo", "bar"), agenpnv("foo", " ", " \n ", " \t")] | |||
node2 = Tag(wraptext("div"), attrs=attrs, self_closing=True) | |||
self.assertTrue(node2.has("id")) | |||
self.assertTrue(node2.has("class")) | |||
self.assertTrue(node2.has(attrs[1].pad_first + str(attrs[1].name) + | |||
attrs[1].pad_before_eq)) | |||
self.assertTrue(node2.has(attrs[3])) | |||
self.assertTrue(node2.has(str(attrs[3]))) | |||
self.assertFalse(node2.has("idclass")) | |||
self.assertFalse(node2.has("id class")) | |||
self.assertFalse(node2.has("id=foo")) | |||
attrs = [agen("id", "foo"), agenp("class", "bar", " ", "\n", "\n"), | |||
agen("foo", "bar"), agenpnv("foo", " ", " \n ", " \t")] | |||
node2 = Tag(wraptext("div"), attrs=attrs, self_closing=True) | |||
assert node2.has("id") is True | |||
assert node2.has("class") is True | |||
assert node2.has(attrs[1].pad_first + str(attrs[1].name) + | |||
attrs[1].pad_before_eq) is True | |||
assert node2.has(attrs[3]) is True | |||
assert node2.has(str(attrs[3])) is True | |||
assert node2.has("idclass") is False | |||
assert node2.has("id class") is False | |||
assert node2.has("id=foo") is False | |||
def test_get(self): | |||
"""test Tag.get()""" | |||
attrs = [agen("name", "foo")] | |||
node = Tag(wraptext("ref"), wraptext("cite"), attrs) | |||
self.assertIs(attrs[0], node.get("name")) | |||
self.assertIs(attrs[0], node.get(" name ")) | |||
self.assertIs(attrs[0], node.get(wraptext("name"))) | |||
self.assertRaises(ValueError, node.get, "Name") | |||
self.assertRaises(ValueError, node.get, "foo") | |||
def test_get(): | |||
"""test Tag.get()""" | |||
attrs = [agen("name", "foo")] | |||
node = Tag(wraptext("ref"), wraptext("cite"), attrs) | |||
assert attrs[0] is node.get("name") | |||
assert attrs[0] is node.get(" name ") | |||
assert attrs[0] is node.get(wraptext("name")) | |||
with pytest.raises(ValueError): | |||
node.get("Name") | |||
with pytest.raises(ValueError): | |||
node.get("foo") | |||
attrs = [agen("id", "foo"), agenp("class", "bar", " ", "\n", "\n"), | |||
agen("foo", "bar"), agenpnv("foo", " ", " \n ", " \t")] | |||
node2 = Tag(wraptext("div"), attrs=attrs, self_closing=True) | |||
self.assertIs(attrs[0], node2.get("id")) | |||
self.assertIs(attrs[1], node2.get("class")) | |||
self.assertIs(attrs[1], node2.get( | |||
attrs[1].pad_first + str(attrs[1].name) + attrs[1].pad_before_eq)) | |||
self.assertIs(attrs[3], node2.get(attrs[3])) | |||
self.assertIs(attrs[3], node2.get(str(attrs[3]))) | |||
self.assertIs(attrs[3], node2.get(" foo")) | |||
self.assertRaises(ValueError, node2.get, "idclass") | |||
self.assertRaises(ValueError, node2.get, "id class") | |||
self.assertRaises(ValueError, node2.get, "id=foo") | |||
attrs = [agen("id", "foo"), agenp("class", "bar", " ", "\n", "\n"), | |||
agen("foo", "bar"), agenpnv("foo", " ", " \n ", " \t")] | |||
node2 = Tag(wraptext("div"), attrs=attrs, self_closing=True) | |||
assert attrs[0] is node2.get("id") | |||
assert attrs[1] is node2.get("class") | |||
assert attrs[1] is node2.get( | |||
attrs[1].pad_first + str(attrs[1].name) + attrs[1].pad_before_eq) | |||
assert attrs[3] is node2.get(attrs[3]) | |||
assert attrs[3] is node2.get(str(attrs[3])) | |||
assert attrs[3] is node2.get(" foo") | |||
with pytest.raises(ValueError): | |||
node2.get("idclass") | |||
with pytest.raises(ValueError): | |||
node2.get("id class") | |||
with pytest.raises(ValueError): | |||
node2.get("id=foo") | |||
def test_add(self): | |||
"""test Tag.add()""" | |||
node = Tag(wraptext("ref"), wraptext("cite")) | |||
node.add("name", "value") | |||
node.add("name", "value", quotes=None) | |||
node.add("name", "value", quotes="'") | |||
node.add("name") | |||
node.add(1, False) | |||
node.add("style", "{{foobar}}") | |||
node.add("name", "value", '"', "\n", " ", " ") | |||
attr1 = ' name="value"' | |||
attr2 = " name=value" | |||
attr3 = " name='value'" | |||
attr4 = " name" | |||
attr5 = ' 1="False"' | |||
attr6 = ' style="{{foobar}}"' | |||
attr7 = '\nname = "value"' | |||
self.assertEqual(attr1, node.attributes[0]) | |||
self.assertEqual(attr2, node.attributes[1]) | |||
self.assertEqual(attr3, node.attributes[2]) | |||
self.assertEqual(attr4, node.attributes[3]) | |||
self.assertEqual(attr5, node.attributes[4]) | |||
self.assertEqual(attr6, node.attributes[5]) | |||
self.assertEqual(attr7, node.attributes[6]) | |||
self.assertEqual(attr7, node.get("name")) | |||
self.assertWikicodeEqual(wrap([Template(wraptext("foobar"))]), | |||
node.attributes[5].value) | |||
self.assertEqual("".join(("<ref", attr1, attr2, attr3, attr4, attr5, | |||
attr6, attr7, ">cite</ref>")), node) | |||
self.assertRaises(ValueError, node.add, "name", "foo", quotes="bar") | |||
self.assertRaises(ValueError, node.add, "name", "a bc d", quotes=None) | |||
def test_add(): | |||
"""test Tag.add()""" | |||
node = Tag(wraptext("ref"), wraptext("cite")) | |||
node.add("name", "value") | |||
node.add("name", "value", quotes=None) | |||
node.add("name", "value", quotes="'") | |||
node.add("name") | |||
node.add(1, False) | |||
node.add("style", "{{foobar}}") | |||
node.add("name", "value", '"', "\n", " ", " ") | |||
attr1 = ' name="value"' | |||
attr2 = " name=value" | |||
attr3 = " name='value'" | |||
attr4 = " name" | |||
attr5 = ' 1="False"' | |||
attr6 = ' style="{{foobar}}"' | |||
attr7 = '\nname = "value"' | |||
assert attr1 == node.attributes[0] | |||
assert attr2 == node.attributes[1] | |||
assert attr3 == node.attributes[2] | |||
assert attr4 == node.attributes[3] | |||
assert attr5 == node.attributes[4] | |||
assert attr6 == node.attributes[5] | |||
assert attr7 == node.attributes[6] | |||
assert attr7 == node.get("name") | |||
assert_wikicode_equal(wrap([Template(wraptext("foobar"))]), | |||
node.attributes[5].value) | |||
assert "".join(("<ref", attr1, attr2, attr3, attr4, attr5, | |||
attr6, attr7, ">cite</ref>")) == node | |||
with pytest.raises(ValueError): | |||
node.add("name", "foo", quotes="bar") | |||
with pytest.raises(ValueError): | |||
node.add("name", "a bc d", quotes=None) | |||
def test_remove(self): | |||
"""test Tag.remove()""" | |||
attrs = [agen("id", "foo"), agenp("class", "bar", " ", "\n", "\n"), | |||
agen("foo", "bar"), agenpnv("foo", " ", " \n ", " \t")] | |||
node = Tag(wraptext("div"), attrs=attrs, self_closing=True) | |||
node.remove("class") | |||
self.assertEqual('<div id="foo" foo="bar" foo \n />', node) | |||
def test_remove(): | |||
"""test Tag.remove()""" | |||
attrs = [agen("id", "foo"), agenp("class", "bar", " ", "\n", "\n"), | |||
agen("foo", "bar"), agenpnv("foo", " ", " \n ", " \t")] | |||
node = Tag(wraptext("div"), attrs=attrs, self_closing=True) | |||
node.remove("class") | |||
assert '<div id="foo" foo="bar" foo \n />' == node | |||
node.remove("foo") | |||
assert '<div id="foo"/>' == node | |||
with pytest.raises(ValueError): | |||
node.remove("foo") | |||
self.assertEqual('<div id="foo"/>', node) | |||
self.assertRaises(ValueError, node.remove, "foo") | |||
node.remove("id") | |||
self.assertEqual('<div/>', node) | |||
if __name__ == "__main__": | |||
unittest.main(verbosity=2) | |||
node.remove("id") | |||
assert '<div/>' == node |
@@ -18,426 +18,436 @@ | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
""" | |||
Test cases for the Template node. | |||
""" | |||
from difflib import unified_diff | |||
import unittest | |||
import pytest | |||
from mwparserfromhell.nodes import HTMLEntity, Template, Text | |||
from mwparserfromhell.nodes.extras import Parameter | |||
from mwparserfromhell import parse | |||
from ._test_tree_equality import TreeEqualityTestCase, wrap, wraptext | |||
from .conftest import assert_wikicode_equal, wrap, wraptext | |||
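# Parameter factories: pgens builds a parameter with an explicit key (showkey=True),
# pgenh a positional-style one (showkey=False).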
pgens = lambda k, v: Parameter(wraptext(k), wraptext(v), showkey=True) | |||
pgenh = lambda k, v: Parameter(wraptext(k), wraptext(v), showkey=False) | |||
class TestTemplate(TreeEqualityTestCase): | |||
"""Test cases for the Template node.""" | |||
def test_str(): | |||
"""test Template.__str__()""" | |||
node = Template(wraptext("foobar")) | |||
assert "{{foobar}}" == str(node) | |||
node2 = Template(wraptext("foo"), | |||
[pgenh("1", "bar"), pgens("abc", "def")]) | |||
assert "{{foo|bar|abc=def}}" == str(node2) | |||
def test_str(self): | |||
"""test Template.__str__()""" | |||
node = Template(wraptext("foobar")) | |||
self.assertEqual("{{foobar}}", str(node)) | |||
node2 = Template(wraptext("foo"), | |||
[pgenh("1", "bar"), pgens("abc", "def")]) | |||
self.assertEqual("{{foo|bar|abc=def}}", str(node2)) | |||
def test_children(): | |||
"""test Template.__children__()""" | |||
node2p1 = Parameter(wraptext("1"), wraptext("bar"), showkey=False) | |||
node2p2 = Parameter(wraptext("abc"), wrap([Text("def"), Text("ghi")]), | |||
showkey=True) | |||
node1 = Template(wraptext("foobar")) | |||
node2 = Template(wraptext("foo"), [node2p1, node2p2]) | |||
def test_children(self): | |||
"""test Template.__children__()""" | |||
node2p1 = Parameter(wraptext("1"), wraptext("bar"), showkey=False) | |||
node2p2 = Parameter(wraptext("abc"), wrap([Text("def"), Text("ghi")]), | |||
showkey=True) | |||
node1 = Template(wraptext("foobar")) | |||
node2 = Template(wraptext("foo"), [node2p1, node2p2]) | |||
gen1 = node1.__children__() | |||
gen2 = node2.__children__() | |||
assert node1.name == next(gen1) | |||
assert node2.name == next(gen2) | |||
assert node2.params[0].value == next(gen2) | |||
assert node2.params[1].name == next(gen2) | |||
assert node2.params[1].value == next(gen2) | |||
with pytest.raises(StopIteration): | |||
next(gen1) | |||
with pytest.raises(StopIteration): | |||
next(gen2) | |||
gen1 = node1.__children__() | |||
gen2 = node2.__children__() | |||
self.assertEqual(node1.name, next(gen1)) | |||
self.assertEqual(node2.name, next(gen2)) | |||
self.assertEqual(node2.params[0].value, next(gen2)) | |||
self.assertEqual(node2.params[1].name, next(gen2)) | |||
self.assertEqual(node2.params[1].value, next(gen2)) | |||
self.assertRaises(StopIteration, next, gen1) | |||
self.assertRaises(StopIteration, next, gen2) | |||
def test_strip(): | |||
"""test Template.__strip__()""" | |||
node1 = Template(wraptext("foobar")) | |||
node2 = Template(wraptext("foo"), [ | |||
pgenh("1", "bar"), pgens("foo", ""), pgens("abc", "def")]) | |||
node3 = Template(wraptext("foo"), [ | |||
pgenh("1", "foo"), | |||
Parameter(wraptext("2"), wrap([Template(wraptext("hello"))]), | |||
showkey=False), | |||
pgenh("3", "bar")]) | |||
def test_strip(self): | |||
"""test Template.__strip__()""" | |||
node1 = Template(wraptext("foobar")) | |||
node2 = Template(wraptext("foo"), [ | |||
pgenh("1", "bar"), pgens("foo", ""), pgens("abc", "def")]) | |||
node3 = Template(wraptext("foo"), [ | |||
pgenh("1", "foo"), | |||
Parameter(wraptext("2"), wrap([Template(wraptext("hello"))]), | |||
showkey=False), | |||
pgenh("3", "bar")]) | |||
assert node1.__strip__(keep_template_params=False) is None | |||
assert node2.__strip__(keep_template_params=False) is None | |||
assert "" == node1.__strip__(keep_template_params=True) | |||
assert "bar def" == node2.__strip__(keep_template_params=True) | |||
assert "foo bar" == node3.__strip__(keep_template_params=True) | |||
self.assertEqual(None, node1.__strip__(keep_template_params=False)) | |||
self.assertEqual(None, node2.__strip__(keep_template_params=False)) | |||
self.assertEqual("", node1.__strip__(keep_template_params=True)) | |||
self.assertEqual("bar def", node2.__strip__(keep_template_params=True)) | |||
self.assertEqual("foo bar", node3.__strip__(keep_template_params=True)) | |||
def test_showtree(): | |||
"""test Template.__showtree__()""" | |||
output = [] | |||
getter, marker = object(), object() | |||
get = lambda code: output.append((getter, code)) | |||
mark = lambda: output.append(marker) | |||
node1 = Template(wraptext("foobar")) | |||
node2 = Template(wraptext("foo"), | |||
[pgenh("1", "bar"), pgens("abc", "def")]) | |||
node1.__showtree__(output.append, get, mark) | |||
node2.__showtree__(output.append, get, mark) | |||
valid = [ | |||
"{{", (getter, node1.name), "}}", "{{", (getter, node2.name), | |||
" | ", marker, (getter, node2.params[0].name), " = ", marker, | |||
(getter, node2.params[0].value), " | ", marker, | |||
(getter, node2.params[1].name), " = ", marker, | |||
(getter, node2.params[1].value), "}}"] | |||
assert valid == output | |||
def test_showtree(self): | |||
"""test Template.__showtree__()""" | |||
output = [] | |||
getter, marker = object(), object() | |||
get = lambda code: output.append((getter, code)) | |||
mark = lambda: output.append(marker) | |||
node1 = Template(wraptext("foobar")) | |||
node2 = Template(wraptext("foo"), | |||
[pgenh("1", "bar"), pgens("abc", "def")]) | |||
node1.__showtree__(output.append, get, mark) | |||
node2.__showtree__(output.append, get, mark) | |||
valid = [ | |||
"{{", (getter, node1.name), "}}", "{{", (getter, node2.name), | |||
" | ", marker, (getter, node2.params[0].name), " = ", marker, | |||
(getter, node2.params[0].value), " | ", marker, | |||
(getter, node2.params[1].name), " = ", marker, | |||
(getter, node2.params[1].value), "}}"] | |||
self.assertEqual(valid, output) | |||
def test_name(): | |||
"""test getter/setter for the name attribute""" | |||
name = wraptext("foobar") | |||
node1 = Template(name) | |||
node2 = Template(name, [pgenh("1", "bar")]) | |||
assert name is node1.name | |||
assert name is node2.name | |||
node1.name = "asdf" | |||
node2.name = "téstïng" | |||
assert_wikicode_equal(wraptext("asdf"), node1.name) | |||
assert_wikicode_equal(wraptext("téstïng"), node2.name) | |||
def test_name(self): | |||
"""test getter/setter for the name attribute""" | |||
name = wraptext("foobar") | |||
node1 = Template(name) | |||
node2 = Template(name, [pgenh("1", "bar")]) | |||
self.assertIs(name, node1.name) | |||
self.assertIs(name, node2.name) | |||
node1.name = "asdf" | |||
node2.name = "téstïng" | |||
self.assertWikicodeEqual(wraptext("asdf"), node1.name) | |||
self.assertWikicodeEqual(wraptext("téstïng"), node2.name) | |||
def test_params(): | |||
"""test getter for the params attribute""" | |||
node1 = Template(wraptext("foobar")) | |||
plist = [pgenh("1", "bar"), pgens("abc", "def")] | |||
node2 = Template(wraptext("foo"), plist) | |||
assert [] == node1.params | |||
assert plist is node2.params | |||
def test_params(self): | |||
"""test getter for the params attribute""" | |||
node1 = Template(wraptext("foobar")) | |||
plist = [pgenh("1", "bar"), pgens("abc", "def")] | |||
node2 = Template(wraptext("foo"), plist) | |||
self.assertEqual([], node1.params) | |||
self.assertIs(plist, node2.params) | |||
def test_has(): | |||
"""test Template.has()""" | |||
node1 = Template(wraptext("foobar")) | |||
node2 = Template(wraptext("foo"), | |||
[pgenh("1", "bar"), pgens("\nabc ", "def")]) | |||
node3 = Template(wraptext("foo"), | |||
[pgenh("1", "a"), pgens("b", "c"), pgens("1", "d")]) | |||
node4 = Template(wraptext("foo"), [pgenh("1", "a"), pgens("b", " ")]) | |||
assert node1.has("foobar", False) is False | |||
assert node2.has(1, False) is True | |||
assert node2.has("abc", False) is True | |||
assert node2.has("def", False) is False | |||
assert node3.has("1", False) is True | |||
assert node3.has(" b ", False) is True | |||
assert node4.has("b", False) is True | |||
assert node3.has("b", True) is True | |||
assert node4.has("b", True) is False | |||
assert node1.has_param("foobar", False) is False | |||
assert node2.has_param(1, False) is True | |||
def test_has(self): | |||
"""test Template.has()""" | |||
node1 = Template(wraptext("foobar")) | |||
node2 = Template(wraptext("foo"), | |||
[pgenh("1", "bar"), pgens("\nabc ", "def")]) | |||
node3 = Template(wraptext("foo"), | |||
[pgenh("1", "a"), pgens("b", "c"), pgens("1", "d")]) | |||
node4 = Template(wraptext("foo"), [pgenh("1", "a"), pgens("b", " ")]) | |||
self.assertFalse(node1.has("foobar", False)) | |||
self.assertTrue(node2.has(1, False)) | |||
self.assertTrue(node2.has("abc", False)) | |||
self.assertFalse(node2.has("def", False)) | |||
self.assertTrue(node3.has("1", False)) | |||
self.assertTrue(node3.has(" b ", False)) | |||
self.assertTrue(node4.has("b", False)) | |||
self.assertTrue(node3.has("b", True)) | |||
self.assertFalse(node4.has("b", True)) | |||
self.assertFalse(node1.has_param("foobar", False)) | |||
self.assertTrue(node2.has_param(1, False)) | |||
def test_get(): | |||
"""test Template.get()""" | |||
node1 = Template(wraptext("foobar")) | |||
node2p1 = pgenh("1", "bar") | |||
node2p2 = pgens("abc", "def") | |||
node2 = Template(wraptext("foo"), [node2p1, node2p2]) | |||
node3p1 = pgens("b", "c") | |||
node3p2 = pgens("1", "d") | |||
node3 = Template(wraptext("foo"), [pgenh("1", "a"), node3p1, node3p2]) | |||
node4p1 = pgens(" b", " ") | |||
node4 = Template(wraptext("foo"), [pgenh("1", "a"), node4p1]) | |||
with pytest.raises(ValueError): | |||
node1.get("foobar") | |||
assert node2p1 is node2.get(1) | |||
assert node2p2 is node2.get("abc") | |||
with pytest.raises(ValueError): | |||
node2.get("def") | |||
assert node3p1 is node3.get("b") | |||
assert node3p2 is node3.get("1") | |||
assert node4p1 is node4.get("b ") | |||
def test_get(self): | |||
"""test Template.get()""" | |||
node1 = Template(wraptext("foobar")) | |||
node2p1 = pgenh("1", "bar") | |||
node2p2 = pgens("abc", "def") | |||
node2 = Template(wraptext("foo"), [node2p1, node2p2]) | |||
node3p1 = pgens("b", "c") | |||
node3p2 = pgens("1", "d") | |||
node3 = Template(wraptext("foo"), [pgenh("1", "a"), node3p1, node3p2]) | |||
node4p1 = pgens(" b", " ") | |||
node4 = Template(wraptext("foo"), [pgenh("1", "a"), node4p1]) | |||
self.assertRaises(ValueError, node1.get, "foobar") | |||
self.assertIs(node2p1, node2.get(1)) | |||
self.assertIs(node2p2, node2.get("abc")) | |||
self.assertRaises(ValueError, node2.get, "def") | |||
self.assertIs(node3p1, node3.get("b")) | |||
self.assertIs(node3p2, node3.get("1")) | |||
self.assertIs(node4p1, node4.get("b ")) | |||
def test_add(): | |||
"""test Template.add()""" | |||
node1 = Template(wraptext("a"), [pgens("b", "c"), pgenh("1", "d")]) | |||
node2 = Template(wraptext("a"), [pgens("b", "c"), pgenh("1", "d")]) | |||
node3 = Template(wraptext("a"), [pgens("b", "c"), pgenh("1", "d")]) | |||
node4 = Template(wraptext("a"), [pgens("b", "c"), pgenh("1", "d")]) | |||
node5 = Template(wraptext("a"), [pgens("b", "c"), | |||
pgens(" d ", "e")]) | |||
node6 = Template(wraptext("a"), [pgens("b", "c"), pgens("b", "d"), | |||
pgens("b", "e")]) | |||
node7 = Template(wraptext("a"), [pgens("b", "c"), pgenh("1", "d")]) | |||
node8p = pgenh("1", "d") | |||
node8 = Template(wraptext("a"), [pgens("b", "c"), node8p]) | |||
node9 = Template(wraptext("a"), [pgens("b", "c"), pgenh("1", "d")]) | |||
node10 = Template(wraptext("a"), [pgens("b", "c"), pgenh("1", "e")]) | |||
node11 = Template(wraptext("a"), [pgens("b", "c")]) | |||
node12 = Template(wraptext("a"), [pgens("b", "c")]) | |||
node13 = Template(wraptext("a"), [ | |||
pgens("\nb ", " c"), pgens("\nd ", " e"), pgens("\nf ", " g")]) | |||
node14 = Template(wraptext("a\n"), [ | |||
pgens("b ", "c\n"), pgens("d ", " e"), pgens("f ", "g\n"), | |||
pgens("h ", " i\n")]) | |||
node15 = Template(wraptext("a"), [ | |||
pgens("b ", " c\n"), pgens("\nd ", " e"), pgens("\nf ", "g ")]) | |||
node16 = Template(wraptext("a"), [ | |||
pgens("\nb ", " c"), pgens("\nd ", " e"), pgens("\nf ", " g")]) | |||
node17 = Template(wraptext("a"), [pgenh("1", "b")]) | |||
node18 = Template(wraptext("a"), [pgenh("1", "b")]) | |||
node19 = Template(wraptext("a"), [pgenh("1", "b")]) | |||
node20 = Template(wraptext("a"), [pgenh("1", "b"), pgenh("2", "c"), | |||
pgenh("3", "d"), pgenh("4", "e")]) | |||
node21 = Template(wraptext("a"), [pgenh("1", "b"), pgenh("2", "c"), | |||
pgens("4", "d"), pgens("5", "e")]) | |||
node22 = Template(wraptext("a"), [pgenh("1", "b"), pgenh("2", "c"), | |||
pgens("4", "d"), pgens("5", "e")]) | |||
node23 = Template(wraptext("a"), [pgenh("1", "b")]) | |||
node24 = Template(wraptext("a"), [pgenh("1", "b")]) | |||
node25 = Template(wraptext("a"), [pgens("b", "c")]) | |||
node26 = Template(wraptext("a"), [pgenh("1", "b")]) | |||
node27 = Template(wraptext("a"), [pgenh("1", "b")]) | |||
node28 = Template(wraptext("a"), [pgens("1", "b")]) | |||
node29 = Template(wraptext("a"), [ | |||
pgens("\nb ", " c"), pgens("\nd ", " e"), pgens("\nf ", " g")]) | |||
node30 = Template(wraptext("a\n"), [ | |||
pgens("b ", "c\n"), pgens("d ", " e"), pgens("f ", "g\n"), | |||
pgens("h ", " i\n")]) | |||
node31 = Template(wraptext("a"), [ | |||
pgens("b ", " c\n"), pgens("\nd ", " e"), pgens("\nf ", "g ")]) | |||
node32 = Template(wraptext("a"), [ | |||
pgens("\nb ", " c "), pgens("\nd ", " e "), pgens("\nf ", " g ")]) | |||
node33 = Template(wraptext("a"), [pgens("b", "c"), pgens("d", "e"), | |||
pgens("b", "f"), pgens("b", "h"), | |||
pgens("i", "j")]) | |||
node34 = Template(wraptext("a"), [pgens("1", "b"), pgens("x", "y"), | |||
pgens("1", "c"), pgens("2", "d")]) | |||
node35 = Template(wraptext("a"), [pgens("1", "b"), pgens("x", "y"), | |||
pgenh("1", "c"), pgenh("2", "d")]) | |||
node36 = Template(wraptext("a"), [pgens("b", "c"), pgens("d", "e"), | |||
pgens("f", "g")]) | |||
node37 = Template(wraptext("a"), [pgenh("1", "")]) | |||
node38 = Template(wraptext("abc")) | |||
node39 = Template(wraptext("a"), [pgenh("1", " b ")]) | |||
node40 = Template(wraptext("a"), [pgenh("1", " b"), pgenh("2", " c")]) | |||
node41 = Template(wraptext("a"), [pgens("1", " b"), pgens("2", " c")]) | |||
node42 = Template(wraptext("a"), [pgens("b", " \n")]) | |||
def test_add(self): | |||
"""test Template.add()""" | |||
node1 = Template(wraptext("a"), [pgens("b", "c"), pgenh("1", "d")]) | |||
node2 = Template(wraptext("a"), [pgens("b", "c"), pgenh("1", "d")]) | |||
node3 = Template(wraptext("a"), [pgens("b", "c"), pgenh("1", "d")]) | |||
node4 = Template(wraptext("a"), [pgens("b", "c"), pgenh("1", "d")]) | |||
node5 = Template(wraptext("a"), [pgens("b", "c"), | |||
pgens(" d ", "e")]) | |||
node6 = Template(wraptext("a"), [pgens("b", "c"), pgens("b", "d"), | |||
pgens("b", "e")]) | |||
node7 = Template(wraptext("a"), [pgens("b", "c"), pgenh("1", "d")]) | |||
node8p = pgenh("1", "d") | |||
node8 = Template(wraptext("a"), [pgens("b", "c"), node8p]) | |||
node9 = Template(wraptext("a"), [pgens("b", "c"), pgenh("1", "d")]) | |||
node10 = Template(wraptext("a"), [pgens("b", "c"), pgenh("1", "e")]) | |||
node11 = Template(wraptext("a"), [pgens("b", "c")]) | |||
node12 = Template(wraptext("a"), [pgens("b", "c")]) | |||
node13 = Template(wraptext("a"), [ | |||
pgens("\nb ", " c"), pgens("\nd ", " e"), pgens("\nf ", " g")]) | |||
node14 = Template(wraptext("a\n"), [ | |||
pgens("b ", "c\n"), pgens("d ", " e"), pgens("f ", "g\n"), | |||
pgens("h ", " i\n")]) | |||
node15 = Template(wraptext("a"), [ | |||
pgens("b ", " c\n"), pgens("\nd ", " e"), pgens("\nf ", "g ")]) | |||
node16 = Template(wraptext("a"), [ | |||
pgens("\nb ", " c"), pgens("\nd ", " e"), pgens("\nf ", " g")]) | |||
node17 = Template(wraptext("a"), [pgenh("1", "b")]) | |||
node18 = Template(wraptext("a"), [pgenh("1", "b")]) | |||
node19 = Template(wraptext("a"), [pgenh("1", "b")]) | |||
node20 = Template(wraptext("a"), [pgenh("1", "b"), pgenh("2", "c"), | |||
pgenh("3", "d"), pgenh("4", "e")]) | |||
node21 = Template(wraptext("a"), [pgenh("1", "b"), pgenh("2", "c"), | |||
pgens("4", "d"), pgens("5", "e")]) | |||
node22 = Template(wraptext("a"), [pgenh("1", "b"), pgenh("2", "c"), | |||
pgens("4", "d"), pgens("5", "e")]) | |||
node23 = Template(wraptext("a"), [pgenh("1", "b")]) | |||
node24 = Template(wraptext("a"), [pgenh("1", "b")]) | |||
node25 = Template(wraptext("a"), [pgens("b", "c")]) | |||
node26 = Template(wraptext("a"), [pgenh("1", "b")]) | |||
node27 = Template(wraptext("a"), [pgenh("1", "b")]) | |||
node28 = Template(wraptext("a"), [pgens("1", "b")]) | |||
node29 = Template(wraptext("a"), [ | |||
pgens("\nb ", " c"), pgens("\nd ", " e"), pgens("\nf ", " g")]) | |||
node30 = Template(wraptext("a\n"), [ | |||
pgens("b ", "c\n"), pgens("d ", " e"), pgens("f ", "g\n"), | |||
pgens("h ", " i\n")]) | |||
node31 = Template(wraptext("a"), [ | |||
pgens("b ", " c\n"), pgens("\nd ", " e"), pgens("\nf ", "g ")]) | |||
node32 = Template(wraptext("a"), [ | |||
pgens("\nb ", " c "), pgens("\nd ", " e "), pgens("\nf ", " g ")]) | |||
node33 = Template(wraptext("a"), [pgens("b", "c"), pgens("d", "e"), | |||
pgens("b", "f"), pgens("b", "h"), | |||
pgens("i", "j")]) | |||
node34 = Template(wraptext("a"), [pgens("1", "b"), pgens("x", "y"), | |||
pgens("1", "c"), pgens("2", "d")]) | |||
node35 = Template(wraptext("a"), [pgens("1", "b"), pgens("x", "y"), | |||
pgenh("1", "c"), pgenh("2", "d")]) | |||
node36 = Template(wraptext("a"), [pgens("b", "c"), pgens("d", "e"), | |||
pgens("f", "g")]) | |||
node37 = Template(wraptext("a"), [pgenh("1", "")]) | |||
node38 = Template(wraptext("abc")) | |||
node39 = Template(wraptext("a"), [pgenh("1", " b ")]) | |||
node40 = Template(wraptext("a"), [pgenh("1", " b"), pgenh("2", " c")]) | |||
node41 = Template(wraptext("a"), [pgens("1", " b"), pgens("2", " c")]) | |||
node42 = Template(wraptext("a"), [pgens("b", " \n")]) | |||
node1.add("e", "f", showkey=True) | |||
node2.add(2, "g", showkey=False) | |||
node3.add("e", "foo|bar", showkey=True) | |||
node4.add("e", "f", showkey=True, before="b") | |||
node5.add("f", "g", showkey=True, before=" d ") | |||
node6.add("f", "g", showkey=True, before="b") | |||
with pytest.raises(ValueError): | |||
node7.add("e", "f", showkey=True, before="q") | |||
node8.add("e", "f", showkey=True, before=node8p) | |||
node9.add("e", "f", showkey=True, before=pgenh("1", "d")) | |||
with pytest.raises(ValueError): | |||
node10.add("e", "f", showkey=True, before=pgenh("1", "d")) | |||
node11.add("d", "foo=bar", showkey=True) | |||
node12.add("1", "foo=bar", showkey=False) | |||
node13.add("h", "i", showkey=True) | |||
node14.add("j", "k", showkey=True) | |||
node15.add("h", "i", showkey=True) | |||
node16.add("h", "i", showkey=True, preserve_spacing=False) | |||
node17.add("2", "c") | |||
node18.add("3", "c") | |||
node19.add("c", "d") | |||
node20.add("5", "f") | |||
node21.add("3", "f") | |||
node22.add("6", "f") | |||
node23.add("c", "foo=bar") | |||
node24.add("2", "foo=bar") | |||
node25.add("b", "d") | |||
node26.add("1", "foo=bar") | |||
node27.add("1", "foo=bar", showkey=True) | |||
node28.add("1", "foo=bar", showkey=False) | |||
node29.add("d", "foo") | |||
node30.add("f", "foo") | |||
node31.add("f", "foo") | |||
node32.add("d", "foo", preserve_spacing=False) | |||
node33.add("b", "k") | |||
node34.add("1", "e") | |||
node35.add("1", "e") | |||
node36.add("d", "h", before="b") | |||
node37.add(1, "b") | |||
node38.add("1", "foo") | |||
with pytest.raises(ValueError): | |||
node38.add("z", "bar", showkey=False) | |||
node39.add("1", "c") | |||
node40.add("3", "d") | |||
node41.add("3", "d") | |||
node42.add("b", "hello") | |||
node1.add("e", "f", showkey=True) | |||
node2.add(2, "g", showkey=False) | |||
node3.add("e", "foo|bar", showkey=True) | |||
node4.add("e", "f", showkey=True, before="b") | |||
node5.add("f", "g", showkey=True, before=" d ") | |||
node6.add("f", "g", showkey=True, before="b") | |||
self.assertRaises(ValueError, node7.add, "e", "f", showkey=True, | |||
before="q") | |||
node8.add("e", "f", showkey=True, before=node8p) | |||
node9.add("e", "f", showkey=True, before=pgenh("1", "d")) | |||
self.assertRaises(ValueError, node10.add, "e", "f", showkey=True, | |||
before=pgenh("1", "d")) | |||
node11.add("d", "foo=bar", showkey=True) | |||
node12.add("1", "foo=bar", showkey=False) | |||
node13.add("h", "i", showkey=True) | |||
node14.add("j", "k", showkey=True) | |||
node15.add("h", "i", showkey=True) | |||
node16.add("h", "i", showkey=True, preserve_spacing=False) | |||
node17.add("2", "c") | |||
node18.add("3", "c") | |||
node19.add("c", "d") | |||
node20.add("5", "f") | |||
node21.add("3", "f") | |||
node22.add("6", "f") | |||
node23.add("c", "foo=bar") | |||
node24.add("2", "foo=bar") | |||
node25.add("b", "d") | |||
node26.add("1", "foo=bar") | |||
node27.add("1", "foo=bar", showkey=True) | |||
node28.add("1", "foo=bar", showkey=False) | |||
node29.add("d", "foo") | |||
node30.add("f", "foo") | |||
node31.add("f", "foo") | |||
node32.add("d", "foo", preserve_spacing=False) | |||
node33.add("b", "k") | |||
node34.add("1", "e") | |||
node35.add("1", "e") | |||
node36.add("d", "h", before="b") | |||
node37.add(1, "b") | |||
node38.add("1", "foo") | |||
self.assertRaises(ValueError, node38.add, "z", "bar", showkey=False) | |||
node39.add("1", "c") | |||
node40.add("3", "d") | |||
node41.add("3", "d") | |||
node42.add("b", "hello") | |||
assert "{{a|b=c|d|e=f}}" == node1 | |||
assert "{{a|b=c|d|g}}" == node2 | |||
assert "{{a|b=c|d|e=foo|bar}}" == node3 | |||
assert isinstance(node3.params[2].value.get(1), HTMLEntity) | |||
assert "{{a|e=f|b=c|d}}" == node4 | |||
assert "{{a|b=c|f=g| d =e}}" == node5 | |||
assert "{{a|b=c|b=d|f=g|b=e}}" == node6 | |||
assert "{{a|b=c|d}}" == node7 | |||
assert "{{a|b=c|e=f|d}}" == node8 | |||
assert "{{a|b=c|e=f|d}}" == node9 | |||
assert "{{a|b=c|e}}" == node10 | |||
assert "{{a|b=c|d=foo=bar}}" == node11 | |||
assert "{{a|b=c|foo=bar}}" == node12 | |||
assert isinstance(node12.params[1].value.get(1), HTMLEntity) | |||
assert "{{a|\nb = c|\nd = e|\nf = g|\nh = i}}" == node13 | |||
assert "{{a\n|b =c\n|d = e|f =g\n|h = i\n|j =k\n}}" == node14 | |||
assert "{{a|b = c\n|\nd = e|\nf =g |\nh = i}}" == node15 | |||
assert "{{a|\nb = c|\nd = e|\nf = g|h=i}}" == node16 | |||
assert "{{a|b|c}}" == node17 | |||
assert "{{a|b|3=c}}" == node18 | |||
assert "{{a|b|c=d}}" == node19 | |||
assert "{{a|b|c|d|e|f}}" == node20 | |||
assert "{{a|b|c|4=d|5=e|f}}" == node21 | |||
assert "{{a|b|c|4=d|5=e|6=f}}" == node22 | |||
assert "{{a|b|c=foo=bar}}" == node23 | |||
assert "{{a|b|foo=bar}}" == node24 | |||
assert isinstance(node24.params[1].value.get(1), HTMLEntity) | |||
assert "{{a|b=d}}" == node25 | |||
assert "{{a|foo=bar}}" == node26 | |||
assert isinstance(node26.params[0].value.get(1), HTMLEntity) | |||
assert "{{a|1=foo=bar}}" == node27 | |||
assert "{{a|foo=bar}}" == node28 | |||
assert isinstance(node28.params[0].value.get(1), HTMLEntity) | |||
assert "{{a|\nb = c|\nd = foo|\nf = g}}" == node29 | |||
assert "{{a\n|b =c\n|d = e|f =foo\n|h = i\n}}" == node30 | |||
assert "{{a|b = c\n|\nd = e|\nf =foo }}" == node31 | |||
assert "{{a|\nb = c |\nd =foo|\nf = g }}" == node32 | |||
assert "{{a|b=k|d=e|i=j}}" == node33 | |||
assert "{{a|1=e|x=y|2=d}}" == node34 | |||
assert "{{a|x=y|e|d}}" == node35 | |||
assert "{{a|b=c|d=h|f=g}}" == node36 | |||
assert "{{a|b}}" == node37 | |||
assert "{{abc|foo}}" == node38 | |||
assert "{{a|c}}" == node39 | |||
assert "{{a| b| c|d}}" == node40 | |||
assert "{{a|1= b|2= c|3= d}}" == node41 | |||
assert "{{a|b=hello \n}}" == node42 | |||
self.assertEqual("{{a|b=c|d|e=f}}", node1) | |||
self.assertEqual("{{a|b=c|d|g}}", node2) | |||
self.assertEqual("{{a|b=c|d|e=foo|bar}}", node3) | |||
self.assertIsInstance(node3.params[2].value.get(1), HTMLEntity) | |||
self.assertEqual("{{a|e=f|b=c|d}}", node4) | |||
self.assertEqual("{{a|b=c|f=g| d =e}}", node5) | |||
self.assertEqual("{{a|b=c|b=d|f=g|b=e}}", node6) | |||
self.assertEqual("{{a|b=c|d}}", node7) | |||
self.assertEqual("{{a|b=c|e=f|d}}", node8) | |||
self.assertEqual("{{a|b=c|e=f|d}}", node9) | |||
self.assertEqual("{{a|b=c|e}}", node10) | |||
self.assertEqual("{{a|b=c|d=foo=bar}}", node11) | |||
self.assertEqual("{{a|b=c|foo=bar}}", node12) | |||
self.assertIsInstance(node12.params[1].value.get(1), HTMLEntity) | |||
self.assertEqual("{{a|\nb = c|\nd = e|\nf = g|\nh = i}}", node13) | |||
self.assertEqual("{{a\n|b =c\n|d = e|f =g\n|h = i\n|j =k\n}}", node14) | |||
self.assertEqual("{{a|b = c\n|\nd = e|\nf =g |\nh = i}}", node15) | |||
self.assertEqual("{{a|\nb = c|\nd = e|\nf = g|h=i}}", node16) | |||
self.assertEqual("{{a|b|c}}", node17) | |||
self.assertEqual("{{a|b|3=c}}", node18) | |||
self.assertEqual("{{a|b|c=d}}", node19) | |||
self.assertEqual("{{a|b|c|d|e|f}}", node20) | |||
self.assertEqual("{{a|b|c|4=d|5=e|f}}", node21) | |||
self.assertEqual("{{a|b|c|4=d|5=e|6=f}}", node22) | |||
self.assertEqual("{{a|b|c=foo=bar}}", node23) | |||
self.assertEqual("{{a|b|foo=bar}}", node24) | |||
self.assertIsInstance(node24.params[1].value.get(1), HTMLEntity) | |||
self.assertEqual("{{a|b=d}}", node25) | |||
self.assertEqual("{{a|foo=bar}}", node26) | |||
self.assertIsInstance(node26.params[0].value.get(1), HTMLEntity) | |||
self.assertEqual("{{a|1=foo=bar}}", node27) | |||
self.assertEqual("{{a|foo=bar}}", node28) | |||
self.assertIsInstance(node28.params[0].value.get(1), HTMLEntity) | |||
self.assertEqual("{{a|\nb = c|\nd = foo|\nf = g}}", node29) | |||
self.assertEqual("{{a\n|b =c\n|d = e|f =foo\n|h = i\n}}", node30) | |||
self.assertEqual("{{a|b = c\n|\nd = e|\nf =foo }}", node31) | |||
self.assertEqual("{{a|\nb = c |\nd =foo|\nf = g }}", node32) | |||
self.assertEqual("{{a|b=k|d=e|i=j}}", node33) | |||
self.assertEqual("{{a|1=e|x=y|2=d}}", node34) | |||
self.assertEqual("{{a|x=y|e|d}}", node35) | |||
self.assertEqual("{{a|b=c|d=h|f=g}}", node36) | |||
self.assertEqual("{{a|b}}", node37) | |||
self.assertEqual("{{abc|foo}}", node38) | |||
self.assertEqual("{{a|c}}", node39) | |||
self.assertEqual("{{a| b| c|d}}", node40) | |||
self.assertEqual("{{a|1= b|2= c|3= d}}", node41) | |||
self.assertEqual("{{a|b=hello \n}}", node42) | |||
def test_remove(): | |||
"""test Template.remove()""" | |||
node1 = Template(wraptext("foobar")) | |||
node2 = Template(wraptext("foo"), | |||
[pgenh("1", "bar"), pgens("abc", "def")]) | |||
node3 = Template(wraptext("foo"), | |||
[pgenh("1", "bar"), pgens("abc", "def")]) | |||
node4 = Template(wraptext("foo"), | |||
[pgenh("1", "bar"), pgenh("2", "baz")]) | |||
node5 = Template(wraptext("foo"), [ | |||
pgens(" a", "b"), pgens("b", "c"), pgens("a ", "d")]) | |||
node6 = Template(wraptext("foo"), [ | |||
pgens(" a", "b"), pgens("b", "c"), pgens("a ", "d")]) | |||
node7 = Template(wraptext("foo"), [ | |||
pgens("1 ", "a"), pgens(" 1", "b"), pgens("2", "c")]) | |||
node8 = Template(wraptext("foo"), [ | |||
pgens("1 ", "a"), pgens(" 1", "b"), pgens("2", "c")]) | |||
node9 = Template(wraptext("foo"), [ | |||
pgens("1 ", "a"), pgenh("1", "b"), pgenh("2", "c")]) | |||
node10 = Template(wraptext("foo"), [ | |||
pgens("1 ", "a"), pgenh("1", "b"), pgenh("2", "c")]) | |||
node11 = Template(wraptext("foo"), [ | |||
pgens(" a", "b"), pgens("b", "c"), pgens("a ", "d")]) | |||
node12 = Template(wraptext("foo"), [ | |||
pgens(" a", "b"), pgens("b", "c"), pgens("a ", "d")]) | |||
node13 = Template(wraptext("foo"), [ | |||
pgens(" a", "b"), pgens("b", "c"), pgens("a ", "d")]) | |||
node14 = Template(wraptext("foo"), [ | |||
pgens(" a", "b"), pgens("b", "c"), pgens("a ", "d")]) | |||
node15 = Template(wraptext("foo"), [ | |||
pgens(" a", "b"), pgens("b", "c"), pgens("a ", "d")]) | |||
node16 = Template(wraptext("foo"), [ | |||
pgens(" a", "b"), pgens("b", "c"), pgens("a ", "d")]) | |||
node17 = Template(wraptext("foo"), [ | |||
pgens("1 ", "a"), pgenh("1", "b"), pgenh("2", "c")]) | |||
node18 = Template(wraptext("foo"), [ | |||
pgens("1 ", "a"), pgenh("1", "b"), pgenh("2", "c")]) | |||
node19 = Template(wraptext("foo"), [ | |||
pgens("1 ", "a"), pgenh("1", "b"), pgenh("2", "c")]) | |||
node20 = Template(wraptext("foo"), [ | |||
pgens("1 ", "a"), pgenh("1", "b"), pgenh("2", "c")]) | |||
node21 = Template(wraptext("foo"), [ | |||
pgens("a", "b"), pgens("c", "d"), pgens("e", "f"), pgens("a", "b"), | |||
pgens("a", "b")]) | |||
node22 = Template(wraptext("foo"), [ | |||
pgens("a", "b"), pgens("c", "d"), pgens("e", "f"), pgens("a", "b"), | |||
pgens("a", "b")]) | |||
node23 = Template(wraptext("foo"), [ | |||
pgens("a", "b"), pgens("c", "d"), pgens("e", "f"), pgens("a", "b"), | |||
pgens("a", "b")]) | |||
node24 = Template(wraptext("foo"), [ | |||
pgens("a", "b"), pgens("c", "d"), pgens("e", "f"), pgens("a", "b"), | |||
pgens("a", "b")]) | |||
node25 = Template(wraptext("foo"), [ | |||
pgens("a", "b"), pgens("c", "d"), pgens("e", "f"), pgens("a", "b"), | |||
pgens("a", "b")]) | |||
node26 = Template(wraptext("foo"), [ | |||
pgens("a", "b"), pgens("c", "d"), pgens("e", "f"), pgens("a", "b"), | |||
pgens("a", "b")]) | |||
node27 = Template(wraptext("foo"), [pgenh("1", "bar")]) | |||
node28 = Template(wraptext("foo"), [pgenh("1", "bar")]) | |||
def test_remove(self): | |||
"""test Template.remove()""" | |||
node1 = Template(wraptext("foobar")) | |||
node2 = Template(wraptext("foo"), | |||
[pgenh("1", "bar"), pgens("abc", "def")]) | |||
node3 = Template(wraptext("foo"), | |||
[pgenh("1", "bar"), pgens("abc", "def")]) | |||
node4 = Template(wraptext("foo"), | |||
[pgenh("1", "bar"), pgenh("2", "baz")]) | |||
node5 = Template(wraptext("foo"), [ | |||
pgens(" a", "b"), pgens("b", "c"), pgens("a ", "d")]) | |||
node6 = Template(wraptext("foo"), [ | |||
pgens(" a", "b"), pgens("b", "c"), pgens("a ", "d")]) | |||
node7 = Template(wraptext("foo"), [ | |||
pgens("1 ", "a"), pgens(" 1", "b"), pgens("2", "c")]) | |||
node8 = Template(wraptext("foo"), [ | |||
pgens("1 ", "a"), pgens(" 1", "b"), pgens("2", "c")]) | |||
node9 = Template(wraptext("foo"), [ | |||
pgens("1 ", "a"), pgenh("1", "b"), pgenh("2", "c")]) | |||
node10 = Template(wraptext("foo"), [ | |||
pgens("1 ", "a"), pgenh("1", "b"), pgenh("2", "c")]) | |||
node11 = Template(wraptext("foo"), [ | |||
pgens(" a", "b"), pgens("b", "c"), pgens("a ", "d")]) | |||
node12 = Template(wraptext("foo"), [ | |||
pgens(" a", "b"), pgens("b", "c"), pgens("a ", "d")]) | |||
node13 = Template(wraptext("foo"), [ | |||
pgens(" a", "b"), pgens("b", "c"), pgens("a ", "d")]) | |||
node14 = Template(wraptext("foo"), [ | |||
pgens(" a", "b"), pgens("b", "c"), pgens("a ", "d")]) | |||
node15 = Template(wraptext("foo"), [ | |||
pgens(" a", "b"), pgens("b", "c"), pgens("a ", "d")]) | |||
node16 = Template(wraptext("foo"), [ | |||
pgens(" a", "b"), pgens("b", "c"), pgens("a ", "d")]) | |||
node17 = Template(wraptext("foo"), [ | |||
pgens("1 ", "a"), pgenh("1", "b"), pgenh("2", "c")]) | |||
node18 = Template(wraptext("foo"), [ | |||
pgens("1 ", "a"), pgenh("1", "b"), pgenh("2", "c")]) | |||
node19 = Template(wraptext("foo"), [ | |||
pgens("1 ", "a"), pgenh("1", "b"), pgenh("2", "c")]) | |||
node20 = Template(wraptext("foo"), [ | |||
pgens("1 ", "a"), pgenh("1", "b"), pgenh("2", "c")]) | |||
node21 = Template(wraptext("foo"), [ | |||
pgens("a", "b"), pgens("c", "d"), pgens("e", "f"), pgens("a", "b"), | |||
pgens("a", "b")]) | |||
node22 = Template(wraptext("foo"), [ | |||
pgens("a", "b"), pgens("c", "d"), pgens("e", "f"), pgens("a", "b"), | |||
pgens("a", "b")]) | |||
node23 = Template(wraptext("foo"), [ | |||
pgens("a", "b"), pgens("c", "d"), pgens("e", "f"), pgens("a", "b"), | |||
pgens("a", "b")]) | |||
node24 = Template(wraptext("foo"), [ | |||
pgens("a", "b"), pgens("c", "d"), pgens("e", "f"), pgens("a", "b"), | |||
pgens("a", "b")]) | |||
node25 = Template(wraptext("foo"), [ | |||
pgens("a", "b"), pgens("c", "d"), pgens("e", "f"), pgens("a", "b"), | |||
pgens("a", "b")]) | |||
node26 = Template(wraptext("foo"), [ | |||
pgens("a", "b"), pgens("c", "d"), pgens("e", "f"), pgens("a", "b"), | |||
pgens("a", "b")]) | |||
node27 = Template(wraptext("foo"), [pgenh("1", "bar")]) | |||
node28 = Template(wraptext("foo"), [pgenh("1", "bar")]) | |||
node2.remove("1") | |||
node2.remove("abc") | |||
node3.remove(1, keep_field=True) | |||
node3.remove("abc", keep_field=True) | |||
node4.remove("1", keep_field=False) | |||
node5.remove("a", keep_field=False) | |||
node6.remove("a", keep_field=True) | |||
node7.remove(1, keep_field=True) | |||
node8.remove(1, keep_field=False) | |||
node9.remove(1, keep_field=True) | |||
node10.remove(1, keep_field=False) | |||
node11.remove(node11.params[0], keep_field=False) | |||
node12.remove(node12.params[0], keep_field=True) | |||
node13.remove(node13.params[1], keep_field=False) | |||
node14.remove(node14.params[1], keep_field=True) | |||
node15.remove(node15.params[2], keep_field=False) | |||
node16.remove(node16.params[2], keep_field=True) | |||
node17.remove(node17.params[0], keep_field=False) | |||
node18.remove(node18.params[0], keep_field=True) | |||
node19.remove(node19.params[1], keep_field=False) | |||
node20.remove(node20.params[1], keep_field=True) | |||
node21.remove("a", keep_field=False) | |||
node22.remove("a", keep_field=True) | |||
node23.remove(node23.params[0], keep_field=False) | |||
node24.remove(node24.params[0], keep_field=True) | |||
node25.remove(node25.params[3], keep_field=False) | |||
node26.remove(node26.params[3], keep_field=True) | |||
with pytest.raises(ValueError): | |||
node1.remove(1) | |||
with pytest.raises(ValueError): | |||
node1.remove("a") | |||
with pytest.raises(ValueError): | |||
node2.remove("1") | |||
node2.remove("abc") | |||
node3.remove(1, keep_field=True) | |||
node3.remove("abc", keep_field=True) | |||
node4.remove("1", keep_field=False) | |||
node5.remove("a", keep_field=False) | |||
node6.remove("a", keep_field=True) | |||
node7.remove(1, keep_field=True) | |||
node8.remove(1, keep_field=False) | |||
node9.remove(1, keep_field=True) | |||
node10.remove(1, keep_field=False) | |||
node11.remove(node11.params[0], keep_field=False) | |||
node12.remove(node12.params[0], keep_field=True) | |||
node13.remove(node13.params[1], keep_field=False) | |||
node14.remove(node14.params[1], keep_field=True) | |||
node15.remove(node15.params[2], keep_field=False) | |||
node16.remove(node16.params[2], keep_field=True) | |||
node17.remove(node17.params[0], keep_field=False) | |||
node18.remove(node18.params[0], keep_field=True) | |||
node19.remove(node19.params[1], keep_field=False) | |||
node20.remove(node20.params[1], keep_field=True) | |||
node21.remove("a", keep_field=False) | |||
node22.remove("a", keep_field=True) | |||
node23.remove(node23.params[0], keep_field=False) | |||
node24.remove(node24.params[0], keep_field=True) | |||
node25.remove(node25.params[3], keep_field=False) | |||
node26.remove(node26.params[3], keep_field=True) | |||
assert "{{foo}}" == node2 | |||
assert "{{foo||abc=}}" == node3 | |||
assert "{{foo|2=baz}}" == node4 | |||
assert "{{foo|b=c}}" == node5 | |||
assert "{{foo| a=|b=c}}" == node6 | |||
assert "{{foo|1 =|2=c}}" == node7 | |||
assert "{{foo|2=c}}" == node8 | |||
assert "{{foo||c}}" == node9 | |||
assert "{{foo|2=c}}" == node10 | |||
assert "{{foo|b=c|a =d}}" == node11 | |||
assert "{{foo| a=|b=c|a =d}}" == node12 | |||
assert "{{foo| a=b|a =d}}" == node13 | |||
assert "{{foo| a=b|b=|a =d}}" == node14 | |||
assert "{{foo| a=b|b=c}}" == node15 | |||
assert "{{foo| a=b|b=c|a =}}" == node16 | |||
assert "{{foo|b|c}}" == node17 | |||
assert "{{foo|1 =|b|c}}" == node18 | |||
assert "{{foo|1 =a|2=c}}" == node19 | |||
assert "{{foo|1 =a||c}}" == node20 | |||
assert "{{foo|c=d|e=f}}" == node21 | |||
assert "{{foo|a=|c=d|e=f}}" == node22 | |||
assert "{{foo|c=d|e=f|a=b|a=b}}" == node23 | |||
assert "{{foo|a=|c=d|e=f|a=b|a=b}}" == node24 | |||
assert "{{foo|a=b|c=d|e=f|a=b}}" == node25 | |||
assert "{{foo|a=b|c=d|e=f|a=|a=b}}" == node26 | |||
with pytest.raises(ValueError): | |||
node27.remove(node28.get(1)) | |||
self.assertRaises(ValueError, node1.remove, 1) | |||
self.assertRaises(ValueError, node1.remove, "a") | |||
self.assertRaises(ValueError, node2.remove, "1") | |||
self.assertEqual("{{foo}}", node2) | |||
self.assertEqual("{{foo||abc=}}", node3) | |||
self.assertEqual("{{foo|2=baz}}", node4) | |||
self.assertEqual("{{foo|b=c}}", node5) | |||
self.assertEqual("{{foo| a=|b=c}}", node6) | |||
self.assertEqual("{{foo|1 =|2=c}}", node7) | |||
self.assertEqual("{{foo|2=c}}", node8) | |||
self.assertEqual("{{foo||c}}", node9) | |||
self.assertEqual("{{foo|2=c}}", node10) | |||
self.assertEqual("{{foo|b=c|a =d}}", node11) | |||
self.assertEqual("{{foo| a=|b=c|a =d}}", node12) | |||
self.assertEqual("{{foo| a=b|a =d}}", node13) | |||
self.assertEqual("{{foo| a=b|b=|a =d}}", node14) | |||
self.assertEqual("{{foo| a=b|b=c}}", node15) | |||
self.assertEqual("{{foo| a=b|b=c|a =}}", node16) | |||
self.assertEqual("{{foo|b|c}}", node17) | |||
self.assertEqual("{{foo|1 =|b|c}}", node18) | |||
self.assertEqual("{{foo|1 =a|2=c}}", node19) | |||
self.assertEqual("{{foo|1 =a||c}}", node20) | |||
self.assertEqual("{{foo|c=d|e=f}}", node21) | |||
self.assertEqual("{{foo|a=|c=d|e=f}}", node22) | |||
self.assertEqual("{{foo|c=d|e=f|a=b|a=b}}", node23) | |||
self.assertEqual("{{foo|a=|c=d|e=f|a=b|a=b}}", node24) | |||
self.assertEqual("{{foo|a=b|c=d|e=f|a=b}}", node25) | |||
self.assertEqual("{{foo|a=b|c=d|e=f|a=|a=b}}", node26) | |||
self.assertRaises(ValueError, node27.remove, node28.get(1)) | |||
def test_formatting(self): | |||
"""test realistic param manipulation with complex whitespace formatting | |||
(assumes that parsing works correctly)""" | |||
tests = [ | |||
def test_formatting(): | |||
"""test realistic param manipulation with complex whitespace formatting | |||
(assumes that parsing works correctly)""" | |||
tests = [ | |||
# https://en.wikipedia.org/w/index.php?title=Lamar_County,_Georgia&oldid=792356004 | |||
("""{{Infobox U.S. county | |||
| county = Lamar County | |||
@@ -588,18 +598,15 @@ class TestTemplate(TreeEqualityTestCase): | |||
+ pop = 12345<ref>example ref</ref> | | |||
density_sq_mi = 575""")] | |||
for (original, expected) in tests: | |||
code = parse(original) | |||
template = code.filter_templates()[0] | |||
template.add("pop", "12345<ref>example ref</ref>") | |||
template.add('census estimate yr', "2016", before="pop") | |||
template.remove("census yr") | |||
oldlines = original.splitlines(True) | |||
newlines = str(code).splitlines(True) | |||
difflines = unified_diff(oldlines, newlines, n=1) | |||
diff = "".join(list(difflines)[2:]).strip() | |||
self.assertEqual(expected, diff) | |||
for (original, expected) in tests: | |||
code = parse(original) | |||
template = code.filter_templates()[0] | |||
template.add("pop", "12345<ref>example ref</ref>") | |||
template.add('census estimate yr', "2016", before="pop") | |||
template.remove("census yr") | |||
if __name__ == "__main__": | |||
unittest.main(verbosity=2) | |||
oldlines = original.splitlines(True) | |||
newlines = str(code).splitlines(True) | |||
difflines = unified_diff(oldlines, newlines, n=1) | |||
diff = "".join(list(difflines)[2:]).strip() | |||
assert expected == diff |
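The ported test_formatting() above compares the wikitext before and after the template edits by taking a unified diff of the two strings and keeping only the changed lines. A minimal standalone sketch of that comparison step, using made-up wikitext in place of the Lamar County infobox from the real test:

from difflib import unified_diff

original = "{{Infobox\n| a = 1\n| b = 2\n}}\n"
modified = "{{Infobox\n| a = 1\n| b = 3\n}}\n"

# splitlines(True) keeps the trailing newlines so the diff reproduces lines verbatim.
oldlines = original.splitlines(True)
newlines = modified.splitlines(True)

# n=1 keeps one line of context around each change; the first two yielded lines
# are the ---/+++ file headers, which the test slices off before comparing.
difflines = unified_diff(oldlines, newlines, n=1)
diff = "".join(list(difflines)[2:]).strip()
print(diff)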
@@ -18,51 +18,50 @@ | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
import unittest | |||
""" | |||
Test cases for the Text node. | |||
""" | |||
from mwparserfromhell.nodes import Text | |||
class TestText(unittest.TestCase): | |||
"""Test cases for the Text node.""" | |||
import pytest | |||
def test_str(self): | |||
"""test Text.__str__()""" | |||
node = Text("foobar") | |||
self.assertEqual("foobar", str(node)) | |||
node2 = Text("fóóbar") | |||
self.assertEqual("fóóbar", str(node2)) | |||
from mwparserfromhell.nodes import Text | |||
def test_children(self): | |||
"""test Text.__children__()""" | |||
node = Text("foobar") | |||
gen = node.__children__() | |||
self.assertRaises(StopIteration, next, gen) | |||
def test_str(): | |||
"""test Text.__str__()""" | |||
node = Text("foobar") | |||
assert "foobar" == str(node) | |||
node2 = Text("fóóbar") | |||
assert "fóóbar" == str(node2) | |||
def test_strip(self): | |||
"""test Text.__strip__()""" | |||
node = Text("foobar") | |||
self.assertIs(node, node.__strip__()) | |||
def test_children(): | |||
"""test Text.__children__()""" | |||
node = Text("foobar") | |||
gen = node.__children__() | |||
with pytest.raises(StopIteration): | |||
next(gen) | |||
def test_showtree(self): | |||
"""test Text.__showtree__()""" | |||
output = [] | |||
node1 = Text("foobar") | |||
node2 = Text("fóóbar") | |||
node3 = Text("𐌲𐌿𐍄") | |||
node1.__showtree__(output.append, None, None) | |||
node2.__showtree__(output.append, None, None) | |||
node3.__showtree__(output.append, None, None) | |||
res = ["foobar", r"f\xf3\xf3bar", "\\U00010332\\U0001033f\\U00010344"] | |||
self.assertEqual(res, output) | |||
def test_strip(): | |||
"""test Text.__strip__()""" | |||
node = Text("foobar") | |||
assert node is node.__strip__() | |||
def test_value(self): | |||
"""test getter/setter for the value attribute""" | |||
node = Text("foobar") | |||
self.assertEqual("foobar", node.value) | |||
self.assertIsInstance(node.value, str) | |||
node.value = "héhéhé" | |||
self.assertEqual("héhéhé", node.value) | |||
self.assertIsInstance(node.value, str) | |||
def test_showtree(): | |||
"""test Text.__showtree__()""" | |||
output = [] | |||
node1 = Text("foobar") | |||
node2 = Text("fóóbar") | |||
node3 = Text("𐌲𐌿𐍄") | |||
node1.__showtree__(output.append, None, None) | |||
node2.__showtree__(output.append, None, None) | |||
node3.__showtree__(output.append, None, None) | |||
res = ["foobar", r"f\xf3\xf3bar", "\\U00010332\\U0001033f\\U00010344"] | |||
assert res == output | |||
if __name__ == "__main__": | |||
unittest.main(verbosity=2) | |||
def test_value(): | |||
"""test getter/setter for the value attribute""" | |||
node = Text("foobar") | |||
assert "foobar" == node.value | |||
assert isinstance(node.value, str) | |||
node.value = "héhéhé" | |||
assert "héhéhé" == node.value | |||
assert isinstance(node.value, str) |
@@ -0,0 +1,126 @@ | |||
# Copyright (C) 2012-2016 Ben Kurtovic <ben.kurtovic@gmail.com> | |||
# | |||
# Permission is hereby granted, free of charge, to any person obtaining a copy | |||
# of this software and associated documentation files (the "Software"), to deal | |||
# in the Software without restriction, including without limitation the rights | |||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | |||
# copies of the Software, and to permit persons to whom the Software is | |||
# furnished to do so, subject to the following conditions: | |||
# | |||
# The above copyright notice and this permission notice shall be included in | |||
# all copies or substantial portions of the Software. | |||
# | |||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | |||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | |||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
import codecs | |||
from os import listdir, path | |||
import warnings | |||
import pytest | |||
from mwparserfromhell.parser import contexts, tokens | |||
from mwparserfromhell.parser.builder import Builder | |||
from mwparserfromhell.parser.tokenizer import Tokenizer as PyTokenizer | |||
try: | |||
from mwparserfromhell.parser._tokenizer import CTokenizer | |||
except ImportError: | |||
CTokenizer = None | |||
class _TestParseError(Exception): | |||
"""Raised internally when a test could not be parsed.""" | |||
def _parse_test(test, data): | |||
"""Parse an individual *test*, storing its info in *data*.""" | |||
for line in test.strip().splitlines(): | |||
if line.startswith("name:"): | |||
data["name"] = line[len("name:"):].strip() | |||
elif line.startswith("label:"): | |||
data["label"] = line[len("label:"):].strip() | |||
elif line.startswith("input:"): | |||
raw = line[len("input:"):].strip() | |||
if raw[0] == '"' and raw[-1] == '"': | |||
raw = raw[1:-1] | |||
raw = raw.encode("raw_unicode_escape") | |||
data["input"] = raw.decode("unicode_escape") | |||
elif line.startswith("output:"): | |||
raw = line[len("output:"):].strip() | |||
try: | |||
data["output"] = eval(raw, vars(tokens)) | |||
except Exception as err: | |||
raise _TestParseError(err) from err | |||
def _load_tests(filename, name, text): | |||
"""Load all tests in *text* from the file *filename*.""" | |||
tests = text.split("\n---\n") | |||
for test in tests: | |||
data = {"name": None, "label": None, "input": None, "output": None} | |||
try: | |||
_parse_test(test, data) | |||
except _TestParseError as err: | |||
if data["name"]: | |||
error = "Could not parse test '{0}' in '{1}':\n\t{2}" | |||
warnings.warn(error.format(data["name"], filename, err)) | |||
else: | |||
error = "Could not parse a test in '{0}':\n\t{1}" | |||
warnings.warn(error.format(filename, err)) | |||
continue | |||
if not data["name"]: | |||
error = "A test in '{0}' was ignored because it lacked a name" | |||
warnings.warn(error.format(filename)) | |||
continue | |||
if data["input"] is None or data["output"] is None: | |||
error = "Test '{}' in '{}' was ignored because it lacked an input or an output" | |||
warnings.warn(error.format(data["name"], filename)) | |||
continue | |||
# Include test filename in name | |||
data['name'] = '{}:{}'.format(name, data['name']) | |||
yield data | |||
def build(): | |||
"""Load and install all tests from the 'tokenizer' directory.""" | |||
directory = path.join(path.dirname(__file__), "tokenizer") | |||
extension = ".mwtest" | |||
for filename in listdir(directory): | |||
if not filename.endswith(extension): | |||
continue | |||
fullname = path.join(directory, filename) | |||
with codecs.open(fullname, "r", encoding="utf8") as fp: | |||
text = fp.read() | |||
name = path.split(fullname)[1][:-len(extension)] | |||
yield from _load_tests(fullname, name, text) | |||
@pytest.mark.parametrize("tokenizer", filter(None, ( | |||
CTokenizer, PyTokenizer | |||
)), ids=lambda t: 'CTokenizer' if t.USES_C else 'PyTokenizer') | |||
@pytest.mark.parametrize("data", build(), ids=lambda data: data['name']) | |||
def test_tokenizer(tokenizer, data): | |||
expected = data["output"] | |||
actual = tokenizer().tokenize(data["input"]) | |||
assert expected == actual | |||
@pytest.mark.parametrize("data", build(), ids=lambda data: data['name']) | |||
def test_roundtrip(data): | |||
expected = data["input"] | |||
actual = str(Builder().build(data["output"][:])) | |||
assert expected == actual | |||
@pytest.mark.skipif(CTokenizer is None, reason='CTokenizer not available') | |||
def test_c_tokenizer_uses_c(): | |||
"""make sure the C tokenizer identifies as using a C extension""" | |||
assert CTokenizer.USES_C is True | |||
assert CTokenizer().USES_C is True | |||
def test_describe_context(): | |||
assert "" == contexts.describe(0) | |||
ctx = contexts.describe(contexts.TEMPLATE_PARAM_KEY|contexts.HAS_TEXT) | |||
assert "TEMPLATE_PARAM_KEY|HAS_TEXT" == ctx |
@@ -18,79 +18,76 @@ | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
import unittest | |||
""" | |||
Test cases for the Token class and its subclasses. | |||
""" | |||
from mwparserfromhell.parser import tokens | |||
class TestTokens(unittest.TestCase): | |||
"""Test cases for the Token class and its subclasses.""" | |||
import pytest | |||
def test_issubclass(self): | |||
"""check that all classes within the tokens module are really Tokens""" | |||
for name in tokens.__all__: | |||
klass = getattr(tokens, name) | |||
self.assertTrue(issubclass(klass, tokens.Token)) | |||
self.assertIsInstance(klass(), klass) | |||
self.assertIsInstance(klass(), tokens.Token) | |||
from mwparserfromhell.parser import tokens | |||
def test_attributes(self): | |||
"""check that Token attributes can be managed properly""" | |||
token1 = tokens.Token() | |||
token2 = tokens.Token(foo="bar", baz=123) | |||
@pytest.mark.parametrize("name", tokens.__all__) | |||
def test_issubclass(name): | |||
"""check that all classes within the tokens module are really Tokens""" | |||
klass = getattr(tokens, name) | |||
assert issubclass(klass, tokens.Token) is True | |||
assert isinstance(klass(), klass) | |||
assert isinstance(klass(), tokens.Token) | |||
self.assertEqual("bar", token2.foo) | |||
self.assertEqual(123, token2.baz) | |||
self.assertFalse(token1.foo) | |||
self.assertFalse(token2.bar) | |||
def test_attributes(): | |||
"""check that Token attributes can be managed properly""" | |||
token1 = tokens.Token() | |||
token2 = tokens.Token(foo="bar", baz=123) | |||
token1.spam = "eggs" | |||
token2.foo = "ham" | |||
del token2.baz | |||
assert "bar" == token2.foo | |||
assert 123 == token2.baz | |||
assert token1.foo is None | |||
assert token2.bar is None | |||
self.assertEqual("eggs", token1.spam) | |||
self.assertEqual("ham", token2.foo) | |||
self.assertFalse(token2.baz) | |||
self.assertRaises(KeyError, delattr, token2, "baz") | |||
token1.spam = "eggs" | |||
token2.foo = "ham" | |||
del token2.baz | |||
def test_repr(self): | |||
"""check that repr() on a Token works as expected""" | |||
token1 = tokens.Token() | |||
token2 = tokens.Token(foo="bar", baz=123) | |||
token3 = tokens.Text(text="earwig" * 100) | |||
hundredchars = ("earwig" * 100)[:97] + "..." | |||
assert "eggs" == token1.spam | |||
assert "ham" == token2.foo | |||
assert token2.baz is None | |||
with pytest.raises(KeyError): | |||
token2.__delattr__("baz") | |||
self.assertEqual("Token()", repr(token1)) | |||
self.assertTrue(repr(token2) in ( | |||
"Token(foo='bar', baz=123)", "Token(baz=123, foo='bar')")) | |||
self.assertEqual("Text(text='" + hundredchars + "')", repr(token3)) | |||
def test_repr(): | |||
"""check that repr() on a Token works as expected""" | |||
token1 = tokens.Token() | |||
token2 = tokens.Token(foo="bar", baz=123) | |||
token3 = tokens.Text(text="earwig" * 100) | |||
hundredchars = ("earwig" * 100)[:97] + "..." | |||
def test_equality(self): | |||
"""check that equivalent tokens are considered equal""" | |||
token1 = tokens.Token() | |||
token2 = tokens.Token() | |||
token3 = tokens.Token(foo="bar", baz=123) | |||
token4 = tokens.Text(text="asdf") | |||
token5 = tokens.Text(text="asdf") | |||
token6 = tokens.TemplateOpen(text="asdf") | |||
assert "Token()" == repr(token1) | |||
assert repr(token2) in ("Token(foo='bar', baz=123)", "Token(baz=123, foo='bar')") | |||
assert "Text(text='" + hundredchars + "')" == repr(token3) | |||
self.assertEqual(token1, token2) | |||
self.assertEqual(token2, token1) | |||
self.assertEqual(token4, token5) | |||
self.assertEqual(token5, token4) | |||
self.assertNotEqual(token1, token3) | |||
self.assertNotEqual(token2, token3) | |||
self.assertNotEqual(token4, token6) | |||
self.assertNotEqual(token5, token6) | |||
def test_equality(): | |||
"""check that equivalent tokens are considered equal""" | |||
token1 = tokens.Token() | |||
token2 = tokens.Token() | |||
token3 = tokens.Token(foo="bar", baz=123) | |||
token4 = tokens.Text(text="asdf") | |||
token5 = tokens.Text(text="asdf") | |||
token6 = tokens.TemplateOpen(text="asdf") | |||
def test_repr_equality(self): | |||
"check that eval(repr(token)) == token" | |||
tests = [ | |||
tokens.Token(), | |||
tokens.Token(foo="bar", baz=123), | |||
tokens.Text(text="earwig") | |||
] | |||
for token in tests: | |||
self.assertEqual(token, eval(repr(token), vars(tokens))) | |||
assert token1 == token2 | |||
assert token2 == token1 | |||
assert token4 == token5 | |||
assert token5 == token4 | |||
assert token1 != token3 | |||
assert token2 != token3 | |||
assert token4 != token6 | |||
assert token5 != token6 | |||
if __name__ == "__main__": | |||
unittest.main(verbosity=2) | |||
@pytest.mark.parametrize("token", [ | |||
tokens.Token(), | |||
tokens.Token(foo="bar", baz=123), | |||
tokens.Text(text="earwig") | |||
]) | |||
def test_repr_equality(token): | |||
"""check that eval(repr(token)) == token""" | |||
assert token == eval(repr(token), vars(tokens)) |
@@ -18,42 +18,39 @@ | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
import unittest | |||
""" | |||
Tests for the utils module, which provides parse_anything(). | |||
""" | |||
import pytest | |||
from mwparserfromhell.nodes import Template, Text | |||
from mwparserfromhell.utils import parse_anything | |||
from .conftest import assert_wikicode_equal, wrap, wraptext | |||
from ._test_tree_equality import TreeEqualityTestCase, wrap, wraptext | |||
class TestUtils(TreeEqualityTestCase): | |||
"""Tests for the utils module, which provides parse_anything().""" | |||
def test_parse_anything_valid(self): | |||
"""tests for valid input to utils.parse_anything()""" | |||
tests = [ | |||
(wraptext("foobar"), wraptext("foobar")), | |||
(Template(wraptext("spam")), wrap([Template(wraptext("spam"))])), | |||
("fóóbar", wraptext("fóóbar")), | |||
(b"foob\xc3\xa1r", wraptext("foobár")), | |||
(123, wraptext("123")), | |||
(True, wraptext("True")), | |||
(None, wrap([])), | |||
([Text("foo"), Text("bar"), Text("baz")], | |||
wraptext("foo", "bar", "baz")), | |||
([wraptext("foo"), Text("bar"), "baz", 123, 456], | |||
wraptext("foo", "bar", "baz", "123", "456")), | |||
([[[([[((("foo",),),)], "bar"],)]]], wraptext("foo", "bar")) | |||
] | |||
for test, valid in tests: | |||
self.assertWikicodeEqual(valid, parse_anything(test)) | |||
def test_parse_anything_invalid(self): | |||
"""tests for invalid input to utils.parse_anything()""" | |||
self.assertRaises(ValueError, parse_anything, Ellipsis) | |||
self.assertRaises(ValueError, parse_anything, object) | |||
self.assertRaises(ValueError, parse_anything, object()) | |||
self.assertRaises(ValueError, parse_anything, type) | |||
self.assertRaises(ValueError, parse_anything, ["foo", [object]]) | |||
@pytest.mark.parametrize("test,valid", [ | |||
(wraptext("foobar"), wraptext("foobar")), | |||
(Template(wraptext("spam")), wrap([Template(wraptext("spam"))])), | |||
("fóóbar", wraptext("fóóbar")), | |||
(b"foob\xc3\xa1r", wraptext("foobár")), | |||
(123, wraptext("123")), | |||
(True, wraptext("True")), | |||
(None, wrap([])), | |||
([Text("foo"), Text("bar"), Text("baz")], | |||
wraptext("foo", "bar", "baz")), | |||
([wraptext("foo"), Text("bar"), "baz", 123, 456], | |||
wraptext("foo", "bar", "baz", "123", "456")), | |||
([[[([[((("foo",),),)], "bar"],)]]], wraptext("foo", "bar")) | |||
]) | |||
def test_parse_anything_valid(test, valid): | |||
"""tests for valid input to utils.parse_anything()""" | |||
assert_wikicode_equal(valid, parse_anything(test)) | |||
if __name__ == "__main__": | |||
unittest.main(verbosity=2) | |||
@pytest.mark.parametrize("invalid", [ | |||
Ellipsis, object, object(), type, | |||
["foo", [object]] | |||
]) | |||
def test_parse_anything_invalid(invalid): | |||
"""tests for invalid input to utils.parse_anything()""" | |||
with pytest.raises(ValueError): | |||
parse_anything(invalid) |
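The parametrized cases above double as a quick reference for parse_anything(): it coerces strings, bytes, numbers, existing nodes, None, and arbitrarily nested iterables into a single Wikicode object, and raises ValueError for anything else. A minimal usage sketch based on those cases:

from mwparserfromhell.utils import parse_anything

code = parse_anything(["foo", 123, None])  # mixed, nested input is flattened
print(code)                                # -> foo123

try:
    parse_anything(object())               # unsupported types raise ValueError
except ValueError:
    print("rejected")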
@@ -18,79 +18,78 @@ | |||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
# SOFTWARE. | |||
import unittest | |||
""" | |||
Test cases for the Wikilink node. | |||
""" | |||
from mwparserfromhell.nodes import Text, Wikilink | |||
from ._test_tree_equality import TreeEqualityTestCase, wrap, wraptext | |||
import pytest | |||
class TestWikilink(TreeEqualityTestCase): | |||
"""Test cases for the Wikilink node.""" | |||
def test_str(self): | |||
"""test Wikilink.__str__()""" | |||
node = Wikilink(wraptext("foobar")) | |||
self.assertEqual("[[foobar]]", str(node)) | |||
node2 = Wikilink(wraptext("foo"), wraptext("bar")) | |||
self.assertEqual("[[foo|bar]]", str(node2)) | |||
from mwparserfromhell.nodes import Text, Wikilink | |||
from .conftest import assert_wikicode_equal, wrap, wraptext | |||
def test_children(self): | |||
"""test Wikilink.__children__()""" | |||
node1 = Wikilink(wraptext("foobar")) | |||
node2 = Wikilink(wraptext("foo"), wrap([Text("bar"), Text("baz")])) | |||
gen1 = node1.__children__() | |||
gen2 = node2.__children__() | |||
self.assertEqual(node1.title, next(gen1)) | |||
self.assertEqual(node2.title, next(gen2)) | |||
self.assertEqual(node2.text, next(gen2)) | |||
self.assertRaises(StopIteration, next, gen1) | |||
self.assertRaises(StopIteration, next, gen2) | |||
def test_str(): | |||
"""test Wikilink.__str__()""" | |||
node = Wikilink(wraptext("foobar")) | |||
assert "[[foobar]]" == str(node) | |||
node2 = Wikilink(wraptext("foo"), wraptext("bar")) | |||
assert "[[foo|bar]]" == str(node2) | |||
def test_strip(self): | |||
"""test Wikilink.__strip__()""" | |||
node = Wikilink(wraptext("foobar")) | |||
node2 = Wikilink(wraptext("foo"), wraptext("bar")) | |||
self.assertEqual("foobar", node.__strip__()) | |||
self.assertEqual("bar", node2.__strip__()) | |||
def test_children(): | |||
"""test Wikilink.__children__()""" | |||
node1 = Wikilink(wraptext("foobar")) | |||
node2 = Wikilink(wraptext("foo"), wrap([Text("bar"), Text("baz")])) | |||
gen1 = node1.__children__() | |||
gen2 = node2.__children__() | |||
assert node1.title == next(gen1) | |||
assert node2.title == next(gen2) | |||
assert node2.text == next(gen2) | |||
with pytest.raises(StopIteration): | |||
next(gen1) | |||
with pytest.raises(StopIteration): | |||
next(gen2) | |||
def test_showtree(self): | |||
"""test Wikilink.__showtree__()""" | |||
output = [] | |||
getter, marker = object(), object() | |||
get = lambda code: output.append((getter, code)) | |||
mark = lambda: output.append(marker) | |||
node1 = Wikilink(wraptext("foobar")) | |||
node2 = Wikilink(wraptext("foo"), wraptext("bar")) | |||
node1.__showtree__(output.append, get, mark) | |||
node2.__showtree__(output.append, get, mark) | |||
valid = [ | |||
"[[", (getter, node1.title), "]]", "[[", (getter, node2.title), | |||
" | ", marker, (getter, node2.text), "]]"] | |||
self.assertEqual(valid, output) | |||
def test_strip(): | |||
"""test Wikilink.__strip__()""" | |||
node = Wikilink(wraptext("foobar")) | |||
node2 = Wikilink(wraptext("foo"), wraptext("bar")) | |||
assert "foobar" == node.__strip__() | |||
assert "bar" == node2.__strip__() | |||
def test_title(self): | |||
"""test getter/setter for the title attribute""" | |||
title = wraptext("foobar") | |||
node1 = Wikilink(title) | |||
node2 = Wikilink(title, wraptext("baz")) | |||
self.assertIs(title, node1.title) | |||
self.assertIs(title, node2.title) | |||
node1.title = "héhehé" | |||
node2.title = "héhehé" | |||
self.assertWikicodeEqual(wraptext("héhehé"), node1.title) | |||
self.assertWikicodeEqual(wraptext("héhehé"), node2.title) | |||
def test_showtree(): | |||
"""test Wikilink.__showtree__()""" | |||
output = [] | |||
getter, marker = object(), object() | |||
get = lambda code: output.append((getter, code)) | |||
mark = lambda: output.append(marker) | |||
node1 = Wikilink(wraptext("foobar")) | |||
node2 = Wikilink(wraptext("foo"), wraptext("bar")) | |||
node1.__showtree__(output.append, get, mark) | |||
node2.__showtree__(output.append, get, mark) | |||
valid = [ | |||
"[[", (getter, node1.title), "]]", "[[", (getter, node2.title), | |||
" | ", marker, (getter, node2.text), "]]"] | |||
assert valid == output | |||
def test_text(self): | |||
"""test getter/setter for the text attribute""" | |||
text = wraptext("baz") | |||
node1 = Wikilink(wraptext("foobar")) | |||
node2 = Wikilink(wraptext("foobar"), text) | |||
self.assertIs(None, node1.text) | |||
self.assertIs(text, node2.text) | |||
node1.text = "buzz" | |||
node2.text = None | |||
self.assertWikicodeEqual(wraptext("buzz"), node1.text) | |||
self.assertIs(None, node2.text) | |||
def test_title(): | |||
"""test getter/setter for the title attribute""" | |||
title = wraptext("foobar") | |||
node1 = Wikilink(title) | |||
node2 = Wikilink(title, wraptext("baz")) | |||
assert title is node1.title | |||
assert title is node2.title | |||
node1.title = "héhehé" | |||
node2.title = "héhehé" | |||
assert_wikicode_equal(wraptext("héhehé"), node1.title) | |||
assert_wikicode_equal(wraptext("héhehé"), node2.title) | |||
if __name__ == "__main__": | |||
unittest.main(verbosity=2) | |||
def test_text(): | |||
"""test getter/setter for the text attribute""" | |||
text = wraptext("baz") | |||
node1 = Wikilink(wraptext("foobar")) | |||
node2 = Wikilink(wraptext("foobar"), text) | |||
assert None is node1.text | |||
assert text is node2.text | |||
node1.text = "buzz" | |||
node2.text = None | |||
assert_wikicode_equal(wraptext("buzz"), node1.text) | |||
assert None is node2.text |
@@ -153,9 +153,9 @@ output: [ExternalLinkOpen(brackets=True), Text(text="http://example.(com)"), Ext | |||
--- | |||
name: brackets_open_bracket_inside | |||
label: an open bracket inside a bracket-enclosed link that is also included | |||
label: an open bracket inside a bracket-enclosed link that is not included | |||
input: "[http://foobar[baz.com Example]" | |||
output: [ExternalLinkOpen(brackets=True), Text(text="http://foobar[baz.com"), ExternalLinkSeparator(), Text(text="Example"), ExternalLinkClose()] | |||
output: [ExternalLinkOpen(brackets=True), Text(text="http://foobar"), ExternalLinkSeparator(suppress_space=True), Text(text="[baz.com Example"), ExternalLinkClose()] | |||
--- | |||
@@ -478,3 +478,101 @@ name: brackets_scheme_title_but_no_url | |||
label: brackets around a scheme, colon, and slashes, with a title | |||
input: "[http:// Example]" | |||
output: [Text(text="[http:// Example]")] | |||
--- | |||
name: url_preceded_by_non_word_character | |||
label: non-word character immediately before a valid URL | |||
input: "svn+ssh://server.domain.com:/reponame" | |||
output: [Text(text="svn+"), ExternalLinkOpen(brackets=False), Text(text="ssh://server.domain.com:/reponame"), ExternalLinkClose()] | |||
--- | |||
name: url_preceded_by_underscore | |||
label: underscore immediately before a valid URL | |||
input: "svn_ssh://server.domain.com:/reponame" | |||
output: [Text(text="svn_ssh://server.domain.com:/reponame")] | |||
--- | |||
name: url_terminated_by_double_quote | |||
label: a free link terminated by a double quote | |||
input: "http://foo\"bar" | |||
output: [ExternalLinkOpen(brackets=False), Text(text="http://foo"), ExternalLinkClose(), Text(text="\"bar")] | |||
--- | |||
name: url_not_terminated_by_single_quote | |||
label: a free link not terminated by a single quote | |||
input: "http://foo'bar" | |||
output: [ExternalLinkOpen(brackets=False), Text(text="http://foo'bar"), ExternalLinkClose()] | |||
--- | |||
name: url_terminated_by_two_single_quotes | |||
label: a free link terminated by two single quotes | |||
input: "http://foo''bar''" | |||
output: [ExternalLinkOpen(brackets=False), Text(text="http://foo"), ExternalLinkClose(), TagOpenOpen(wiki_markup="''"), Text(text="i"), TagCloseOpen(), Text(text="bar"), TagOpenClose(), Text(text="i"), TagCloseClose()] | |||
--- | |||
name: url_terminated_by_left_angle | |||
label: a free link terminated by a left angle | |||
input: "http://foo<bar" | |||
output: [ExternalLinkOpen(brackets=False), Text(text="http://foo"), ExternalLinkClose(), Text(text="<bar")] | |||
--- | |||
name: url_terminated_by_right_angle | |||
label: a free link terminated by a right angle | |||
input: "http://foo>bar" | |||
output: [ExternalLinkOpen(brackets=False), Text(text="http://foo"), ExternalLinkClose(), Text(text=">bar")] | |||
--- | |||
name: brackets_terminated_by_double_quote | |||
label: an external link terminated by a double quote | |||
input: "[http://foo\"bar]" | |||
output: [ExternalLinkOpen(brackets=True), Text(text="http://foo"), ExternalLinkSeparator(suppress_space=True), Text(text="\"bar"), ExternalLinkClose()] | |||
--- | |||
name: brackets_not_terminated_by_single_quote | |||
label: an external link not terminated by a single quote | |||
input: "[http://foo'bar]" | |||
output: [ExternalLinkOpen(brackets=True), Text(text="http://foo'bar"), ExternalLinkClose()] | |||
--- | |||
name: brackets_terminated_by_two_single_quotes | |||
label: an external link terminated by two single quotes | |||
input: "[http://foo''bar'']" | |||
output: [ExternalLinkOpen(brackets=True), Text(text="http://foo"), ExternalLinkSeparator(suppress_space=True), TagOpenOpen(wiki_markup="''"), Text(text="i"), TagCloseOpen(), Text(text="bar"), TagOpenClose(), Text(text="i"), TagCloseClose(), ExternalLinkClose()] | |||
--- | |||
name: brackets_terminated_by_left_angle | |||
label: an external link terminated by a left angle | |||
input: "[http://foo<bar]" | |||
output: [ExternalLinkOpen(brackets=True), Text(text="http://foo"), ExternalLinkSeparator(suppress_space=True), Text(text="<bar"), ExternalLinkClose()] | |||
--- | |||
name: brackets_terminated_by_right_angle | |||
label: an external link terminated by a right angle | |||
input: "[http://foo>bar]" | |||
output: [ExternalLinkOpen(brackets=True), Text(text="http://foo"), ExternalLinkSeparator(suppress_space=True), Text(text=">bar"), ExternalLinkClose()] | |||
--- | |||
name: scheme_case | |||
label: a free link with uppercase letters in the URL scheme | |||
input: "HtTp://example.com/" | |||
output: [ExternalLinkOpen(brackets=False), Text(text="HtTp://example.com/"), ExternalLinkClose()] | |||
--- | |||
name: bracket_scheme_case | |||
label: an external link with uppercase letters in the URL scheme | |||
input: "[HtTp://example.com/]" | |||
output: [ExternalLinkOpen(brackets=True), Text(text="HtTp://example.com/"), ExternalLinkClose()] |
@@ -143,7 +143,6 @@ label: a bracketed external link nested inside a template, before the end | |||
input: "{{URL|[http://example.com}}]" | |||
output: [Text(text="{{URL|"), ExternalLinkOpen(brackets=True), Text(text="http://example.com}}"), ExternalLinkClose()] | |||
--- | |||
name: comment_inside_bracketed_link | |||
@@ -360,3 +359,10 @@ name: nested_templates_and_style_tags | |||
label: many nested templates and style tags, testing edge case behavior and error recovery near the recursion depth limit (see issue #224) | |||
input: "{{a|'''}}{{b|1='''c''}}{{d|1='''e''}}{{f|1='''g''}}{{h|1='''i''}}{{j|1='''k''}}{{l|1='''m''}}{{n|1='''o''}}{{p|1='''q''}}{{r|1=''s'''}}{{t|1='''u''}}{{v|1='''w''x'''y'''}}\n{|\n|-\n|'''\n|}" | |||
output: [TemplateOpen(), Text(text="a"), TemplateParamSeparator(), Text(text="'''"), TemplateClose(), TemplateOpen(), Text(text="b"), TemplateParamSeparator(), Text(text="1"), TemplateParamEquals(), Text(text="'"), TagOpenOpen(wiki_markup="''"), Text(text="i"), TagCloseOpen(), Text(text="c"), TagOpenClose(), Text(text="i"), TagCloseClose(), TemplateClose(), TemplateOpen(), Text(text="d"), TemplateParamSeparator(), Text(text="1"), TemplateParamEquals(), Text(text="'"), TagOpenOpen(wiki_markup="''"), Text(text="i"), TagCloseOpen(), Text(text="e"), TagOpenClose(), Text(text="i"), TagCloseClose(), TemplateClose(), TemplateOpen(), Text(text="f"), TemplateParamSeparator(), Text(text="1"), TemplateParamEquals(), Text(text="'"), TagOpenOpen(wiki_markup="''"), Text(text="i"), TagCloseOpen(), Text(text="g"), TagOpenClose(), Text(text="i"), TagCloseClose(), TemplateClose(), TemplateOpen(), Text(text="h"), TemplateParamSeparator(), Text(text="1"), TemplateParamEquals(), Text(text="'"), TagOpenOpen(wiki_markup="''"), Text(text="i"), TagCloseOpen(), Text(text="i"), TagOpenClose(), Text(text="i"), TagCloseClose(), TemplateClose(), TemplateOpen(), Text(text="j"), TemplateParamSeparator(), Text(text="1"), TemplateParamEquals(), Text(text="'"), TagOpenOpen(wiki_markup="''"), Text(text="i"), TagCloseOpen(), Text(text="k"), TagOpenClose(), Text(text="i"), TagCloseClose(), TemplateClose(), TemplateOpen(), Text(text="l"), TemplateParamSeparator(), Text(text="1"), TemplateParamEquals(), Text(text="'"), TagOpenOpen(wiki_markup="''"), Text(text="i"), TagCloseOpen(), Text(text="m"), TagOpenClose(), Text(text="i"), TagCloseClose(), TemplateClose(), TemplateOpen(), Text(text="n"), TemplateParamSeparator(), Text(text="1"), TemplateParamEquals(), TagOpenOpen(wiki_markup="'''"), Text(text="b"), TagCloseOpen(), Text(text="o''}}"), TemplateOpen(), Text(text="p"), TemplateParamSeparator(), Text(text="1"), TemplateParamEquals(), Text(text="'"), TagOpenOpen(wiki_markup="''"), Text(text="i"), TagCloseOpen(), Text(text="q"), TagOpenClose(), Text(text="i"), TagCloseClose(), TemplateClose(), TemplateOpen(), Text(text="r"), TemplateParamSeparator(), Text(text="1"), TemplateParamEquals(), Text(text="''s'''"), TemplateClose(), TemplateOpen(), Text(text="t"), TemplateParamSeparator(), Text(text="1"), TemplateParamEquals(), Text(text="'"), TagOpenOpen(wiki_markup="''"), Text(text="i"), TagCloseOpen(), Text(text="u"), TagOpenClose(), Text(text="i"), TagCloseClose(), TemplateClose(), Text(text="{{v|1="), TagOpenClose(), Text(text="b"), TagCloseClose(), Text(text="w''x"), TagOpenOpen(wiki_markup="'''"), Text(text="b"), TagCloseOpen(), Text(text="y"), TagOpenClose(), Text(text="b"), TagCloseClose(), TemplateClose(), Text(text="\n"), TagOpenOpen(wiki_markup="{|"), Text(text="table"), TagCloseOpen(padding="\n"), TagOpenOpen(wiki_markup="|-"), Text(text="tr"), TagCloseOpen(padding="\n"), TagOpenOpen(wiki_markup="|"), Text(text="td"), TagCloseOpen(padding=""), Text(text="'''\n"), TagOpenClose(wiki_markup=""), Text(text="td"), TagCloseClose(), TagOpenClose(wiki_markup=""), Text(text="tr"), TagCloseClose(), TagOpenClose(wiki_markup="|}"), Text(text="table"), TagCloseClose()] | |||
--- | |||
name: wikilink_nested_with_nowiki | |||
label: wikilinks nested within the text of another, but surrounded by nowiki tags | |||
input: [[foo|bar<nowiki>[[baz]][[qux]]</nowiki>]] | |||
output: [WikilinkOpen(), Text(text="foo"), WikilinkSeparator(), Text(text="bar"), TagOpenOpen(), Text(text="nowiki"), TagCloseOpen(padding=""), Text(text="[[baz]][[qux]]"), TagOpenClose(), Text(text="nowiki"), TagCloseClose(), WikilinkClose()] |
@@ -54,20 +54,6 @@ output: [WikilinkOpen(), Text(text="foo"), WikilinkSeparator(), Text(text="bar[b | |||
--- | |||
name: nested | |||
label: a wikilink nested within another | |||
input: "[[foo|[[bar]]]]" | |||
output: [WikilinkOpen(), Text(text="foo"), WikilinkSeparator(), WikilinkOpen(), Text(text="bar"), WikilinkClose(), WikilinkClose()] | |||
name: nested_padding | |||
label: a wikilink nested within another, separated by other data | |||
input: "[[foo|a[[b]]c]]" | |||
output: [WikilinkOpen(), Text(text="foo"), WikilinkSeparator(), Text(text="a"), WikilinkOpen(), Text(text="b"), WikilinkClose(), Text(text="c"), WikilinkClose()] | |||
name: invalid_newline | |||
label: invalid wikilink: newline as only content | |||
input: "[[\n]]" | |||
@@ -103,15 +89,29 @@ output: [Text(text="[[foo[bar]]")] | |||
--- | |||
name: invalid_nested | |||
label: invalid wikilink: trying to nest in the wrong context | |||
name: invalid_nested_text | |||
label: invalid wikilink: nested within the text of another | |||
input: "[[foo|[[bar]]]]" | |||
output: [Text(text="[[foo|"), WikilinkOpen(), Text(text="bar"), WikilinkClose(), Text(text="]]")] | |||
--- | |||
name: invalid_nested_text_2 | |||
label: invalid wikilink: a wikilink nested within the text of another, with additional content | |||
input: "[[foo|a[[b]]c]]" | |||
output: [Text(text="[[foo|a"), WikilinkOpen(), Text(text="b"), WikilinkClose(), Text(text="c]]")] | |||
--- | |||
name: invalid_nested_title | |||
label: invalid wikilink: nested within the title of another | |||
input: "[[foo[[bar]]]]" | |||
output: [Text(text="[[foo"), WikilinkOpen(), Text(text="bar"), WikilinkClose(), Text(text="]]")] | |||
--- | |||
name: invalid_nested_padding | |||
label: invalid wikilink: trying to nest in the wrong context, with a text param | |||
name: invalid_nested_title_and_text | |||
label: invalid wikilink: nested within the title of another, with a text param | |||
input: "[[foo[[bar]]|baz]]" | |||
output: [Text(text="[[foo"), WikilinkOpen(), Text(text="bar"), WikilinkClose(), Text(text="|baz]]")] | |||