diff --git a/mwparserfromhell/nodes/tag.py b/mwparserfromhell/nodes/tag.py
index eb5d1ee..d301d85 100644
--- a/mwparserfromhell/nodes/tag.py
+++ b/mwparserfromhell/nodes/tag.py
@@ -79,8 +79,9 @@ class Tag(TagDefinitions, Node):
                 if attr.value:
                     for child in getter(attr.value):
                         yield attr.value, child
-        for child in getter(self.contents):
-            yield self.contents, child
+        if self.contents:
+            for child in getter(self.contents):
+                yield self.contents, child
 
     def __strip__(self, normalize, collapse):
         if self.type in self.TAGS_VISIBLE:
diff --git a/mwparserfromhell/parser/builder.py b/mwparserfromhell/parser/builder.py
index 60bfaa9..4b468b7 100644
--- a/mwparserfromhell/parser/builder.py
+++ b/mwparserfromhell/parser/builder.py
@@ -191,8 +191,8 @@ class Builder(object):
                 self._push()
             elif isinstance(token, tokens.TagAttrQuote):
                 quoted = True
-            elif isinstance(token, (tokens.TagAttrStart,
-                                    tokens.TagCloseOpen)):
+            elif isinstance(token, (tokens.TagAttrStart, tokens.TagCloseOpen,
+                                    tokens.TagCloseSelfclose)):
                 self._tokens.append(token)
                 if name is not None:
                     return Attribute(name, self._pop(), quoted, padding)
diff --git a/mwparserfromhell/parser/tokenizer.py b/mwparserfromhell/parser/tokenizer.py
index 82f748c..b466de5 100644
--- a/mwparserfromhell/parser/tokenizer.py
+++ b/mwparserfromhell/parser/tokenizer.py
@@ -26,8 +26,8 @@ import re
 
 from . import contexts
 from . import tokens
-from ..nodes.tag import Tag
 from ..compat import htmlentities
+from ..nodes.tag import Tag
 
 __all__ = ["Tokenizer"]
 
@@ -431,7 +431,7 @@ class Tokenizer(object):
         try:
             return Tag.TRANSLATIONS[text]
         except KeyError:
-            self._fail_route()
+            return Tag.TAG_UNKNOWN
 
     def _actually_close_tag_opening(self):
         """Handle cleanup at the end of a opening tag.
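
Taken together, the three hunks above let self-closing tags survive parsing: the builder now also finishes an attribute on a TagCloseSelfclose token, the tokenizer maps unrecognized tag names to Tag.TAG_UNKNOWN instead of failing the route, and Tag.__iternodes__ skips contents that are None. Below is a minimal usage sketch of that combined behaviour, assuming the patched tree is importable as mwparserfromhell and that the tokenizer on this branch treats the input as a self-closing tag; the tag name, attribute, and printed expectations are illustrative and not taken from the patch.

# Hedged sketch, not part of the patch: exercises the behaviour the hunks
# above enable, using only attributes referenced in the diff (type, contents,
# attributes, TAG_UNKNOWN) plus the public parse()/nodes/filter_text API.
import mwparserfromhell

code = mwparserfromhell.parse('<foobar attr="1" />')
tag = code.nodes[0]  # expected to be a Tag node on this branch

# Unrecognized tag names now resolve to Tag.TAG_UNKNOWN instead of making
# the tokenizer fail the route and fall back to plain text.
print(tag.type == tag.TAG_UNKNOWN)

# The builder closes the trailing attribute on TagCloseSelfclose, so the
# attribute is returned even though no TagCloseOpen token follows it.
print(tag.attributes[0].name)

# A self-closing tag carries no contents; recursive traversal (which goes
# through Tag.__iternodes__) now skips it instead of iterating over None.
print(tag.contents is None)
print(code.filter_text(recursive=True))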