# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2014 Ben Kurtovic
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

from __future__ import unicode_literals
from math import log
import re

from . import contexts, tokens, ParserError
from ..compat import htmlentities, range
from ..definitions import (get_html_tag, is_parsable, is_single,
                           is_single_only, is_scheme)

__all__ = ["Tokenizer"]


class BadRoute(Exception):
    """Raised internally when the current tokenization route is invalid."""

    def __init__(self, context=0):
        super(BadRoute, self).__init__()
        self.context = context


class _TagOpenData(object):
    """Stores data about an HTML open tag, like ``<ref name="foo">``."""
    CX_NAME = 1 << 0
    CX_ATTR_READY = 1 << 1
    CX_ATTR_NAME = 1 << 2
    CX_ATTR_VALUE = 1 << 3
    CX_QUOTED = 1 << 4
    CX_NOTE_SPACE = 1 << 5
    CX_NOTE_EQUALS = 1 << 6
    CX_NOTE_QUOTE = 1 << 7

    def __init__(self):
        self.context = self.CX_NAME
        self.padding_buffer = {"first": "", "before_eq": "", "after_eq": ""}
        self.reset = 0


class Tokenizer(object):
    """Creates a list of tokens from a string of wikicode."""
    USES_C = False
    START = object()
    END = object()
    MARKERS = ["{", "}", "[", "]", "<", ">", "|", "=", "&", "'", "#", "*", ";",
               ":", "/", "-", "\n", START, END]
    MAX_DEPTH = 40
    MAX_CYCLES = 100000
    regex = re.compile(r"([{}\[\]<>|=&'#*;:/\\\"\-!\n])", flags=re.IGNORECASE)
    tag_splitter = re.compile(r"([\s\"\\]+)")

    def __init__(self):
        self._text = None
        self._head = 0
        # Each stack frame is a [token list, context flags, textbuffer] triple.
        self._stacks = []
        self._global = 0
        self._depth = 0
        self._cycles = 0

    @property
    def _stack(self):
        """The current token stack."""
        return self._stacks[-1][0]

    @property
    def _context(self):
        """The current token context."""
        return self._stacks[-1][1]

    @_context.setter
    def _context(self, value):
        self._stacks[-1][1] = value

    @property
    def _textbuffer(self):
        """The current textbuffer."""
        return self._stacks[-1][2]

    @_textbuffer.setter
    def _textbuffer(self, value):
        self._stacks[-1][2] = value

    def _push(self, context=0):
        """Add a new token stack, context, and textbuffer to the list."""
        self._stacks.append([[], context, []])
        self._depth += 1
        self._cycles += 1

    def _push_textbuffer(self):
        """Push the textbuffer onto the stack as a Text node and clear it."""
        if self._textbuffer:
            self._stack.append(tokens.Text(text="".join(self._textbuffer)))
            self._textbuffer = []

    def _pop(self, keep_context=False):
        """Pop the current stack/context/textbuffer, returning the stack.

        If *keep_context* is ``True``, then we will replace the underlying
        stack's context with the current stack's.
""" self._push_textbuffer() self._depth -= 1 if keep_context: context = self._context stack = self._stacks.pop()[0] self._context = context return stack return self._stacks.pop()[0] def _can_recurse(self): """Return whether or not our max recursion depth has been exceeded.""" return self._depth < self.MAX_DEPTH and self._cycles < self.MAX_CYCLES def _fail_route(self): """Fail the current tokenization route. Discards the current stack/context/textbuffer and raises :py:exc:`~.BadRoute`. """ context = self._context self._pop() raise BadRoute(context) def _emit(self, token): """Write a token to the end of the current token stack.""" self._push_textbuffer() self._stack.append(token) def _emit_first(self, token): """Write a token to the beginning of the current token stack.""" self._push_textbuffer() self._stack.insert(0, token) def _emit_text(self, text): """Write text to the current textbuffer.""" self._textbuffer.append(text) def _emit_all(self, tokenlist): """Write a series of tokens to the current stack at once.""" if tokenlist and isinstance(tokenlist[0], tokens.Text): self._emit_text(tokenlist.pop(0).text) self._push_textbuffer() self._stack.extend(tokenlist) def _emit_text_then_stack(self, text): """Pop the current stack, write *text*, and then write the stack.""" stack = self._pop() self._emit_text(text) if stack: self._emit_all(stack) self._head -= 1 def _read(self, delta=0, wrap=False, strict=False): """Read the value at a relative point in the wikicode. The value is read from :py:attr:`self._head <_head>` plus the value of *delta* (which can be negative). If *wrap* is ``False``, we will not allow attempts to read from the end of the string if ``self._head + delta`` is negative. If *strict* is ``True``, the route will be failed (with :py:meth:`_fail_route`) if we try to read from past the end of the string; otherwise, :py:attr:`self.END ` is returned. If we try to read from before the start of the string, :py:attr:`self.START ` is returned. 
""" index = self._head + delta if index < 0 and (not wrap or abs(index) > len(self._text)): return self.START try: return self._text[index] except IndexError: if strict: self._fail_route() return self.END def _parse_template(self): """Parse a template at the head of the wikicode string.""" reset = self._head try: template = self._parse(contexts.TEMPLATE_NAME) except BadRoute: self._head = reset raise self._emit_first(tokens.TemplateOpen()) self._emit_all(template) self._emit(tokens.TemplateClose()) def _parse_argument(self): """Parse an argument at the head of the wikicode string.""" reset = self._head try: argument = self._parse(contexts.ARGUMENT_NAME) except BadRoute: self._head = reset raise self._emit_first(tokens.ArgumentOpen()) self._emit_all(argument) self._emit(tokens.ArgumentClose()) def _parse_template_or_argument(self): """Parse a template or argument at the head of the wikicode string.""" self._head += 2 braces = 2 while self._read() == "{": self._head += 1 braces += 1 self._push() while braces: if braces == 1: return self._emit_text_then_stack("{") if braces == 2: try: self._parse_template() except BadRoute: return self._emit_text_then_stack("{{") break try: self._parse_argument() braces -= 3 except BadRoute: try: self._parse_template() braces -= 2 except BadRoute: return self._emit_text_then_stack("{" * braces) if braces: self._head += 1 self._emit_all(self._pop()) if self._context & contexts.FAIL_NEXT: self._context ^= contexts.FAIL_NEXT def _handle_template_param(self): """Handle a template parameter at the head of the string.""" if self._context & contexts.TEMPLATE_NAME: self._context ^= contexts.TEMPLATE_NAME elif self._context & contexts.TEMPLATE_PARAM_VALUE: self._context ^= contexts.TEMPLATE_PARAM_VALUE elif self._context & contexts.TEMPLATE_PARAM_KEY: self._emit_all(self._pop(keep_context=True)) self._context |= contexts.TEMPLATE_PARAM_KEY self._emit(tokens.TemplateParamSeparator()) self._push(self._context) def _handle_template_param_value(self): """Handle a template parameter's value at the head of the string.""" self._emit_all(self._pop(keep_context=True)) self._context ^= contexts.TEMPLATE_PARAM_KEY self._context |= contexts.TEMPLATE_PARAM_VALUE self._emit(tokens.TemplateParamEquals()) def _handle_template_end(self): """Handle the end of a template at the head of the string.""" if self._context & contexts.TEMPLATE_PARAM_KEY: self._emit_all(self._pop(keep_context=True)) self._head += 1 return self._pop() def _handle_argument_separator(self): """Handle the separator between an argument's name and default.""" self._context ^= contexts.ARGUMENT_NAME self._context |= contexts.ARGUMENT_DEFAULT self._emit(tokens.ArgumentSeparator()) def _handle_argument_end(self): """Handle the end of an argument at the head of the string.""" self._head += 2 return self._pop() def _parse_wikilink(self): """Parse an internal wikilink at the head of the wikicode string.""" self._head += 2 reset = self._head - 1 try: wikilink = self._parse(contexts.WIKILINK_TITLE) except BadRoute: self._head = reset self._emit_text("[[") else: if self._context & contexts.FAIL_NEXT: self._context ^= contexts.FAIL_NEXT self._emit(tokens.WikilinkOpen()) self._emit_all(wikilink) self._emit(tokens.WikilinkClose()) def _handle_wikilink_separator(self): """Handle the separator between a wikilink's title and its text.""" self._context ^= contexts.WIKILINK_TITLE self._context |= contexts.WIKILINK_TEXT self._emit(tokens.WikilinkSeparator()) def _handle_wikilink_end(self): """Handle the end of a wikilink at the head 
of the string.""" self._head += 1 return self._pop() def _parse_bracketed_uri_scheme(self): """Parse the URI scheme of a bracket-enclosed external link.""" self._push(contexts.EXT_LINK_URI) if self._read() == self._read(1) == "/": self._emit_text("//") self._head += 2 else: valid = "abcdefghijklmnopqrstuvwxyz0123456789+.-" all_valid = lambda: all(char in valid for char in self._read()) scheme = "" while self._read() is not self.END and all_valid(): scheme += self._read() self._emit_text(self._read()) self._head += 1 if self._read() != ":": self._fail_route() self._emit_text(":") self._head += 1 slashes = self._read() == self._read(1) == "/" if slashes: self._emit_text("//") self._head += 2 if not is_scheme(scheme, slashes): self._fail_route() def _parse_free_uri_scheme(self): """Parse the URI scheme of a free (no brackets) external link.""" valid = "abcdefghijklmnopqrstuvwxyz0123456789+.-" scheme = [] try: # We have to backtrack through the textbuffer looking for our # scheme since it was just parsed as text: for chunk in reversed(self._textbuffer): for char in reversed(chunk): if char.isspace() or char in self.MARKERS: raise StopIteration() if char not in valid: raise BadRoute() scheme.append(char) except StopIteration: pass scheme = "".join(reversed(scheme)) slashes = self._read() == self._read(1) == "/" if not is_scheme(scheme, slashes): raise BadRoute() self._push(self._context | contexts.EXT_LINK_URI) self._emit_text(scheme) self._emit_text(":") if slashes: self._emit_text("//") self._head += 2 def _handle_free_link_text(self, punct, tail, this): """Handle text in a free ext link, including trailing punctuation.""" if "(" in this and ")" in punct: punct = punct[:-1] # ')' is not longer valid punctuation if this.endswith(punct): for i in reversed(range(-len(this), 0)): if i == -len(this) or this[i - 1] not in punct: break stripped = this[:i] if stripped and tail: self._emit_text(tail) tail = "" tail += this[i:] this = stripped elif tail: self._emit_text(tail) tail = "" self._emit_text(this) return punct, tail def _is_free_link_end(self, this, next): """Return whether the current head is the end of a free link.""" # Built from _parse()'s end sentinels: after, ctx = self._read(2), self._context equal_sign_contexts = contexts.TEMPLATE_PARAM_KEY | contexts.HEADING return (this in (self.END, "\n", "[", "]", "<", ">") or this == next == "'" or (this == "|" and ctx & contexts.TEMPLATE) or (this == "=" and ctx & equal_sign_contexts) or (this == next == "}" and ctx & contexts.TEMPLATE) or (this == next == after == "}" and ctx & contexts.ARGUMENT)) def _really_parse_external_link(self, brackets): """Really parse an external link.""" if brackets: self._parse_bracketed_uri_scheme() invalid = ("\n", " ", "]") else: self._parse_free_uri_scheme() invalid = ("\n", " ", "[", "]") punct = tuple(",;\.:!?)") if self._read() is self.END or self._read()[0] in invalid: self._fail_route() tail = "" while True: this, next = self._read(), self._read(1) if this == "&": if tail: self._emit_text(tail) tail = "" self._parse_entity() elif (this == "<" and next == "!" 
            elif (this == "<" and next == "!"
                    and self._read(2) == self._read(3) == "-"):
                if tail:
                    self._emit_text(tail)
                    tail = ""
                self._parse_comment()
            elif not brackets and self._is_free_link_end(this, next):
                return self._pop(), tail, -1
            elif this is self.END or this == "\n":
                self._fail_route()
            elif this == next == "{" and self._can_recurse():
                if tail:
                    self._emit_text(tail)
                    tail = ""
                self._parse_template_or_argument()
            elif this == "]":
                return self._pop(), tail, 0
            elif " " in this:
                before, after = this.split(" ", 1)
                if brackets:
                    self._emit_text(before)
                    self._emit(tokens.ExternalLinkSeparator())
                    if after:
                        self._emit_text(after)
                    self._context ^= contexts.EXT_LINK_URI
                    self._context |= contexts.EXT_LINK_TITLE
                    self._head += 1
                    return self._parse(push=False), None, 0
                punct, tail = self._handle_free_link_text(punct, tail, before)
                return self._pop(), tail + " " + after, 0
            elif not brackets:
                punct, tail = self._handle_free_link_text(punct, tail, this)
            else:
                self._emit_text(this)
            self._head += 1

    def _remove_uri_scheme_from_textbuffer(self, scheme):
        """Remove the URI scheme of a new external link from the textbuffer."""
        length = len(scheme)
        while length:
            if length < len(self._textbuffer[-1]):
                self._textbuffer[-1] = self._textbuffer[-1][:-length]
                break
            length -= len(self._textbuffer[-1])
            self._textbuffer.pop()

    def _parse_external_link(self, brackets):
        """Parse an external link at the head of the wikicode string."""
        reset = self._head
        self._head += 1
        try:
            bad_context = self._context & contexts.NO_EXT_LINKS
            if bad_context or not self._can_recurse():
                raise BadRoute()
            link, extra, delta = self._really_parse_external_link(brackets)
        except BadRoute:
            self._head = reset
            if not brackets and self._context & contexts.DL_TERM:
                self._handle_dl_term()
            else:
                self._emit_text(self._read())
        else:
            if not brackets:
                scheme = link[0].text.split(":", 1)[0]
                self._remove_uri_scheme_from_textbuffer(scheme)
            self._emit(tokens.ExternalLinkOpen(brackets=brackets))
            self._emit_all(link)
            self._emit(tokens.ExternalLinkClose())
            self._head += delta
            if extra:
                self._emit_text(extra)

    def _parse_heading(self):
        """Parse a section heading at the head of the wikicode string."""
        self._global |= contexts.GL_HEADING
        reset = self._head
        self._head += 1
        best = 1
        while self._read() == "=":
            best += 1
            self._head += 1
        context = contexts.HEADING_LEVEL_1 << min(best - 1, 5)
        try:
            title, level = self._parse(context)
        except BadRoute:
            self._head = reset + best - 1
            self._emit_text("=" * best)
        else:
            self._emit(tokens.HeadingStart(level=level))
            if level < best:
                self._emit_text("=" * (best - level))
            self._emit_all(title)
            self._emit(tokens.HeadingEnd())
        finally:
            self._global ^= contexts.GL_HEADING

    def _handle_heading_end(self):
        """Handle the end of a section heading at the head of the string."""
        reset = self._head
        self._head += 1
        best = 1
        while self._read() == "=":
            best += 1
            self._head += 1
        current = int(log(self._context / contexts.HEADING_LEVEL_1, 2)) + 1
        level = min(current, min(best, 6))
        try:  # Try to check for a heading closure after this one
            after, after_level = self._parse(self._context)
        except BadRoute:
            if level < best:
                self._emit_text("=" * (best - level))
            self._head = reset + best - 1
            return self._pop(), level
        else:  # Found another closure
            self._emit_text("=" * best)
            self._emit_all(after)
            return self._pop(), after_level

    def _really_parse_entity(self):
        """Actually parse an HTML entity and ensure that it is valid."""
        self._emit(tokens.HTMLEntityStart())
        self._head += 1

        this = self._read(strict=True)
        if this == "#":
            numeric = True
            self._emit(tokens.HTMLEntityNumeric())
            self._head += 1
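            # "&#" has been consumed at this point; a leading "x" or "X"
            # switches the digit set to hexadecimal (as in "&#x3C;"), while a
            # plain digit string (as in "&#60;") is treated as decimal.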
            this = self._read(strict=True)
            if this[0].lower() == "x":
                hexadecimal = True
                self._emit(tokens.HTMLEntityHex(char=this[0]))
                this = this[1:]
                if not this:
                    self._fail_route()
            else:
                hexadecimal = False
        else:
            numeric = hexadecimal = False

        valid = "0123456789abcdefABCDEF" if hexadecimal else "0123456789"
        if not numeric and not hexadecimal:
            valid += "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
        if not all([char in valid for char in this]):
            self._fail_route()

        self._head += 1
        if self._read() != ";":
            self._fail_route()
        if numeric:
            test = int(this, 16) if hexadecimal else int(this)
            if test < 1 or test > 0x10FFFF:
                self._fail_route()
        else:
            if this not in htmlentities.entitydefs:
                self._fail_route()

        self._emit(tokens.Text(text=this))
        self._emit(tokens.HTMLEntityEnd())

    def _parse_entity(self):
        """Parse an HTML entity at the head of the wikicode string."""
        reset = self._head
        self._push()
        try:
            self._really_parse_entity()
        except BadRoute:
            self._head = reset
            self._emit_text(self._read())
        else:
            self._emit_all(self._pop())

    def _parse_comment(self):
        """Parse an HTML comment at the head of the wikicode string."""
        self._head += 4
        reset = self._head - 1
        self._push()
        while True:
            this = self._read()
            if this == self.END:
                self._pop()
                self._head = reset
                self._emit_text("<!--")