A Python parser for MediaWiki wikicode https://mwparserfromhell.readthedocs.io/
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
 
 
 
 

1162 lines
44 KiB

  1. # -*- coding: utf-8 -*-
  2. #
  3. # Copyright (C) 2012-2014 Ben Kurtovic <ben.kurtovic@gmail.com>
  4. #
  5. # Permission is hereby granted, free of charge, to any person obtaining a copy
  6. # of this software and associated documentation files (the "Software"), to deal
  7. # in the Software without restriction, including without limitation the rights
  8. # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
  9. # copies of the Software, and to permit persons to whom the Software is
  10. # furnished to do so, subject to the following conditions:
  11. #
  12. # The above copyright notice and this permission notice shall be included in
  13. # all copies or substantial portions of the Software.
  14. #
  15. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16. # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17. # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  18. # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  19. # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  20. # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  21. # SOFTWARE.
  22. from __future__ import unicode_literals
  23. from math import log
  24. import re
  25. from . import contexts, tokens, ParserError
  26. from ..compat import htmlentities, range
  27. from ..definitions import (get_html_tag, is_parsable, is_single,
  28. is_single_only, is_scheme)
  29. __all__ = ["Tokenizer"]
  30. class BadRoute(Exception):
  31. """Raised internally when the current tokenization route is invalid."""
  32. def __init__(self, context=0):
  33. super(BadRoute, self).__init__()
  34. self.context = context
  35. class _TagOpenData(object):
  36. """Stores data about an HTML open tag, like ``<ref name="foo">``."""
  37. CX_NAME = 1 << 0
  38. CX_ATTR_READY = 1 << 1
  39. CX_ATTR_NAME = 1 << 2
  40. CX_ATTR_VALUE = 1 << 3
  41. CX_QUOTED = 1 << 4
  42. CX_NOTE_SPACE = 1 << 5
  43. CX_NOTE_EQUALS = 1 << 6
  44. CX_NOTE_QUOTE = 1 << 7
  45. def __init__(self):
  46. self.context = self.CX_NAME
  47. self.padding_buffer = {"first": "", "before_eq": "", "after_eq": ""}
  48. self.quoter = None
  49. self.reset = 0
class Tokenizer(object):
    """Creates a list of tokens from a string of wikicode."""
    USES_C = False                 # this is the pure-Python implementation
    START = object()               # sentinel: read before start of text
    END = object()                 # sentinel: read past end of text
    # Characters (plus the two sentinels) that can trigger parse actions:
    MARKERS = ["{", "}", "[", "]", "<", ">", "|", "=", "&", "'", "#", "*", ";",
               ":", "/", "-", "\n", START, END]
    MAX_DEPTH = 40                 # cap on nested stack depth
    MAX_CYCLES = 100000            # cap on total stacks pushed
    # Splits wikicode into runs of plain text and single marker characters:
    regex = re.compile(r"([{}\[\]<>|=&'#*;:/\\\"\-!\n])", flags=re.IGNORECASE)
    # Splits HTML tag innards on whitespace, quotes, and backslashes:
    tag_splitter = re.compile(r"([\s\"\'\\]+)")
    def __init__(self):
        self._text = None     # wikicode being tokenized (set during tokenize)
        self._head = 0        # current read position within self._text
        self._stacks = []     # stack of [token stack, context, textbuffer]
        self._global = 0      # global context flags (e.g. GL_HEADING)
        self._depth = 0       # current nesting depth, bounded by MAX_DEPTH
        self._cycles = 0      # total stacks ever pushed, bounded by MAX_CYCLES
    @property
    def _stack(self):
        """The current token stack (first element of the topmost stack)."""
        return self._stacks[-1][0]
    @property
    def _context(self):
        """The current token context (second element of the topmost stack)."""
        return self._stacks[-1][1]

    @_context.setter
    def _context(self, value):
        self._stacks[-1][1] = value
    @property
    def _textbuffer(self):
        """The current textbuffer (third element of the topmost stack)."""
        return self._stacks[-1][2]

    @_textbuffer.setter
    def _textbuffer(self, value):
        self._stacks[-1][2] = value
    def _push(self, context=0):
        """Add a new token stack, context, and textbuffer to the list."""
        self._stacks.append([[], context, []])
        self._depth += 1
        # _cycles only ever grows; it bounds total work, not current depth.
        self._cycles += 1
    def _push_textbuffer(self):
        """Push the textbuffer onto the stack as a Text node and clear it."""
        if self._textbuffer:
            self._stack.append(tokens.Text(text="".join(self._textbuffer)))
            self._textbuffer = []
  96. def _pop(self, keep_context=False):
  97. """Pop the current stack/context/textbuffer, returning the stack.
  98. If *keep_context* is ``True``, then we will replace the underlying
  99. stack's context with the current stack's.
  100. """
  101. self._push_textbuffer()
  102. self._depth -= 1
  103. if keep_context:
  104. context = self._context
  105. stack = self._stacks.pop()[0]
  106. self._context = context
  107. return stack
  108. return self._stacks.pop()[0]
    def _can_recurse(self):
        """Return whether we may still recurse (limits not yet exceeded)."""
        return self._depth < self.MAX_DEPTH and self._cycles < self.MAX_CYCLES
    def _fail_route(self):
        """Fail the current tokenization route.

        Discards the current stack/context/textbuffer and raises
        :py:exc:`~.BadRoute`, carrying the discarded context.
        """
        context = self._context
        self._pop()
        raise BadRoute(context)
    def _emit(self, token):
        """Write a token to the end of the current token stack."""
        # Flush pending text first so token order is preserved.
        self._push_textbuffer()
        self._stack.append(token)
    def _emit_first(self, token):
        """Write a token to the beginning of the current token stack."""
        self._push_textbuffer()
        self._stack.insert(0, token)
    def _emit_text(self, text):
        """Write text to the current textbuffer."""
        self._textbuffer.append(text)
    def _emit_all(self, tokenlist):
        """Write a series of tokens to the current stack at once."""
        # If the list starts with a Text token, merge it into our textbuffer
        # instead, so adjacent text ends up in a single Text token.
        if tokenlist and isinstance(tokenlist[0], tokens.Text):
            self._emit_text(tokenlist.pop(0).text)
        self._push_textbuffer()
        self._stack.extend(tokenlist)
    def _emit_text_then_stack(self, text):
        """Pop the current stack, write *text*, and then write the stack."""
        stack = self._pop()
        self._emit_text(text)
        if stack:
            self._emit_all(stack)
        # Step back one so the caller's loop re-reads the current character.
        self._head -= 1
    def _read(self, delta=0, wrap=False, strict=False):
        """Read the value at a relative point in the wikicode.

        The value is read from :py:attr:`self._head <_head>` plus the value of
        *delta* (which can be negative). If *wrap* is ``False``, we will not
        allow attempts to read from the end of the string if ``self._head +
        delta`` is negative. If *strict* is ``True``, the route will be failed
        (with :py:meth:`_fail_route`) if we try to read from past the end of
        the string; otherwise, :py:attr:`self.END <END>` is returned. If we try
        to read from before the start of the string, :py:attr:`self.START
        <START>` is returned.
        """
        index = self._head + delta
        if index < 0 and (not wrap or abs(index) > len(self._text)):
            return self.START
        try:
            return self._text[index]
        except IndexError:
            if strict:
                self._fail_route()
            return self.END
    def _parse_template(self):
        """Parse a template at the head of the wikicode string."""
        reset = self._head
        try:
            template = self._parse(contexts.TEMPLATE_NAME)
        except BadRoute:
            # Restore the head so the caller can try another route.
            self._head = reset
            raise
        self._emit_first(tokens.TemplateOpen())
        self._emit_all(template)
        self._emit(tokens.TemplateClose())
    def _parse_argument(self):
        """Parse an argument (``{{{...}}}``) at the head of the string."""
        reset = self._head
        try:
            argument = self._parse(contexts.ARGUMENT_NAME)
        except BadRoute:
            # Restore the head so the caller can try another route.
            self._head = reset
            raise
        self._emit_first(tokens.ArgumentOpen())
        self._emit_all(argument)
        self._emit(tokens.ArgumentClose())
    def _parse_template_or_argument(self):
        """Parse a template or argument at the head of the wikicode string.

        Counts the run of ``{`` characters and greedily tries arguments
        (three braces) before templates (two braces), emitting any leftover
        braces as plain text.
        """
        self._head += 2
        braces = 2
        while self._read() == "{":
            self._head += 1
            braces += 1
        self._push()
        while braces:
            if braces == 1:
                # A lone leftover briefcase brace is plain text.
                return self._emit_text_then_stack("{")
            if braces == 2:
                try:
                    self._parse_template()
                except BadRoute:
                    return self._emit_text_then_stack("{{")
                break
            # braces >= 3: prefer an argument ({{{ }}}), else a template.
            try:
                self._parse_argument()
                braces -= 3
            except BadRoute:
                try:
                    self._parse_template()
                    braces -= 2
                except BadRoute:
                    return self._emit_text_then_stack("{" * braces)
            if braces:
                self._head += 1
        self._emit_all(self._pop())
        if self._context & contexts.FAIL_NEXT:
            self._context ^= contexts.FAIL_NEXT
    def _handle_template_param(self):
        """Handle a template parameter at the head of the string (``|``)."""
        if self._context & contexts.TEMPLATE_NAME:
            self._context ^= contexts.TEMPLATE_NAME
        elif self._context & contexts.TEMPLATE_PARAM_VALUE:
            self._context ^= contexts.TEMPLATE_PARAM_VALUE
        else:
            # Finishing a previous key: merge its stack back in first.
            self._emit_all(self._pop(keep_context=True))
        self._context |= contexts.TEMPLATE_PARAM_KEY
        self._emit(tokens.TemplateParamSeparator())
        self._push(self._context)
    def _handle_template_param_value(self):
        """Handle a template parameter's value at the head (``=``)."""
        self._emit_all(self._pop(keep_context=True))
        self._context ^= contexts.TEMPLATE_PARAM_KEY
        self._context |= contexts.TEMPLATE_PARAM_VALUE
        self._emit(tokens.TemplateParamEquals())
    def _handle_template_end(self):
        """Handle the end of a template at the head of the string (``}}``)."""
        if self._context & contexts.TEMPLATE_PARAM_KEY:
            self._emit_all(self._pop(keep_context=True))
        self._head += 1
        return self._pop()
    def _handle_argument_separator(self):
        """Handle the separator between an argument's name and default."""
        self._context ^= contexts.ARGUMENT_NAME
        self._context |= contexts.ARGUMENT_DEFAULT
        self._emit(tokens.ArgumentSeparator())
    def _handle_argument_end(self):
        """Handle the end of an argument at the head of the string (``}}}``)."""
        self._head += 2
        return self._pop()
    def _parse_wikilink(self):
        """Parse an internal wikilink at the head of the wikicode string."""
        self._head += 2
        reset = self._head - 1
        try:
            wikilink = self._parse(contexts.WIKILINK_TITLE)
        except BadRoute:
            # Not a valid wikilink: emit the brackets literally.
            self._head = reset
            self._emit_text("[[")
        else:
            self._emit(tokens.WikilinkOpen())
            self._emit_all(wikilink)
            self._emit(tokens.WikilinkClose())
    def _handle_wikilink_separator(self):
        """Handle the separator between a wikilink's title and its text."""
        self._context ^= contexts.WIKILINK_TITLE
        self._context |= contexts.WIKILINK_TEXT
        self._emit(tokens.WikilinkSeparator())
    def _handle_wikilink_end(self):
        """Handle the end of a wikilink at the head of the string (``]]``)."""
        self._head += 1
        return self._pop()
    def _parse_bracketed_uri_scheme(self):
        """Parse the URI scheme of a bracket-enclosed external link."""
        self._push(contexts.EXT_LINK_URI)
        if self._read() == self._read(1) == "/":
            # Protocol-relative link (``//example.com``): no scheme to check.
            self._emit_text("//")
            self._head += 2
        else:
            valid = "abcdefghijklmnopqrstuvwxyz0123456789+.-"
            all_valid = lambda: all(char in valid for char in self._read())
            scheme = ""
            while self._read() is not self.END and all_valid():
                scheme += self._read()
                self._emit_text(self._read())
                self._head += 1
            if self._read() != ":":
                self._fail_route()
            self._emit_text(":")
            self._head += 1
            slashes = self._read() == self._read(1) == "/"
            if slashes:
                self._emit_text("//")
                self._head += 2
            if not is_scheme(scheme, slashes):
                self._fail_route()
    def _parse_free_uri_scheme(self):
        """Parse the URI scheme of a free (no brackets) external link."""
        valid = "abcdefghijklmnopqrstuvwxyz0123456789+.-"
        scheme = []
        try:
            # We have to backtrack through the textbuffer looking for our
            # scheme since it was just parsed as text:
            for chunk in reversed(self._textbuffer):
                for char in reversed(chunk):
                    if char.isspace() or char in self.MARKERS:
                        # StopIteration doubles as a two-level loop break.
                        raise StopIteration()
                    if char not in valid:
                        raise BadRoute()
                    scheme.append(char)
        except StopIteration:
            pass
        scheme = "".join(reversed(scheme))
        slashes = self._read() == self._read(1) == "/"
        if not is_scheme(scheme, slashes):
            raise BadRoute()
        self._push(self._context | contexts.EXT_LINK_URI)
        self._emit_text(scheme)
        self._emit_text(":")
        if slashes:
            self._emit_text("//")
            self._head += 2
    def _handle_free_link_text(self, punct, tail, this):
        """Handle text in a free ext link, including trailing punctuation.

        Trailing punctuation is buffered in *tail* rather than emitted, since
        it belongs outside the link if nothing follows it.
        """
        if "(" in this and ")" in punct:
            punct = punct[:-1]  # ')' is no longer valid punctuation
        if this.endswith(punct):
            # Find the start of the trailing punctuation run:
            for i in range(len(this) - 1, 0, -1):
                if this[i - 1] not in punct:
                    break
            else:
                i = 0
            stripped = this[:i]
            if stripped and tail:
                self._emit_text(tail)
                tail = ""
            tail += this[i:]
            this = stripped
        elif tail:
            self._emit_text(tail)
            tail = ""
        self._emit_text(this)
        return punct, tail
    def _is_free_link_end(self, this, next):
        """Return whether the current head is the end of a free link."""
        # Built from _parse()'s end sentinels:
        after, ctx = self._read(2), self._context
        equal_sign_contexts = contexts.TEMPLATE_PARAM_KEY | contexts.HEADING
        return (this in (self.END, "\n", "[", "]", "<", ">") or
                this == next == "'" or
                (this == "|" and ctx & contexts.TEMPLATE) or
                (this == "=" and ctx & equal_sign_contexts) or
                (this == next == "}" and ctx & contexts.TEMPLATE) or
                (this == next == after == "}" and ctx & contexts.ARGUMENT))
  353. def _really_parse_external_link(self, brackets):
  354. """Really parse an external link."""
  355. if brackets:
  356. self._parse_bracketed_uri_scheme()
  357. invalid = ("\n", " ", "]")
  358. else:
  359. self._parse_free_uri_scheme()
  360. invalid = ("\n", " ", "[", "]")
  361. punct = tuple(",;\.:!?)")
  362. if self._read() is self.END or self._read()[0] in invalid:
  363. self._fail_route()
  364. tail = ""
  365. while True:
  366. this, next = self._read(), self._read(1)
  367. if this == "&":
  368. if tail:
  369. self._emit_text(tail)
  370. tail = ""
  371. self._parse_entity()
  372. elif (this == "<" and next == "!" and self._read(2) ==
  373. self._read(3) == "-"):
  374. if tail:
  375. self._emit_text(tail)
  376. tail = ""
  377. self._parse_comment()
  378. elif not brackets and self._is_free_link_end(this, next):
  379. return self._pop(), tail, -1
  380. elif this is self.END or this == "\n":
  381. self._fail_route()
  382. elif this == next == "{" and self._can_recurse():
  383. if tail:
  384. self._emit_text(tail)
  385. tail = ""
  386. self._parse_template_or_argument()
  387. elif this == "]":
  388. return self._pop(), tail, 0
  389. elif " " in this:
  390. before, after = this.split(" ", 1)
  391. if brackets:
  392. self._emit_text(before)
  393. self._emit(tokens.ExternalLinkSeparator())
  394. if after:
  395. self._emit_text(after)
  396. self._context ^= contexts.EXT_LINK_URI
  397. self._context |= contexts.EXT_LINK_TITLE
  398. self._head += 1
  399. return self._parse(push=False), None, 0
  400. punct, tail = self._handle_free_link_text(punct, tail, before)
  401. return self._pop(), tail + " " + after, 0
  402. elif not brackets:
  403. punct, tail = self._handle_free_link_text(punct, tail, this)
  404. else:
  405. self._emit_text(this)
  406. self._head += 1
    def _remove_uri_scheme_from_textbuffer(self, scheme):
        """Remove the URI scheme of a new external link from the textbuffer.

        The scheme was already parsed as plain text (see
        _parse_free_uri_scheme), so strip it from the end of the buffer.
        """
        length = len(scheme)
        while length:
            if length < len(self._textbuffer[-1]):
                self._textbuffer[-1] = self._textbuffer[-1][:-length]
                break
            length -= len(self._textbuffer[-1])
            self._textbuffer.pop()
    def _parse_external_link(self, brackets):
        """Parse an external link at the head of the wikicode string."""
        reset = self._head
        self._head += 1
        try:
            bad_context = self._context & contexts.NO_EXT_LINKS
            if bad_context or not self._can_recurse():
                raise BadRoute()
            link, extra, delta = self._really_parse_external_link(brackets)
        except BadRoute:
            self._head = reset
            if not brackets and self._context & contexts.DL_TERM:
                self._handle_dl_term()
            else:
                self._emit_text(self._read())
        else:
            if not brackets:
                # The free link's scheme was emitted as plain text earlier;
                # remove it so it lives inside the link tokens instead.
                scheme = link[0].text.split(":", 1)[0]
                self._remove_uri_scheme_from_textbuffer(scheme)
            self._emit(tokens.ExternalLinkOpen(brackets=brackets))
            self._emit_all(link)
            self._emit(tokens.ExternalLinkClose())
            self._head += delta
            if extra:
                self._emit_text(extra)
    def _parse_heading(self):
        """Parse a section heading at the head of the wikicode string."""
        self._global |= contexts.GL_HEADING
        reset = self._head
        self._head += 1
        best = 1
        # Count the leading '=' characters to get the maximum level.
        while self._read() == "=":
            best += 1
            self._head += 1
        context = contexts.HEADING_LEVEL_1 << min(best - 1, 5)
        try:
            title, level = self._parse(context)
        except BadRoute:
            self._head = reset + best - 1
            self._emit_text("=" * best)
        else:
            self._emit(tokens.HeadingStart(level=level))
            if level < best:
                # Extra '=' on the left become part of the title as text.
                self._emit_text("=" * (best - level))
            self._emit_all(title)
            self._emit(tokens.HeadingEnd())
        finally:
            self._global ^= contexts.GL_HEADING
    def _handle_heading_end(self):
        """Handle the end of a section heading at the head of the string."""
        reset = self._head
        self._head += 1
        best = 1
        while self._read() == "=":
            best += 1
            self._head += 1
        # Recover the level of the open heading from its context bit:
        current = int(log(self._context / contexts.HEADING_LEVEL_1, 2)) + 1
        level = min(current, min(best, 6))
        try:  # Try to check for a heading closure after this one
            after, after_level = self._parse(self._context)
        except BadRoute:
            if level < best:
                self._emit_text("=" * (best - level))
            self._head = reset + best - 1
            return self._pop(), level
        else:  # Found another closure
            self._emit_text("=" * best)
            self._emit_all(after)
            return self._pop(), after_level
  485. def _really_parse_entity(self):
  486. """Actually parse an HTML entity and ensure that it is valid."""
  487. self._emit(tokens.HTMLEntityStart())
  488. self._head += 1
  489. this = self._read(strict=True)
  490. if this == "#":
  491. numeric = True
  492. self._emit(tokens.HTMLEntityNumeric())
  493. self._head += 1
  494. this = self._read(strict=True)
  495. if this[0].lower() == "x":
  496. hexadecimal = True
  497. self._emit(tokens.HTMLEntityHex(char=this[0]))
  498. this = this[1:]
  499. if not this:
  500. self._fail_route()
  501. else:
  502. hexadecimal = False
  503. else:
  504. numeric = hexadecimal = False
  505. valid = "0123456789abcdefABCDEF" if hexadecimal else "0123456789"
  506. if not numeric and not hexadecimal:
  507. valid += "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
  508. if not all([char in valid for char in this]):
  509. self._fail_route()
  510. self._head += 1
  511. if self._read() != ";":
  512. self._fail_route()
  513. if numeric:
  514. test = int(this, 16) if hexadecimal else int(this)
  515. if test < 1 or test > 0x10FFFF:
  516. self._fail_route()
  517. else:
  518. if this not in htmlentities.entitydefs:
  519. self._fail_route()
  520. self._emit(tokens.Text(text=this))
  521. self._emit(tokens.HTMLEntityEnd())
    def _parse_entity(self):
        """Parse an HTML entity at the head of the wikicode string."""
        reset = self._head
        self._push()
        try:
            self._really_parse_entity()
        except BadRoute:
            # Not a valid entity: emit the '&' literally.
            self._head = reset
            self._emit_text(self._read())
        else:
            self._emit_all(self._pop())
    def _parse_comment(self):
        """Parse an HTML comment at the head of the wikicode string."""
        self._head += 4  # Skip over "<!--"
        reset = self._head - 1
        self._push()
        while True:
            this = self._read()
            if this == self.END:
                # Unterminated comment: emit "<!--" as text and rewind.
                self._pop()
                self._head = reset
                self._emit_text("<!--")
                return
            if this == self._read(1) == "-" and self._read(2) == ">":
                self._emit_first(tokens.CommentStart())
                self._emit(tokens.CommentEnd())
                self._emit_all(self._pop())
                self._head += 2
                return
            self._emit_text(this)
            self._head += 1
    def _push_tag_buffer(self, data):
        """Write a pending tag attribute from *data* to the stack."""
        if data.context & data.CX_QUOTED:
            self._emit_first(tokens.TagAttrQuote(char=data.quoter))
            self._emit_all(self._pop())
        buf = data.padding_buffer
        self._emit_first(tokens.TagAttrStart(pad_first=buf["first"],
            pad_before_eq=buf["before_eq"], pad_after_eq=buf["after_eq"]))
        self._emit_all(self._pop())
        # Clear the padding buffer for the next attribute:
        for key in data.padding_buffer:
            data.padding_buffer[key] = ""
    def _handle_tag_space(self, data, text):
        """Handle whitespace (*text*) inside of an HTML open tag."""
        ctx = data.context
        # Unquoted values end at whitespace; quoted ones only after the quote.
        end_of_value = ctx & data.CX_ATTR_VALUE and not ctx & (data.CX_QUOTED | data.CX_NOTE_QUOTE)
        if end_of_value or (ctx & data.CX_QUOTED and ctx & data.CX_NOTE_SPACE):
            self._push_tag_buffer(data)
            data.context = data.CX_ATTR_READY
        elif ctx & data.CX_NOTE_SPACE:
            data.context = data.CX_ATTR_READY
        elif ctx & data.CX_ATTR_NAME:
            data.context |= data.CX_NOTE_EQUALS
            data.padding_buffer["before_eq"] += text
        if ctx & data.CX_QUOTED and not ctx & data.CX_NOTE_SPACE:
            # Whitespace inside a quoted value is real text, not padding.
            self._emit_text(text)
        elif data.context & data.CX_ATTR_READY:
            data.padding_buffer["first"] += text
        elif data.context & data.CX_ATTR_VALUE:
            data.padding_buffer["after_eq"] += text
    def _handle_tag_text(self, text):
        """Handle regular *text* inside of an HTML open tag."""
        next = self._read(1)
        if not self._can_recurse() or text not in self.MARKERS:
            self._emit_text(text)
        elif text == next == "{":
            self._parse_template_or_argument()
        elif text == next == "[":
            self._parse_wikilink()
        elif text == "<":
            self._parse_tag()
        else:
            self._emit_text(text)
    def _handle_tag_data(self, data, text):
        """Handle all sorts of *text* data inside of an HTML open tag."""
        for chunk in self.tag_splitter.split(text):
            if not chunk:
                continue
            if data.context & data.CX_NAME:
                if chunk in self.MARKERS or chunk.isspace():
                    self._fail_route()  # Tags must start with text, not spaces
                data.context = data.CX_NOTE_SPACE
            elif chunk.isspace():
                self._handle_tag_space(data, chunk)
                continue
            elif data.context & data.CX_NOTE_SPACE:
                if data.context & data.CX_QUOTED:
                    # Text after a closing quote: the quote wasn't really a
                    # quote, so rewind and reparse the value unquoted.
                    data.context = data.CX_ATTR_VALUE
                    self._pop()
                    self._head = data.reset - 1  # Will be auto-incremented
                    return  # Break early
                self._fail_route()
            elif data.context & data.CX_ATTR_READY:
                data.context = data.CX_ATTR_NAME
                self._push(contexts.TAG_ATTR)
            elif data.context & data.CX_ATTR_NAME:
                if chunk == "=":
                    data.context = data.CX_ATTR_VALUE | data.CX_NOTE_QUOTE
                    self._emit(tokens.TagAttrEquals())
                    continue
                if data.context & data.CX_NOTE_EQUALS:
                    # New attribute started without '=': flush the old one.
                    self._push_tag_buffer(data)
                    data.context = data.CX_ATTR_NAME
                    self._push(contexts.TAG_ATTR)
            else:  # data.context & data.CX_ATTR_VALUE assured
                escaped = self._read(-1) == "\\" and self._read(-2) != "\\"
                if data.context & data.CX_NOTE_QUOTE:
                    data.context ^= data.CX_NOTE_QUOTE
                    if chunk in "'\"" and not escaped:
                        data.context |= data.CX_QUOTED
                        data.quoter = chunk
                        data.reset = self._head
                        self._push(self._context)
                        continue
                elif data.context & data.CX_QUOTED:
                    if chunk == data.quoter and not escaped:
                        data.context |= data.CX_NOTE_SPACE
                        continue
            self._handle_tag_text(chunk)
    def _handle_tag_close_open(self, data, token):
        """Handle the closing of a open tag (``<foo>``)."""
        if data.context & (data.CX_ATTR_NAME | data.CX_ATTR_VALUE):
            self._push_tag_buffer(data)
        self._emit(token(padding=data.padding_buffer["first"]))
        self._head += 1
    def _handle_tag_open_close(self):
        """Handle the opening of a closing tag (``</foo>``)."""
        self._emit(tokens.TagOpenClose())
        self._push(contexts.TAG_CLOSE)
        self._head += 1
    def _handle_tag_close_close(self):
        """Handle the ending of a closing tag (``</foo>``)."""
        strip = lambda tok: tok.text.rstrip().lower()
        closing = self._pop()
        # The closing tag must be a single Text token matching the open tag's
        # name (self._stack[1]), case- and trailing-space-insensitively.
        if len(closing) != 1 or (not isinstance(closing[0], tokens.Text) or
                                 strip(closing[0]) != strip(self._stack[1])):
            self._fail_route()
        self._emit_all(closing)
        self._emit(tokens.TagCloseClose())
        return self._pop()
    def _handle_blacklisted_tag(self):
        """Handle the body of an HTML tag that is parser-blacklisted.

        Everything is treated as raw text (plus entities) until ``</``.
        """
        while True:
            this, next = self._read(), self._read(1)
            if this is self.END:
                self._fail_route()
            elif this == "<" and next == "/":
                self._handle_tag_open_close()
                self._head += 1
                return self._parse(push=False)
            elif this == "&":
                self._parse_entity()
            else:
                self._emit_text(this)
            self._head += 1
    def _handle_single_only_tag_end(self):
        """Handle the end of an implicitly closing single-only HTML tag."""
        # Replace the TagCloseOpen just emitted with an implicit self-close:
        padding = self._stack.pop().padding
        self._emit(tokens.TagCloseSelfclose(padding=padding, implicit=True))
        self._head -= 1  # Offset displacement done by _handle_tag_close_open
        return self._pop()
    def _handle_single_tag_end(self):
        """Handle the stream end when inside a single-supporting HTML tag."""
        stack = self._stack
        # We need to find the index of the TagCloseOpen token corresponding to
        # the TagOpenOpen token located at index 0:
        depth = 1
        for index, token in enumerate(stack[2:], 2):
            if isinstance(token, tokens.TagOpenOpen):
                depth += 1
            elif isinstance(token, tokens.TagCloseOpen):
                depth -= 1
                if depth == 0:
                    break
        else:  # pragma: no cover (untestable/exceptional case)
            raise ParserError("_handle_single_tag_end() missed a TagCloseOpen")
        padding = stack[index].padding
        stack[index] = tokens.TagCloseSelfclose(padding=padding, implicit=True)
        return self._pop()
    def _really_parse_tag(self):
        """Actually parse an HTML tag, starting with the open (``<foo>``)."""
        data = _TagOpenData()
        self._push(contexts.TAG_OPEN)
        self._emit(tokens.TagOpenOpen())
        while True:
            this, next = self._read(), self._read(1)
            # We may close the tag unless mid-name or mid-quote:
            can_exit = (not data.context & (data.CX_QUOTED | data.CX_NAME) or
                        data.context & data.CX_NOTE_SPACE)
            if this is self.END:
                if self._context & contexts.TAG_ATTR:
                    if data.context & data.CX_QUOTED:
                        # Unclosed attribute quote: reset, don't die
                        data.context = data.CX_ATTR_VALUE
                        self._pop()
                        self._head = data.reset
                        continue
                    self._pop()
                self._fail_route()
            elif this == ">" and can_exit:
                self._handle_tag_close_open(data, tokens.TagCloseOpen)
                self._context = contexts.TAG_BODY
                if is_single_only(self._stack[1].text):
                    return self._handle_single_only_tag_end()
                if is_parsable(self._stack[1].text):
                    return self._parse(push=False)
                return self._handle_blacklisted_tag()
            elif this == "/" and next == ">" and can_exit:
                self._handle_tag_close_open(data, tokens.TagCloseSelfclose)
                return self._pop()
            else:
                self._handle_tag_data(data, this)
            self._head += 1
    def _handle_invalid_tag_start(self):
        """Handle the (possible) start of an implicitly closing single tag.

        Called when the head is at ``</``; only single-only tags (like
        ``<br>``) may legally appear this way.
        """
        reset = self._head + 1
        self._head += 2
        try:
            if not is_single_only(self.tag_splitter.split(self._read())[0]):
                raise BadRoute()
            tag = self._really_parse_tag()
        except BadRoute:
            self._head = reset
            self._emit_text("</")
        else:
            tag[0].invalid = True  # Set flag of TagOpenOpen
            self._emit_all(tag)
    def _parse_tag(self):
        """Parse an HTML tag at the head of the wikicode string."""
        reset = self._head
        self._head += 1
        try:
            tag = self._really_parse_tag()
        except BadRoute:
            # Not a valid tag: emit the '<' literally.
            self._head = reset
            self._emit_text("<")
        else:
            self._emit_all(tag)
    def _emit_style_tag(self, tag, markup, body):
        """Write the body of a tag and the tokens that should surround it."""
        self._emit(tokens.TagOpenOpen(wiki_markup=markup))
        self._emit_text(tag)
        self._emit(tokens.TagCloseOpen())
        self._emit_all(body)
        self._emit(tokens.TagOpenClose())
        self._emit_text(tag)
        self._emit(tokens.TagCloseClose())
    def _parse_italics(self):
        """Parse wiki-style italics (``''``)."""
        reset = self._head
        try:
            stack = self._parse(contexts.STYLE_ITALICS)
        except BadRoute as route:
            self._head = reset
            if route.context & contexts.STYLE_PASS_AGAIN:
                # The failed route asked for a retry with second-pass rules:
                new_ctx = contexts.STYLE_ITALICS | contexts.STYLE_SECOND_PASS
                stack = self._parse(new_ctx)
            else:
                return self._emit_text("''")
        self._emit_style_tag("i", "''", stack)
    def _parse_bold(self):
        """Parse wiki-style bold (``'''``).

        Returns a truthy value if the caller (_parse_style) should pop.
        """
        reset = self._head
        try:
            stack = self._parse(contexts.STYLE_BOLD)
        except BadRoute:
            self._head = reset
            if self._context & contexts.STYLE_SECOND_PASS:
                self._emit_text("'")
                return True
            elif self._context & contexts.STYLE_ITALICS:
                self._context |= contexts.STYLE_PASS_AGAIN
                self._emit_text("'''")
            else:
                # Treat as an apostrophe followed by italics:
                self._emit_text("'")
                self._parse_italics()
        else:
            self._emit_style_tag("b", "'''", stack)
    def _parse_italics_and_bold(self):
        """Parse wiki-style italics and bold together (i.e., five ticks)."""
        reset = self._head
        try:
            stack = self._parse(contexts.STYLE_BOLD)
        except BadRoute:
            self._head = reset
            try:
                stack = self._parse(contexts.STYLE_ITALICS)
            except BadRoute:
                # Neither works: all five ticks are plain text.
                self._head = reset
                self._emit_text("'''''")
            else:
                # Italics parsed; try bold wrapping the result.
                reset = self._head
                try:
                    stack2 = self._parse(contexts.STYLE_BOLD)
                except BadRoute:
                    self._head = reset
                    self._emit_text("'''")
                    self._emit_style_tag("i", "''", stack)
                else:
                    self._push()
                    self._emit_style_tag("i", "''", stack)
                    self._emit_all(stack2)
                    self._emit_style_tag("b", "'''", self._pop())
        else:
            # Bold parsed; try italics wrapping the result.
            reset = self._head
            try:
                stack2 = self._parse(contexts.STYLE_ITALICS)
            except BadRoute:
                self._head = reset
                self._emit_text("''")
                self._emit_style_tag("b", "'''", stack)
            else:
                self._push()
                self._emit_style_tag("b", "'''", stack)
                self._emit_all(stack2)
                self._emit_style_tag("i", "''", self._pop())
    def _parse_style(self):
        """Parse wiki-style formatting (``''``/``'''`` for italics/bold)."""
        self._head += 2
        ticks = 2
        # Count the full run of apostrophes at the head.
        while self._read() == "'":
            self._head += 1
            ticks += 1
        italics = self._context & contexts.STYLE_ITALICS
        bold = self._context & contexts.STYLE_BOLD
        if ticks > 5:
            # More than five: the surplus apostrophes are literal text.
            self._emit_text("'" * (ticks - 5))
            ticks = 5
        elif ticks == 4:
            # Four ticks == one literal apostrophe plus bold markup.
            self._emit_text("'")
            ticks = 3
        if (italics and ticks in (2, 5)) or (bold and ticks in (3, 5)):
            # This run closes the style route we are currently inside.
            if ticks == 5:
                # Only part of the run closes it; rewind past the rest so
                # the remaining ticks are re-examined by the outer route.
                self._head -= 3 if italics else 2
            return self._pop()
        elif not self._can_recurse():
            # Recursion limit reached; cannot open a new style route.
            if ticks == 3:
                if self._context & contexts.STYLE_SECOND_PASS:
                    self._emit_text("'")
                    return self._pop()
                if self._context & contexts.STYLE_ITALICS:
                    self._context |= contexts.STYLE_PASS_AGAIN
            self._emit_text("'" * ticks)
        elif ticks == 2:
            self._parse_italics()
        elif ticks == 3:
            if self._parse_bold():
                return self._pop()
        else:  # ticks == 5
            self._parse_italics_and_bold()
        self._head -= 1
  872. def _handle_list_marker(self):
  873. """Handle a list marker at the head (``#``, ``*``, ``;``, ``:``)."""
  874. markup = self._read()
  875. if markup == ";":
  876. self._context |= contexts.DL_TERM
  877. self._emit(tokens.TagOpenOpen(wiki_markup=markup))
  878. self._emit_text(get_html_tag(markup))
  879. self._emit(tokens.TagCloseSelfclose())
  880. def _handle_list(self):
  881. """Handle a wiki-style list (``#``, ``*``, ``;``, ``:``)."""
  882. self._handle_list_marker()
  883. while self._read(1) in ("#", "*", ";", ":"):
  884. self._head += 1
  885. self._handle_list_marker()
  886. def _handle_hr(self):
  887. """Handle a wiki-style horizontal rule (``----``) in the string."""
  888. length = 4
  889. self._head += 3
  890. while self._read(1) == "-":
  891. length += 1
  892. self._head += 1
  893. self._emit(tokens.TagOpenOpen(wiki_markup="-" * length))
  894. self._emit_text("hr")
  895. self._emit(tokens.TagCloseSelfclose())
  896. def _handle_dl_term(self):
  897. """Handle the term in a description list (``foo`` in ``;foo:bar``)."""
  898. self._context ^= contexts.DL_TERM
  899. if self._read() == ":":
  900. self._handle_list_marker()
  901. else:
  902. self._emit_text("\n")
  903. def _handle_end(self):
  904. """Handle the end of the stream of wikitext."""
  905. if self._context & contexts.FAIL:
  906. if self._context & contexts.TAG_BODY:
  907. if is_single(self._stack[1].text):
  908. return self._handle_single_tag_end()
  909. if self._context & contexts.DOUBLE:
  910. self._pop()
  911. self._fail_route()
  912. return self._pop()
    def _verify_safe(self, this):
        """Make sure we are not trying to write an invalid character.

        Returns False to fail the current route; may also set FAIL_NEXT /
        FAIL_ON_* flags on the context to defer the decision by one
        character.
        """
        context = self._context
        if context & contexts.FAIL_NEXT:
            # The previous character already marked this route as doomed.
            return False
        if context & contexts.WIKILINK_TITLE:
            if this == "]" or this == "{":
                # Might be the link's end or a nested template; decide
                # when the next character arrives.
                self._context |= contexts.FAIL_NEXT
            elif this == "\n" or this == "[" or this == "}":
                return False
            return True
        elif context & contexts.EXT_LINK_TITLE:
            # External link titles may not span lines.
            return this != "\n"
        elif context & contexts.TEMPLATE_NAME:
            if this == "{" or this == "}" or this == "[":
                self._context |= contexts.FAIL_NEXT
                return True
            if this == "]":
                return False
            if this == "|":
                return True
            if context & contexts.HAS_TEXT:
                if context & contexts.FAIL_ON_TEXT:
                    if this is self.END or not this.isspace():
                        return False
                else:
                    if this == "\n":
                        # Name text followed by a newline: any further
                        # non-space text invalidates the template name.
                        self._context |= contexts.FAIL_ON_TEXT
            elif this is self.END or not this.isspace():
                # First non-space character of the template name.
                self._context |= contexts.HAS_TEXT
            return True
        elif context & contexts.TAG_CLOSE:
            return this != "<"
        else:
            if context & contexts.FAIL_ON_EQUALS:
                if this == "=":
                    return False
            elif context & contexts.FAIL_ON_LBRACE:
                if this == "{" or (self._read(-1) == self._read(-2) == "{"):
                    if context & contexts.TEMPLATE:
                        self._context |= contexts.FAIL_ON_EQUALS
                    else:
                        self._context |= contexts.FAIL_NEXT
                    return True
                self._context ^= contexts.FAIL_ON_LBRACE
            elif context & contexts.FAIL_ON_RBRACE:
                if this == "}":
                    self._context |= contexts.FAIL_NEXT
                    return True
                self._context ^= contexts.FAIL_ON_RBRACE
            elif this == "{":
                self._context |= contexts.FAIL_ON_LBRACE
            elif this == "}":
                self._context |= contexts.FAIL_ON_RBRACE
            return True
    def _parse(self, context=0, push=True):
        """Parse the wikicode string, using *context* for when to stop.

        This is the main dispatch loop: each iteration reads one segment
        and either emits it as plain text or delegates to a specialized
        ``_parse_*``/``_handle_*`` method based on the segment and the
        current context.  Returns the token stack of the finished route.
        """
        if push:
            self._push(context)
        while True:
            this = self._read()
            # In unsafe contexts (template names, link titles, ...),
            # reject invalid characters before doing any other work.
            if self._context & contexts.UNSAFE:
                if not self._verify_safe(this):
                    if self._context & contexts.DOUBLE:
                        # Drop the extra stack pushed by a "double" route.
                        self._pop()
                    self._fail_route()
            # Fast path: a segment with no marker characters is text.
            if this not in self.MARKERS:
                self._emit_text(this)
                self._head += 1
                continue
            if this is self.END:
                return self._handle_end()
            next = self._read(1)  # one segment of lookahead
            if this == next == "{":
                if self._can_recurse():
                    self._parse_template_or_argument()
                else:
                    self._emit_text("{")
            elif this == "|" and self._context & contexts.TEMPLATE:
                self._handle_template_param()
            elif this == "=" and self._context & contexts.TEMPLATE_PARAM_KEY:
                self._handle_template_param_value()
            elif this == next == "}" and self._context & contexts.TEMPLATE:
                return self._handle_template_end()
            elif this == "|" and self._context & contexts.ARGUMENT_NAME:
                self._handle_argument_separator()
            elif this == next == "}" and self._context & contexts.ARGUMENT:
                # Arguments close with three braces, not two.
                if self._read(2) == "}":
                    return self._handle_argument_end()
                else:
                    self._emit_text("}")
            elif this == next == "[" and self._can_recurse():
                if not self._context & contexts.NO_WIKILINKS:
                    self._parse_wikilink()
                else:
                    self._emit_text("[")
            elif this == "|" and self._context & contexts.WIKILINK_TITLE:
                self._handle_wikilink_separator()
            elif this == next == "]" and self._context & contexts.WIKILINK:
                return self._handle_wikilink_end()
            elif this == "[":
                # Single bracket: bracket-enclosed external link.
                self._parse_external_link(True)
            elif this == ":" and self._read(-1) not in self.MARKERS:
                # Possible bare external link (scheme ends with ":").
                self._parse_external_link(False)
            elif this == "]" and self._context & contexts.EXT_LINK_TITLE:
                return self._pop()
            elif this == "=" and not self._global & contexts.GL_HEADING:
                # Headings may only start at the beginning of a line.
                if self._read(-1) in ("\n", self.START):
                    self._parse_heading()
                else:
                    self._emit_text("=")
            elif this == "=" and self._context & contexts.HEADING:
                return self._handle_heading_end()
            elif this == "\n" and self._context & contexts.HEADING:
                # Headings may not span multiple lines.
                self._fail_route()
            elif this == "&":
                self._parse_entity()
            elif this == "<" and next == "!":
                # "<!--" opens an HTML comment.
                if self._read(2) == self._read(3) == "-":
                    self._parse_comment()
                else:
                    self._emit_text(this)
            elif this == "<" and next == "/" and self._read(2) is not self.END:
                if self._context & contexts.TAG_BODY:
                    self._handle_tag_open_close()
                else:
                    self._handle_invalid_tag_start()
            elif this == "<" and not self._context & contexts.TAG_CLOSE:
                if self._can_recurse():
                    self._parse_tag()
                else:
                    self._emit_text("<")
            elif this == ">" and self._context & contexts.TAG_CLOSE:
                return self._handle_tag_close_close()
            elif this == next == "'" and not self._skip_style_tags:
                # _parse_style only returns a stack when a style route
                # we are inside has been closed; propagate that upward.
                result = self._parse_style()
                if result is not None:
                    return result
            elif self._read(-1) in ("\n", self.START):
                # Line-initial markup: lists and horizontal rules.
                if this in ("#", "*", ";", ":"):
                    self._handle_list()
                elif this == next == self._read(2) == self._read(3) == "-":
                    self._handle_hr()
                else:
                    self._emit_text(this)
            elif this in ("\n", ":") and self._context & contexts.DL_TERM:
                self._handle_dl_term()
            else:
                self._emit_text(this)
            self._head += 1
  1063. def tokenize(self, text, context=0, skip_style_tags=False):
  1064. """Build a list of tokens from a string of wikicode and return it."""
  1065. self._skip_style_tags = skip_style_tags
  1066. split = self.regex.split(text)
  1067. self._text = [segment for segment in split if segment]
  1068. self._head = self._global = self._depth = self._cycles = 0
  1069. try:
  1070. return self._parse(context)
  1071. except BadRoute: # pragma: no cover (untestable/exceptional case)
  1072. raise ParserError("Python tokenizer exited with BadRoute")