A Python parser for MediaWiki wikicode https://mwparserfromhell.readthedocs.io/
You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
 
 
 
 

1420 lines
56 KiB

  1. # -*- coding: utf-8 -*-
  2. #
  3. # Copyright (C) 2012-2015 Ben Kurtovic <ben.kurtovic@gmail.com>
  4. #
  5. # Permission is hereby granted, free of charge, to any person obtaining a copy
  6. # of this software and associated documentation files (the "Software"), to deal
  7. # in the Software without restriction, including without limitation the rights
  8. # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
  9. # copies of the Software, and to permit persons to whom the Software is
  10. # furnished to do so, subject to the following conditions:
  11. #
  12. # The above copyright notice and this permission notice shall be included in
  13. # all copies or substantial portions of the Software.
  14. #
  15. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16. # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17. # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  18. # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  19. # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  20. # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  21. # SOFTWARE.
  22. from __future__ import unicode_literals
  23. from math import log
  24. import re
  25. from . import contexts, tokens, ParserError
  26. from ..compat import htmlentities, range
  27. from ..definitions import (get_html_tag, is_parsable, is_single,
  28. is_single_only, is_scheme)
  29. __all__ = ["Tokenizer"]
  30. class BadRoute(Exception):
  31. """Raised internally when the current tokenization route is invalid."""
  32. def __init__(self, context=0):
  33. super(BadRoute, self).__init__()
  34. self.context = context
  35. class _TagOpenData(object):
  36. """Stores data about an HTML open tag, like ``<ref name="foo">``."""
  37. CX_NAME = 1 << 0
  38. CX_ATTR_READY = 1 << 1
  39. CX_ATTR_NAME = 1 << 2
  40. CX_ATTR_VALUE = 1 << 3
  41. CX_QUOTED = 1 << 4
  42. CX_NOTE_SPACE = 1 << 5
  43. CX_NOTE_EQUALS = 1 << 6
  44. CX_NOTE_QUOTE = 1 << 7
  45. def __init__(self):
  46. self.context = self.CX_NAME
  47. self.padding_buffer = {"first": "", "before_eq": "", "after_eq": ""}
  48. self.quoter = None
  49. self.reset = 0
class Tokenizer(object):
    """Creates a list of tokens from a string of wikicode."""
    # Mirrors the C tokenizer's interface; this flag identifies the pure-Python one.
    USES_C = False
    # Sentinel objects returned by _read() when reading before the start or
    # past the end of the text (identity-compared, never equal to any string):
    START = object()
    END = object()
    # Characters (plus the two sentinels) that can begin or end wiki markup:
    MARKERS = ["{", "}", "[", "]", "<", ">", "|", "=", "&", "'", "#", "*", ";",
               ":", "/", "-", "!", "\n", START, END]
    # Safety limits: maximum nested-route depth and total routes tried,
    # protecting against pathological inputs.
    MAX_DEPTH = 40
    MAX_CYCLES = 100000
    # Splits the input into marker/non-marker chunks. NOTE(review): the
    # character class contains no letters, so re.IGNORECASE appears redundant.
    regex = re.compile(r"([{}\[\]<>|=&'#*;:/\\\"\-!\n])", flags=re.IGNORECASE)
    # Splits tag text on runs of whitespace, quotes, and backslashes:
    tag_splitter = re.compile(r"([\s\"\'\\]+)")
    def __init__(self):
        self._text = None    # the text being tokenized; None until parsing starts
        self._head = 0       # current read position within self._text
        self._stacks = []    # list of [token stack, context, textbuffer] frames
        self._global = 0     # global context flags shared across all stacks
        self._depth = 0      # current number of open stack frames
        self._cycles = 0     # total number of frames ever pushed (route count)

    @property
    def _stack(self):
        """The current token stack."""
        return self._stacks[-1][0]

    @property
    def _context(self):
        """The current token context."""
        return self._stacks[-1][1]

    @_context.setter
    def _context(self, value):
        self._stacks[-1][1] = value

    @property
    def _textbuffer(self):
        """The current textbuffer."""
        return self._stacks[-1][2]

    @_textbuffer.setter
    def _textbuffer(self, value):
        self._stacks[-1][2] = value
    def _push(self, context=0):
        """Add a new token stack, context, and textbuffer to the list."""
        self._stacks.append([[], context, []])
        self._depth += 1
        # _cycles is never decremented: it counts every route ever attempted.
        self._cycles += 1

    def _push_textbuffer(self):
        """Push the textbuffer onto the stack as a Text node and clear it."""
        if self._textbuffer:
            self._stack.append(tokens.Text(text="".join(self._textbuffer)))
            self._textbuffer = []

    def _pop(self, keep_context=False):
        """Pop the current stack/context/textbuffer, returning the stack.

        If *keep_context* is ``True``, then we will replace the underlying
        stack's context with the current stack's.
        """
        # Flush buffered text first so the returned stack is complete.
        self._push_textbuffer()
        self._depth -= 1
        if keep_context:
            context = self._context
            stack = self._stacks.pop()[0]
            self._context = context
            return stack
        return self._stacks.pop()[0]
    def _can_recurse(self):
        """Return whether or not our max recursion depth has been exceeded."""
        return self._depth < self.MAX_DEPTH and self._cycles < self.MAX_CYCLES

    def _fail_route(self):
        """Fail the current tokenization route.

        Discards the current stack/context/textbuffer and raises
        :exc:`.BadRoute`.
        """
        # Capture the context before popping so the exception can carry it.
        context = self._context
        self._pop()
        raise BadRoute(context)
    def _emit(self, token):
        """Write a token to the end of the current token stack."""
        self._push_textbuffer()
        self._stack.append(token)

    def _emit_first(self, token):
        """Write a token to the beginning of the current token stack."""
        self._push_textbuffer()
        self._stack.insert(0, token)

    def _emit_text(self, text):
        """Write text to the current textbuffer."""
        self._textbuffer.append(text)

    def _emit_all(self, tokenlist):
        """Write a series of tokens to the current stack at once."""
        # If the list begins with a Text token, merge it into the textbuffer
        # so adjacent text is coalesced into a single Text node.
        if tokenlist and isinstance(tokenlist[0], tokens.Text):
            self._emit_text(tokenlist.pop(0).text)
        self._push_textbuffer()
        self._stack.extend(tokenlist)

    def _emit_text_then_stack(self, text):
        """Pop the current stack, write *text*, and then write the stack."""
        stack = self._pop()
        self._emit_text(text)
        if stack:
            self._emit_all(stack)
        # Compensate for the head increment the caller performs afterwards.
        self._head -= 1
    def _read(self, delta=0, wrap=False, strict=False):
        """Read the value at a relative point in the wikicode.

        The value is read from :attr:`self._head <_head>` plus the value of
        *delta* (which can be negative). If *wrap* is ``False``, we will not
        allow attempts to read from the end of the string if ``self._head +
        delta`` is negative. If *strict* is ``True``, the route will be failed
        (with :meth:`_fail_route`) if we try to read from past the end of the
        string; otherwise, :attr:`self.END <END>` is returned. If we try to
        read from before the start of the string, :attr:`self.START <START>` is
        returned.
        """
        index = self._head + delta
        # A negative index would silently wrap in Python; only allow that
        # when *wrap* is set and the wrap stays within the text's bounds.
        if index < 0 and (not wrap or abs(index) > len(self._text)):
            return self.START
        try:
            return self._text[index]
        except IndexError:
            if strict:
                self._fail_route()
            return self.END
    def _parse_template(self, has_content):
        """Parse a template at the head of the wikicode string."""
        reset = self._head
        context = contexts.TEMPLATE_NAME
        if has_content:
            context |= contexts.HAS_TEMPLATE
        try:
            template = self._parse(context)
        except BadRoute:
            # Restore the head so the caller can try another interpretation.
            self._head = reset
            raise
        self._emit_first(tokens.TemplateOpen())
        self._emit_all(template)
        self._emit(tokens.TemplateClose())

    def _parse_argument(self):
        """Parse an argument at the head of the wikicode string."""
        reset = self._head
        try:
            argument = self._parse(contexts.ARGUMENT_NAME)
        except BadRoute:
            # Restore the head so the caller can try another interpretation.
            self._head = reset
            raise
        self._emit_first(tokens.ArgumentOpen())
        self._emit_all(argument)
        self._emit(tokens.ArgumentClose())
    def _parse_template_or_argument(self):
        """Parse a template or argument at the head of the wikicode string."""
        self._head += 2
        # Count the full run of opening braces; their number decides whether
        # we try arguments ({{{...}}}, 3 braces) or templates ({{...}}, 2).
        braces = 2
        while self._read() == "{":
            self._head += 1
            braces += 1
        has_content = False
        self._push()
        while braces:
            if braces == 1:
                # A lone leftover brace can only be literal text.
                return self._emit_text_then_stack("{")
            if braces == 2:
                try:
                    self._parse_template(has_content)
                except BadRoute:
                    return self._emit_text_then_stack("{{")
                break
            # Three or more braces: prefer an argument, falling back to a
            # template with a stray brace, then to literal braces.
            try:
                self._parse_argument()
                braces -= 3
            except BadRoute:
                try:
                    self._parse_template(has_content)
                    braces -= 2
                except BadRoute:
                    return self._emit_text_then_stack("{" * braces)
            if braces:
                has_content = True
                self._head += 1
        self._emit_all(self._pop())
        if self._context & contexts.FAIL_NEXT:
            self._context ^= contexts.FAIL_NEXT
    def _handle_template_param(self):
        """Handle a template parameter at the head of the string."""
        if self._context & contexts.TEMPLATE_NAME:
            # A template with an empty name (no text and no sub-template) is
            # invalid wikicode.
            if not self._context & (contexts.HAS_TEXT | contexts.HAS_TEMPLATE):
                self._fail_route()
            self._context ^= contexts.TEMPLATE_NAME
        elif self._context & contexts.TEMPLATE_PARAM_VALUE:
            self._context ^= contexts.TEMPLATE_PARAM_VALUE
        else:
            self._emit_all(self._pop(keep_context=True))
        self._context |= contexts.TEMPLATE_PARAM_KEY
        self._emit(tokens.TemplateParamSeparator())
        # Parameter keys get their own stack so they can be popped cleanly.
        self._push(self._context)

    def _handle_template_param_value(self):
        """Handle a template parameter's value at the head of the string."""
        self._emit_all(self._pop(keep_context=True))
        self._context ^= contexts.TEMPLATE_PARAM_KEY
        self._context |= contexts.TEMPLATE_PARAM_VALUE
        self._emit(tokens.TemplateParamEquals())

    def _handle_template_end(self):
        """Handle the end of a template at the head of the string."""
        if self._context & contexts.TEMPLATE_NAME:
            # Reject templates whose name is completely empty.
            if not self._context & (contexts.HAS_TEXT | contexts.HAS_TEMPLATE):
                self._fail_route()
        elif self._context & contexts.TEMPLATE_PARAM_KEY:
            self._emit_all(self._pop(keep_context=True))
        self._head += 1
        return self._pop()

    def _handle_argument_separator(self):
        """Handle the separator between an argument's name and default."""
        self._context ^= contexts.ARGUMENT_NAME
        self._context |= contexts.ARGUMENT_DEFAULT
        self._emit(tokens.ArgumentSeparator())

    def _handle_argument_end(self):
        """Handle the end of an argument at the head of the string."""
        # Skip past the closing "}}}" (the third brace is consumed by _parse).
        self._head += 2
        return self._pop()
    def _parse_wikilink(self):
        """Parse an internal wikilink at the head of the wikicode string."""
        reset = self._head + 1
        self._head += 2
        try:
            # If the wikilink looks like an external link, parse it as such:
            link, extra, delta = self._really_parse_external_link(True)
        except BadRoute:
            self._head = reset + 1
            try:
                # Otherwise, actually parse it as a wikilink:
                wikilink = self._parse(contexts.WIKILINK_TITLE)
            except BadRoute:
                # Neither worked; the brackets are plain text.
                self._head = reset
                self._emit_text("[[")
            else:
                self._emit(tokens.WikilinkOpen())
                self._emit_all(wikilink)
                self._emit(tokens.WikilinkClose())
        else:
            if self._context & contexts.EXT_LINK_TITLE:
                # In this exceptional case, an external link that looks like a
                # wikilink inside of an external link is parsed as text:
                self._head = reset
                self._emit_text("[[")
                return
            # The first "[" is literal; the second opens the external link.
            self._emit_text("[")
            self._emit(tokens.ExternalLinkOpen(brackets=True))
            self._emit_all(link)
            self._emit(tokens.ExternalLinkClose())
    def _handle_wikilink_separator(self):
        """Handle the separator between a wikilink's title and its text."""
        self._context ^= contexts.WIKILINK_TITLE
        self._context |= contexts.WIKILINK_TEXT
        self._emit(tokens.WikilinkSeparator())

    def _handle_wikilink_end(self):
        """Handle the end of a wikilink at the head of the string."""
        self._head += 1
        return self._pop()
    def _parse_bracketed_uri_scheme(self):
        """Parse the URI scheme of a bracket-enclosed external link."""
        self._push(contexts.EXT_LINK_URI)
        # Protocol-relative links ("//example.com") have no scheme at all:
        if self._read() == self._read(1) == "/":
            self._emit_text("//")
            self._head += 2
        else:
            valid = "abcdefghijklmnopqrstuvwxyz0123456789+.-"
            # Each chunk from _read() may hold several characters; check all.
            all_valid = lambda: all(char in valid for char in self._read())
            scheme = ""
            while self._read() is not self.END and all_valid():
                scheme += self._read()
                self._emit_text(self._read())
                self._head += 1
            if self._read() != ":":
                self._fail_route()
            self._emit_text(":")
            self._head += 1
            slashes = self._read() == self._read(1) == "/"
            if slashes:
                self._emit_text("//")
                self._head += 2
            if not is_scheme(scheme, slashes):
                self._fail_route()
    def _parse_free_uri_scheme(self):
        """Parse the URI scheme of a free (no brackets) external link."""
        valid = "abcdefghijklmnopqrstuvwxyz0123456789+.-"
        scheme = []
        try:
            # We have to backtrack through the textbuffer looking for our
            # scheme since it was just parsed as text:
            for chunk in reversed(self._textbuffer):
                for char in reversed(chunk):
                    # StopIteration is (ab)used purely to break out of both
                    # loops at once; this is not a generator, so it is safe.
                    if char.isspace() or char in self.MARKERS:
                        raise StopIteration()
                    if char not in valid:
                        raise BadRoute()
                    scheme.append(char)
        except StopIteration:
            pass
        # Characters were collected back-to-front; reverse to get the scheme.
        scheme = "".join(reversed(scheme))
        slashes = self._read() == self._read(1) == "/"
        if not is_scheme(scheme, slashes):
            raise BadRoute()
        self._push(self._context | contexts.EXT_LINK_URI)
        self._emit_text(scheme)
        self._emit_text(":")
        if slashes:
            self._emit_text("//")
            self._head += 2
    def _handle_free_link_text(self, punct, tail, this):
        """Handle text in a free ext link, including trailing punctuation.

        *punct* is the tuple of characters treated as trailing punctuation;
        *tail* is punctuation tentatively held back from the link; *this* is
        the current text chunk. Returns the updated ``(punct, tail)`` pair.
        """
        if "(" in this and ")" in punct:
            punct = punct[:-1]  # ')' is no longer valid punctuation
        if this.endswith(punct):
            # Find the boundary between link text and trailing punctuation.
            for i in range(len(this) - 1, 0, -1):
                if this[i - 1] not in punct:
                    break
            else:
                i = 0
            stripped = this[:i]
            if stripped and tail:
                # Earlier held-back punctuation turned out to be inside the
                # link after all; emit it before the new text.
                self._emit_text(tail)
                tail = ""
            tail += this[i:]
            this = stripped
        elif tail:
            self._emit_text(tail)
            tail = ""
        self._emit_text(this)
        return punct, tail
    def _is_free_link_end(self, this, next):
        """Return whether the current head is the end of a free link."""
        # Built from _parse()'s end sentinels:
        after, ctx = self._read(2), self._context
        equal_sign_contexts = contexts.TEMPLATE_PARAM_KEY | contexts.HEADING
        return (this in (self.END, "\n", "[", "]", "<", ">") or
                this == next == "'" or
                (this == "|" and ctx & contexts.TEMPLATE) or
                (this == "=" and ctx & equal_sign_contexts) or
                (this == next == "}" and ctx & contexts.TEMPLATE) or
                (this == next == after == "}" and ctx & contexts.ARGUMENT))
  380. def _really_parse_external_link(self, brackets):
  381. """Really parse an external link."""
  382. if brackets:
  383. self._parse_bracketed_uri_scheme()
  384. invalid = ("\n", " ", "]")
  385. else:
  386. self._parse_free_uri_scheme()
  387. invalid = ("\n", " ", "[", "]")
  388. punct = tuple(",;\.:!?)")
  389. if self._read() is self.END or self._read()[0] in invalid:
  390. self._fail_route()
  391. tail = ""
  392. while True:
  393. this, next = self._read(), self._read(1)
  394. if this == "&":
  395. if tail:
  396. self._emit_text(tail)
  397. tail = ""
  398. self._parse_entity()
  399. elif (this == "<" and next == "!" and self._read(2) ==
  400. self._read(3) == "-"):
  401. if tail:
  402. self._emit_text(tail)
  403. tail = ""
  404. self._parse_comment()
  405. elif not brackets and self._is_free_link_end(this, next):
  406. return self._pop(), tail, -1
  407. elif this is self.END or this == "\n":
  408. self._fail_route()
  409. elif this == next == "{" and self._can_recurse():
  410. if tail:
  411. self._emit_text(tail)
  412. tail = ""
  413. self._parse_template_or_argument()
  414. elif this == "]":
  415. return self._pop(), tail, 0
  416. elif " " in this:
  417. before, after = this.split(" ", 1)
  418. if brackets:
  419. self._emit_text(before)
  420. self._emit(tokens.ExternalLinkSeparator())
  421. if after:
  422. self._emit_text(after)
  423. self._context ^= contexts.EXT_LINK_URI
  424. self._context |= contexts.EXT_LINK_TITLE
  425. self._head += 1
  426. return self._parse(push=False), None, 0
  427. punct, tail = self._handle_free_link_text(punct, tail, before)
  428. return self._pop(), tail + " " + after, 0
  429. elif not brackets:
  430. punct, tail = self._handle_free_link_text(punct, tail, this)
  431. else:
  432. self._emit_text(this)
  433. self._head += 1
    def _remove_uri_scheme_from_textbuffer(self, scheme):
        """Remove the URI scheme of a new external link from the textbuffer."""
        length = len(scheme)
        # Walk backwards through the buffered chunks, trimming until the
        # whole scheme (already emitted as plain text) has been removed.
        while length:
            if length < len(self._textbuffer[-1]):
                self._textbuffer[-1] = self._textbuffer[-1][:-length]
                break
            length -= len(self._textbuffer[-1])
            self._textbuffer.pop()
    def _parse_external_link(self, brackets):
        """Parse an external link at the head of the wikicode string."""
        reset = self._head
        self._head += 1
        try:
            # External links are forbidden in some contexts (e.g. inside
            # other links) and when recursion limits are hit.
            bad_context = self._context & contexts.NO_EXT_LINKS
            if bad_context or not self._can_recurse():
                raise BadRoute()
            link, extra, delta = self._really_parse_external_link(brackets)
        except BadRoute:
            self._head = reset
            if not brackets and self._context & contexts.DL_TERM:
                self._handle_dl_term()
            else:
                self._emit_text(self._read())
        else:
            if not brackets:
                # For free links the scheme was emitted as plain text before
                # we knew this was a link; pull it back out of the buffer.
                scheme = link[0].text.split(":", 1)[0]
                self._remove_uri_scheme_from_textbuffer(scheme)
            self._emit(tokens.ExternalLinkOpen(brackets=brackets))
            self._emit_all(link)
            self._emit(tokens.ExternalLinkClose())
            self._head += delta
            if extra:
                self._emit_text(extra)
    def _parse_heading(self):
        """Parse a section heading at the head of the wikicode string."""
        self._global |= contexts.GL_HEADING
        reset = self._head
        self._head += 1
        # Count the run of "=" signs; the heading level is capped at 6.
        best = 1
        while self._read() == "=":
            best += 1
            self._head += 1
        context = contexts.HEADING_LEVEL_1 << min(best - 1, 5)
        try:
            title, level = self._parse(context)
        except BadRoute:
            # Not a valid heading; re-emit the "=" signs as text.
            self._head = reset + best - 1
            self._emit_text("=" * best)
        else:
            self._emit(tokens.HeadingStart(level=level))
            if level < best:
                # Extra leading "=" beyond the matched level become text.
                self._emit_text("=" * (best - level))
            self._emit_all(title)
            self._emit(tokens.HeadingEnd())
        finally:
            self._global ^= contexts.GL_HEADING

    def _handle_heading_end(self):
        """Handle the end of a section heading at the head of the string."""
        reset = self._head
        self._head += 1
        best = 1
        while self._read() == "=":
            best += 1
            self._head += 1
        # Recover this heading's level from the context bit that was set.
        current = int(log(self._context / contexts.HEADING_LEVEL_1, 2)) + 1
        level = min(current, min(best, 6))
        try:  # Try to check for a heading closure after this one
            after, after_level = self._parse(self._context)
        except BadRoute:
            if level < best:
                self._emit_text("=" * (best - level))
            self._head = reset + best - 1
            return self._pop(), level
        else:  # Found another closure
            self._emit_text("=" * best)
            self._emit_all(after)
            return self._pop(), after_level
    def _really_parse_entity(self):
        """Actually parse an HTML entity and ensure that it is valid."""
        self._emit(tokens.HTMLEntityStart())
        self._head += 1
        this = self._read(strict=True)
        if this == "#":
            numeric = True
            self._emit(tokens.HTMLEntityNumeric())
            self._head += 1
            this = self._read(strict=True)
            # An "x"/"X" after "#" marks a hexadecimal entity (&#x...;).
            if this[0].lower() == "x":
                hexadecimal = True
                self._emit(tokens.HTMLEntityHex(char=this[0]))
                this = this[1:]
                if not this:
                    self._fail_route()
            else:
                hexadecimal = False
        else:
            numeric = hexadecimal = False
        valid = "0123456789abcdefABCDEF" if hexadecimal else "0123456789"
        if not numeric and not hexadecimal:
            valid += "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
        if not all([char in valid for char in this]):
            self._fail_route()
        self._head += 1
        if self._read() != ";":
            self._fail_route()
        if numeric:
            # Numeric entities must map to a legal Unicode code point.
            test = int(this, 16) if hexadecimal else int(this)
            if test < 1 or test > 0x10FFFF:
                self._fail_route()
        else:
            # Named entities must be in the standard HTML entity table.
            if this not in htmlentities.entitydefs:
                self._fail_route()
        self._emit(tokens.Text(text=this))
        self._emit(tokens.HTMLEntityEnd())

    def _parse_entity(self):
        """Parse an HTML entity at the head of the wikicode string."""
        reset = self._head
        self._push()
        try:
            self._really_parse_entity()
        except BadRoute:
            # Invalid entity: treat the "&" as plain text.
            self._head = reset
            self._emit_text(self._read())
        else:
            self._emit_all(self._pop())
    def _parse_comment(self):
        """Parse an HTML comment at the head of the wikicode string."""
        self._head += 4  # Skip past "<!--"
        reset = self._head - 1
        self._push()
        while True:
            this = self._read()
            if this == self.END:
                # Unclosed comment: backtrack and emit "<!--" as text.
                self._pop()
                self._head = reset
                self._emit_text("<!--")
                return
            if this == self._read(1) == "-" and self._read(2) == ">":
                self._emit_first(tokens.CommentStart())
                self._emit(tokens.CommentEnd())
                self._emit_all(self._pop())
                self._head += 2
                if self._context & contexts.FAIL_NEXT:
                    # _verify_safe() sets this flag while parsing a template
                    # or link when it encounters what might be a comment -- we
                    # must unset it to let _verify_safe() know it was correct:
                    self._context ^= contexts.FAIL_NEXT
                return
            self._emit_text(this)
            self._head += 1
    def _push_tag_buffer(self, data):
        """Write a pending tag attribute from *data* to the stack."""
        if data.context & data.CX_QUOTED:
            # Pop the quoted-value stack first and prefix it with its quote.
            self._emit_first(tokens.TagAttrQuote(char=data.quoter))
            self._emit_all(self._pop())
        buf = data.padding_buffer
        self._emit_first(tokens.TagAttrStart(pad_first=buf["first"],
                         pad_before_eq=buf["before_eq"],
                         pad_after_eq=buf["after_eq"]))
        self._emit_all(self._pop())
        # Reset the padding buffers for the next attribute.
        for key in data.padding_buffer:
            data.padding_buffer[key] = ""

    def _handle_tag_space(self, data, text):
        """Handle whitespace (*text*) inside of an HTML open tag."""
        ctx = data.context
        # Whitespace after an unquoted value ends that attribute:
        end_of_value = ctx & data.CX_ATTR_VALUE and not ctx & (data.CX_QUOTED | data.CX_NOTE_QUOTE)
        if end_of_value or (ctx & data.CX_QUOTED and ctx & data.CX_NOTE_SPACE):
            self._push_tag_buffer(data)
            data.context = data.CX_ATTR_READY
        elif ctx & data.CX_NOTE_SPACE:
            data.context = data.CX_ATTR_READY
        elif ctx & data.CX_ATTR_NAME:
            data.context |= data.CX_NOTE_EQUALS
            data.padding_buffer["before_eq"] += text
        # Record the whitespace either as literal text (inside quotes) or as
        # padding around the attribute.
        if ctx & data.CX_QUOTED and not ctx & data.CX_NOTE_SPACE:
            self._emit_text(text)
        elif data.context & data.CX_ATTR_READY:
            data.padding_buffer["first"] += text
        elif data.context & data.CX_ATTR_VALUE:
            data.padding_buffer["after_eq"] += text
    def _handle_tag_text(self, text):
        """Handle regular *text* inside of an HTML open tag."""
        next = self._read(1)
        # Only recurse into markup when allowed and *text* is a marker.
        if not self._can_recurse() or text not in self.MARKERS:
            self._emit_text(text)
        elif text == next == "{":
            self._parse_template_or_argument()
        elif text == next == "[":
            self._parse_wikilink()
        elif text == "<":
            self._parse_tag()
        else:
            self._emit_text(text)
    def _handle_tag_data(self, data, text):
        """Handle all sorts of *text* data inside of an HTML open tag."""
        for chunk in self.tag_splitter.split(text):
            if not chunk:
                continue
            if data.context & data.CX_NAME:
                if chunk in self.MARKERS or chunk.isspace():
                    self._fail_route()  # Tags must start with text, not spaces
                data.context = data.CX_NOTE_SPACE
            elif chunk.isspace():
                self._handle_tag_space(data, chunk)
                continue
            elif data.context & data.CX_NOTE_SPACE:
                if data.context & data.CX_QUOTED:
                    # Non-space after a closing quote: the quote was not
                    # really closing; reparse the value as unquoted.
                    data.context = data.CX_ATTR_VALUE
                    self._pop()
                    self._head = data.reset - 1  # Will be auto-incremented
                    return  # Break early
                self._fail_route()
            elif data.context & data.CX_ATTR_READY:
                data.context = data.CX_ATTR_NAME
                self._push(contexts.TAG_ATTR)
            elif data.context & data.CX_ATTR_NAME:
                if chunk == "=":
                    data.context = data.CX_ATTR_VALUE | data.CX_NOTE_QUOTE
                    self._emit(tokens.TagAttrEquals())
                    continue
                if data.context & data.CX_NOTE_EQUALS:
                    # A new name without "=": the previous attribute is done.
                    self._push_tag_buffer(data)
                    data.context = data.CX_ATTR_NAME
                    self._push(contexts.TAG_ATTR)
            else:  # data.context & data.CX_ATTR_VALUE assured
                # A backslash escapes the following quote, unless it is
                # itself escaped by a preceding backslash.
                escaped = self._read(-1) == "\\" and self._read(-2) != "\\"
                if data.context & data.CX_NOTE_QUOTE:
                    data.context ^= data.CX_NOTE_QUOTE
                    if chunk in "'\"" and not escaped:
                        data.context |= data.CX_QUOTED
                        data.quoter = chunk
                        data.reset = self._head
                        self._push(self._context)
                        continue
                elif data.context & data.CX_QUOTED:
                    if chunk == data.quoter and not escaped:
                        data.context |= data.CX_NOTE_SPACE
                        continue
            self._handle_tag_text(chunk)
    def _handle_tag_close_open(self, data, token):
        """Handle the closing of a open tag (``<foo>``)."""
        # Flush any attribute still being built before closing the tag.
        if data.context & (data.CX_ATTR_NAME | data.CX_ATTR_VALUE):
            self._push_tag_buffer(data)
        self._emit(token(padding=data.padding_buffer["first"]))
        self._head += 1

    def _handle_tag_open_close(self):
        """Handle the opening of a closing tag (``</foo>``)."""
        self._emit(tokens.TagOpenClose())
        self._push(contexts.TAG_CLOSE)
        self._head += 1

    def _handle_tag_close_close(self):
        """Handle the ending of a closing tag (``</foo>``)."""
        strip = lambda tok: tok.text.rstrip().lower()
        closing = self._pop()
        # The closing tag must be a single Text token matching the opening
        # tag's name (stack index 1 holds the name emitted after TagOpenOpen).
        if len(closing) != 1 or (not isinstance(closing[0], tokens.Text) or
                                 strip(closing[0]) != strip(self._stack[1])):
            self._fail_route()
        self._emit_all(closing)
        self._emit(tokens.TagCloseClose())
        return self._pop()
    def _handle_blacklisted_tag(self):
        """Handle the body of an HTML tag that is parser-blacklisted."""
        strip = lambda text: text.rstrip().lower()
        while True:
            this, next = self._read(), self._read(1)
            if this is self.END:
                self._fail_route()
            elif this == "<" and next == "/":
                self._head += 3
                # Only a matching "</name>" ends the blacklisted body; other
                # "</" sequences are literal text.
                if self._read() != ">" or (strip(self._read(-1)) !=
                                           strip(self._stack[1].text)):
                    self._head -= 1
                    self._emit_text("</")
                    continue
                self._emit(tokens.TagOpenClose())
                self._emit_text(self._read(-1))
                self._emit(tokens.TagCloseClose())
                return self._pop()
            elif this == "&":
                # Entities are still parsed inside blacklisted tags.
                self._parse_entity()
            else:
                self._emit_text(this)
            self._head += 1
    def _handle_single_only_tag_end(self):
        """Handle the end of an implicitly closing single-only HTML tag."""
        # Replace the TagCloseOpen just emitted with an implicit self-close.
        padding = self._stack.pop().padding
        self._emit(tokens.TagCloseSelfclose(padding=padding, implicit=True))
        self._head -= 1  # Offset displacement done by _handle_tag_close_open
        return self._pop()

    def _handle_single_tag_end(self):
        """Handle the stream end when inside a single-supporting HTML tag."""
        stack = self._stack
        # We need to find the index of the TagCloseOpen token corresponding to
        # the TagOpenOpen token located at index 0:
        depth = 1
        for index, token in enumerate(stack[2:], 2):
            if isinstance(token, tokens.TagOpenOpen):
                depth += 1
            elif isinstance(token, tokens.TagCloseOpen):
                depth -= 1
                if depth == 0:
                    break
        else:  # pragma: no cover (untestable/exceptional case)
            raise ParserError("_handle_single_tag_end() missed a TagCloseOpen")
        padding = stack[index].padding
        stack[index] = tokens.TagCloseSelfclose(padding=padding, implicit=True)
        return self._pop()
    def _really_parse_tag(self):
        """Actually parse an HTML tag, starting with the open (``<foo>``)."""
        data = _TagOpenData()
        self._push(contexts.TAG_OPEN)
        self._emit(tokens.TagOpenOpen())
        while True:
            this, next = self._read(), self._read(1)
            # The tag may only close once we are past the name and not inside
            # an unterminated quoted value:
            can_exit = (not data.context & (data.CX_QUOTED | data.CX_NAME) or
                        data.context & data.CX_NOTE_SPACE)
            if this is self.END:
                if self._context & contexts.TAG_ATTR:
                    if data.context & data.CX_QUOTED:
                        # Unclosed attribute quote: reset, don't die
                        data.context = data.CX_ATTR_VALUE
                        self._pop()
                        self._head = data.reset
                        continue
                    self._pop()
                self._fail_route()
            elif this == ">" and can_exit:
                self._handle_tag_close_open(data, tokens.TagCloseOpen)
                self._context = contexts.TAG_BODY
                # Stack index 1 holds the tag's name text.
                if is_single_only(self._stack[1].text):
                    return self._handle_single_only_tag_end()
                if is_parsable(self._stack[1].text):
                    return self._parse(push=False)
                return self._handle_blacklisted_tag()
            elif this == "/" and next == ">" and can_exit:
                self._handle_tag_close_open(data, tokens.TagCloseSelfclose)
                return self._pop()
            else:
                self._handle_tag_data(data, this)
            self._head += 1
    def _handle_invalid_tag_start(self):
        """Handle the (possible) start of an implicitly closing single tag."""
        reset = self._head + 1
        self._head += 2
        try:
            # Only tags that implicitly self-close (like <br>) may appear as
            # "</br>"; anything else makes "</" literal text.
            if not is_single_only(self.tag_splitter.split(self._read())[0]):
                raise BadRoute()
            tag = self._really_parse_tag()
        except BadRoute:
            self._head = reset
            self._emit_text("</")
        else:
            tag[0].invalid = True  # Set flag of TagOpenOpen
            self._emit_all(tag)

    def _parse_tag(self):
        """Parse an HTML tag at the head of the wikicode string."""
        reset = self._head
        self._head += 1
        try:
            tag = self._really_parse_tag()
        except BadRoute:
            # Not a valid tag; the "<" is plain text.
            self._head = reset
            self._emit_text("<")
        else:
            self._emit_all(tag)
    def _emit_style_tag(self, tag, markup, body):
        """Write the body of a tag and the tokens that should surround it.

        *tag* is the HTML tag name (e.g. ``"i"``), *markup* the original wiki
        markup (e.g. ``"''"``), and *body* the already-parsed token stack.
        """
        self._emit(tokens.TagOpenOpen(wiki_markup=markup))
        self._emit_text(tag)
        self._emit(tokens.TagCloseOpen())
        self._emit_all(body)
        self._emit(tokens.TagOpenClose())
        self._emit_text(tag)
        self._emit(tokens.TagCloseClose())
    def _parse_italics(self):
        """Parse wiki-style italics."""
        reset = self._head
        try:
            stack = self._parse(contexts.STYLE_ITALICS)
        except BadRoute as route:
            self._head = reset
            if route.context & contexts.STYLE_PASS_AGAIN:
                # The route failed because of a bold construct inside; retry
                # once with the second-pass flag set.
                new_ctx = contexts.STYLE_ITALICS | contexts.STYLE_SECOND_PASS
                stack = self._parse(new_ctx)
            else:
                return self._emit_text("''")
        self._emit_style_tag("i", "''", stack)

    def _parse_bold(self):
        """Parse wiki-style bold.

        Returns ``True`` when the caller should stop processing ticks.
        """
        reset = self._head
        try:
            stack = self._parse(contexts.STYLE_BOLD)
        except BadRoute:
            self._head = reset
            if self._context & contexts.STYLE_SECOND_PASS:
                self._emit_text("'")
                return True
            elif self._context & contexts.STYLE_ITALICS:
                # Let the enclosing italics route know it should be retried.
                self._context |= contexts.STYLE_PASS_AGAIN
                self._emit_text("'''")
            else:
                # Treat one tick as text and the rest as italics.
                self._emit_text("'")
                self._parse_italics()
        else:
            self._emit_style_tag("b", "'''", stack)
    def _parse_italics_and_bold(self):
        """Parse wiki-style italics and bold together (i.e., five ticks).

        Tries the possible interpretations in order, rewinding the head
        after each failed route: bold-then-italics, italics-then-bold, or
        plain text if neither parses.
        """
        reset = self._head
        try:
            stack = self._parse(contexts.STYLE_BOLD)
        except BadRoute:
            self._head = reset
            try:
                stack = self._parse(contexts.STYLE_ITALICS)
            except BadRoute:
                # Neither interpretation works: emit the ticks literally.
                self._head = reset
                self._emit_text("'''''")
            else:
                # Italics parsed; check whether bold follows inside it.
                reset = self._head
                try:
                    stack2 = self._parse(contexts.STYLE_BOLD)
                except BadRoute:
                    # Just italics, preceded by three literal ticks.
                    self._head = reset
                    self._emit_text("'''")
                    self._emit_style_tag("i", "''", stack)
                else:
                    # Nested form: <b><i>...</i>...</b>
                    self._push()
                    self._emit_style_tag("i", "''", stack)
                    self._emit_all(stack2)
                    self._emit_style_tag("b", "'''", self._pop())
        else:
            # Bold parsed; check whether italics follows inside it.
            reset = self._head
            try:
                stack2 = self._parse(contexts.STYLE_ITALICS)
            except BadRoute:
                # Just bold, preceded by two literal ticks.
                self._head = reset
                self._emit_text("''")
                self._emit_style_tag("b", "'''", stack)
            else:
                # Nested form: <i><b>...</b>...</i>
                self._push()
                self._emit_style_tag("b", "'''", stack)
                self._emit_all(stack2)
                self._emit_style_tag("i", "''", self._pop())
    def _parse_style(self):
        """Parse wiki-style formatting (``''``/``'''`` for italics/bold).

        Returns a stack (via :meth:`_pop`) when the tick run closes the
        current style route; returns ``None`` when parsing should continue.
        """
        self._head += 2
        ticks = 2
        # Count the full run of apostrophes at the head:
        while self._read() == "'":
            self._head += 1
            ticks += 1
        italics = self._context & contexts.STYLE_ITALICS
        bold = self._context & contexts.STYLE_BOLD
        if ticks > 5:
            # More than five: the extras are literal apostrophes.
            self._emit_text("'" * (ticks - 5))
            ticks = 5
        elif ticks == 4:
            # Four ticks: one literal apostrophe plus bold markup.
            self._emit_text("'")
            ticks = 3
        if (italics and ticks in (2, 5)) or (bold and ticks in (3, 5)):
            # The run closes an open italics/bold route.
            if ticks == 5:
                # Only part of the run closes it; rewind over the rest.
                self._head -= 3 if italics else 2
            return self._pop()
        elif not self._can_recurse():
            # Too deep to open a new style route; emit ticks as text.
            if ticks == 3:
                if self._context & contexts.STYLE_SECOND_PASS:
                    self._emit_text("'")
                    return self._pop()
                if self._context & contexts.STYLE_ITALICS:
                    self._context |= contexts.STYLE_PASS_AGAIN
            self._emit_text("'" * ticks)
        elif ticks == 2:
            self._parse_italics()
        elif ticks == 3:
            if self._parse_bold():
                return self._pop()
        else:  # ticks == 5
            self._parse_italics_and_bold()
        self._head -= 1
  912. def _handle_list_marker(self):
  913. """Handle a list marker at the head (``#``, ``*``, ``;``, ``:``)."""
  914. markup = self._read()
  915. if markup == ";":
  916. self._context |= contexts.DL_TERM
  917. self._emit(tokens.TagOpenOpen(wiki_markup=markup))
  918. self._emit_text(get_html_tag(markup))
  919. self._emit(tokens.TagCloseSelfclose())
  920. def _handle_list(self):
  921. """Handle a wiki-style list (``#``, ``*``, ``;``, ``:``)."""
  922. self._handle_list_marker()
  923. while self._read(1) in ("#", "*", ";", ":"):
  924. self._head += 1
  925. self._handle_list_marker()
  926. def _handle_hr(self):
  927. """Handle a wiki-style horizontal rule (``----``) in the string."""
  928. length = 4
  929. self._head += 3
  930. while self._read(1) == "-":
  931. length += 1
  932. self._head += 1
  933. self._emit(tokens.TagOpenOpen(wiki_markup="-" * length))
  934. self._emit_text("hr")
  935. self._emit(tokens.TagCloseSelfclose())
  936. def _handle_dl_term(self):
  937. """Handle the term in a description list (``foo`` in ``;foo:bar``)."""
  938. self._context ^= contexts.DL_TERM
  939. if self._read() == ":":
  940. self._handle_list_marker()
  941. else:
  942. self._emit_text("\n")
  943. def _emit_table_tag(self, open_open_markup, tag, style, padding,
  944. close_open_markup, contents, open_close_markup):
  945. """Emit a table tag."""
  946. self._emit(tokens.TagOpenOpen(wiki_markup=open_open_markup))
  947. self._emit_text(tag)
  948. if style:
  949. self._emit_all(style)
  950. if close_open_markup:
  951. self._emit(tokens.TagCloseOpen(wiki_markup=close_open_markup,
  952. padding=padding))
  953. else:
  954. self._emit(tokens.TagCloseOpen(padding=padding))
  955. if contents:
  956. self._emit_all(contents)
  957. self._emit(tokens.TagOpenClose(wiki_markup=open_close_markup))
  958. self._emit_text(tag)
  959. self._emit(tokens.TagCloseClose())
    def _handle_table_style(self, end_token):
        """Handle style attributes for a table until ``end_token``.

        Returns the padding string collected before the first attribute.
        """
        data = _TagOpenData()
        data.context = _TagOpenData.CX_ATTR_READY
        while True:
            this = self._read()
            # We may not exit while inside a quoted attribute value, unless
            # a space has been noted after the opening quote:
            can_exit = (not data.context & data.CX_QUOTED or
                        data.context & data.CX_NOTE_SPACE)
            if this == end_token and can_exit:
                # Flush any partially-read attribute before returning:
                if data.context & (data.CX_ATTR_NAME | data.CX_ATTR_VALUE):
                    self._push_tag_buffer(data)
                if this.isspace():
                    data.padding_buffer["first"] += this
                return data.padding_buffer["first"]
            elif this is self.END or this == end_token:
                if self._context & contexts.TAG_ATTR:
                    if data.context & data.CX_QUOTED:
                        # Unclosed attribute quote: reset, don't die
                        data.context = data.CX_ATTR_VALUE
                        self._pop()
                        self._head = data.reset
                        continue
                    self._pop()
                self._fail_route()
            else:
                self._handle_tag_data(data, this)
            self._head += 1
  987. def _parse_table(self):
  988. """Parse a wikicode table by starting with the first line."""
  989. reset = self._head + 1
  990. self._head += 2
  991. self._push(contexts.TABLE_OPEN)
  992. try:
  993. padding = self._handle_table_style("\n")
  994. except BadRoute:
  995. self._head = reset
  996. self._emit_text("{|")
  997. return
  998. style = self._pop()
  999. self._head += 1
  1000. try:
  1001. table = self._parse(contexts.TABLE_OPEN)
  1002. except BadRoute:
  1003. self._head = reset
  1004. self._emit_text("{|")
  1005. return
  1006. self._emit_table_tag("{|", "table", style, padding, None, table, "|}")
  1007. # Offset displacement done by _parse():
  1008. self._head -= 1
  1009. def _handle_table_row(self):
  1010. """Parse as style until end of the line, then continue."""
  1011. self._head += 2
  1012. if not self._can_recurse():
  1013. self._emit_text("|-")
  1014. self._head -= 1
  1015. return
  1016. self._push(contexts.TABLE_OPEN | contexts.TABLE_ROW_OPEN)
  1017. try:
  1018. padding = self._handle_table_style("\n")
  1019. except BadRoute:
  1020. self._pop()
  1021. raise
  1022. style = self._pop()
  1023. # Don't parse the style separator:
  1024. self._head += 1
  1025. row = self._parse(contexts.TABLE_OPEN | contexts.TABLE_ROW_OPEN)
  1026. self._emit_table_tag("|-", "tr", style, padding, None, row, "")
  1027. # Offset displacement done by parse():
  1028. self._head -= 1
    def _handle_table_cell(self, markup, tag, line_context):
        """Parse as normal syntax unless we hit a style marker, then parse
        style as HTML attributes and the remainder as normal syntax."""
        old_context = self._context
        padding, style = "", None
        self._head += len(markup)
        reset = self._head
        if not self._can_recurse():
            # Recursion limit reached: emit the cell marker as plain text.
            self._emit_text(markup)
            self._head -= 1
            return
        # First pass: parse assuming the cell may have a style section.
        cell = self._parse(contexts.TABLE_OPEN | contexts.TABLE_CELL_OPEN |
                           line_context | contexts.TABLE_CELL_STYLE)
        cell_context = self._context
        self._context = old_context
        reset_for_style = cell_context & contexts.TABLE_CELL_STYLE
        if reset_for_style:
            # A style marker ("|") was hit: rewind, parse the attributes,
            # then re-parse the remainder as the actual cell contents.
            self._head = reset
            self._push(contexts.TABLE_OPEN | contexts.TABLE_CELL_OPEN |
                       line_context)
            padding = self._handle_table_style("|")
            style = self._pop()
            # Don't parse the style separator:
            self._head += 1
            cell = self._parse(contexts.TABLE_OPEN | contexts.TABLE_CELL_OPEN |
                               line_context)
            cell_context = self._context
            self._context = old_context
        close_open_markup = "|" if reset_for_style else None
        self._emit_table_tag(markup, tag, style, padding, close_open_markup,
                             cell, "")
        # Keep header/cell line contexts:
        self._context |= cell_context & (contexts.TABLE_TH_LINE |
                                         contexts.TABLE_TD_LINE)
        # Offset displacement done by parse():
        self._head -= 1
  1065. def _handle_table_cell_end(self, reset_for_style=False):
  1066. """Returns the current context, with the TABLE_CELL_STYLE flag set if
  1067. it is necessary to reset and parse style attributes."""
  1068. if reset_for_style:
  1069. self._context |= contexts.TABLE_CELL_STYLE
  1070. else:
  1071. self._context &= ~contexts.TABLE_CELL_STYLE
  1072. return self._pop(keep_context=True)
    def _handle_table_row_end(self):
        """Return the stack in order to handle the table row end."""
        # The caller (in _parse) emits the row tag from the popped stack.
        return self._pop()
  1076. def _handle_table_end(self):
  1077. """Return the stack in order to handle the table end."""
  1078. self._head += 2
  1079. return self._pop()
  1080. def _handle_end(self):
  1081. """Handle the end of the stream of wikitext."""
  1082. if self._context & contexts.FAIL:
  1083. if self._context & contexts.TAG_BODY:
  1084. if is_single(self._stack[1].text):
  1085. return self._handle_single_tag_end()
  1086. if self._context & contexts.TABLE_CELL_OPEN:
  1087. self._pop()
  1088. if self._context & contexts.DOUBLE:
  1089. self._pop()
  1090. self._fail_route()
  1091. return self._pop()
    def _verify_safe(self, this):
        """Make sure we are not trying to write an invalid character.

        Returns ``False`` to signal that the current route must fail.
        May also set flags on ``self._context`` (e.g. FAIL_NEXT) so a
        *later* character fails the route instead.
        """
        context = self._context
        if context & contexts.FAIL_NEXT:
            # A previous character already doomed this route.
            return False
        if context & contexts.WIKILINK_TITLE:
            if this == "]" or this == "{":
                self._context |= contexts.FAIL_NEXT
            elif this == "\n" or this == "[" or this == "}" or this == ">":
                return False
            elif this == "<":
                # "<!" (a comment start) is deferred one character; any
                # other "<" invalidates the title immediately.
                if self._read(1) == "!":
                    self._context |= contexts.FAIL_NEXT
                else:
                    return False
            return True
        elif context & contexts.EXT_LINK_TITLE:
            # External link titles may contain anything except a newline.
            return this != "\n"
        elif context & contexts.TEMPLATE_NAME:
            if this == "{":
                self._context |= contexts.HAS_TEMPLATE | contexts.FAIL_NEXT
                return True
            if this == "}" or (this == "<" and self._read(1) == "!"):
                self._context |= contexts.FAIL_NEXT
                return True
            if this == "[" or this == "]" or this == "<" or this == ">":
                return False
            if this == "|":
                return True
            if context & contexts.HAS_TEXT:
                if context & contexts.FAIL_ON_TEXT:
                    # Once text has ended, only whitespace may follow.
                    if this is self.END or not this.isspace():
                        return False
                elif this == "\n":
                    self._context |= contexts.FAIL_ON_TEXT
            elif this is self.END or not this.isspace():
                self._context |= contexts.HAS_TEXT
            return True
        elif context & contexts.TAG_CLOSE:
            return this != "<"
        else:
            # Fallback: track brace pairs so that nested template/argument
            # markup is allowed but stray braces fail the route.
            if context & contexts.FAIL_ON_EQUALS:
                if this == "=":
                    return False
            elif context & contexts.FAIL_ON_LBRACE:
                if this == "{" or (self._read(-1) == self._read(-2) == "{"):
                    if context & contexts.TEMPLATE:
                        self._context |= contexts.FAIL_ON_EQUALS
                    else:
                        self._context |= contexts.FAIL_NEXT
                    return True
                self._context ^= contexts.FAIL_ON_LBRACE
            elif context & contexts.FAIL_ON_RBRACE:
                if this == "}":
                    self._context |= contexts.FAIL_NEXT
                    return True
                self._context ^= contexts.FAIL_ON_RBRACE
            elif this == "{":
                self._context |= contexts.FAIL_ON_LBRACE
            elif this == "}":
                self._context |= contexts.FAIL_ON_RBRACE
            return True
    def _parse(self, context=0, push=True):
        """Parse the wikicode string, using *context* for when to stop.

        This is the tokenizer's central dispatch loop: it reads one
        character at a time and routes markers to the appropriate handler.
        Branch order is significant -- do not reorder the elif chain.
        """
        if push:
            self._push(context)
        while True:
            this = self._read()
            # In unsafe contexts, validate the character before writing it:
            if self._context & contexts.UNSAFE:
                if not self._verify_safe(this):
                    if self._context & contexts.DOUBLE:
                        self._pop()
                    self._fail_route()
            # Fast path: non-marker characters are emitted as plain text.
            if this not in self.MARKERS:
                self._emit_text(this)
                self._head += 1
                continue
            if this is self.END:
                return self._handle_end()
            next = self._read(1)
            # Templates and arguments:
            if this == next == "{":
                if self._can_recurse():
                    self._parse_template_or_argument()
                else:
                    self._emit_text("{")
            elif this == "|" and self._context & contexts.TEMPLATE:
                self._handle_template_param()
            elif this == "=" and self._context & contexts.TEMPLATE_PARAM_KEY:
                self._handle_template_param_value()
            elif this == next == "}" and self._context & contexts.TEMPLATE:
                return self._handle_template_end()
            elif this == "|" and self._context & contexts.ARGUMENT_NAME:
                self._handle_argument_separator()
            elif this == next == "}" and self._context & contexts.ARGUMENT:
                if self._read(2) == "}":
                    return self._handle_argument_end()
                else:
                    self._emit_text("}")
            # Wikilinks and external links:
            elif this == next == "[" and self._can_recurse():
                if not self._context & contexts.NO_WIKILINKS:
                    self._parse_wikilink()
                else:
                    self._emit_text("[")
            elif this == "|" and self._context & contexts.WIKILINK_TITLE:
                self._handle_wikilink_separator()
            elif this == next == "]" and self._context & contexts.WIKILINK:
                return self._handle_wikilink_end()
            elif this == "[":
                self._parse_external_link(True)
            elif this == ":" and self._read(-1) not in self.MARKERS:
                self._parse_external_link(False)
            elif this == "]" and self._context & contexts.EXT_LINK_TITLE:
                return self._pop()
            # Headings:
            elif this == "=" and not self._global & contexts.GL_HEADING:
                if self._read(-1) in ("\n", self.START):
                    self._parse_heading()
                else:
                    self._emit_text("=")
            elif this == "=" and self._context & contexts.HEADING:
                return self._handle_heading_end()
            elif this == "\n" and self._context & contexts.HEADING:
                self._fail_route()
            # HTML entities, comments, and tags:
            elif this == "&":
                self._parse_entity()
            elif this == "<" and next == "!":
                if self._read(2) == self._read(3) == "-":
                    self._parse_comment()
                else:
                    self._emit_text(this)
            elif this == "<" and next == "/" and self._read(2) is not self.END:
                if self._context & contexts.TAG_BODY:
                    self._handle_tag_open_close()
                else:
                    self._handle_invalid_tag_start()
            elif this == "<" and not self._context & contexts.TAG_CLOSE:
                if self._can_recurse():
                    self._parse_tag()
                else:
                    self._emit_text("<")
            elif this == ">" and self._context & contexts.TAG_CLOSE:
                return self._handle_tag_close_close()
            # Wiki-style bold/italics:
            elif this == next == "'" and not self._skip_style_tags:
                result = self._parse_style()
                if result is not None:
                    return result
            # Lists, horizontal rules, and definition lists:
            elif self._read(-1) in ("\n", self.START) and this in ("#", "*", ";", ":"):
                self._handle_list()
            elif self._read(-1) in ("\n", self.START) and this == next == self._read(2) == self._read(3) == "-":
                self._handle_hr()
            elif this in ("\n", ":") and self._context & contexts.DL_TERM:
                self._handle_dl_term()
                if this == "\n":
                    # Kill potential table contexts
                    self._context &= ~contexts.TABLE_CELL_LINE_CONTEXTS
            # Start of table parsing
            elif this == "{" and next == "|" and (self._read(-1) in ("\n", self.START) or
                    (self._read(-2) in ("\n", self.START) and self._read(-1).isspace())):
                if self._can_recurse():
                    self._parse_table()
                else:
                    self._emit_text("{|")
            elif self._context & contexts.TABLE_OPEN:
                if this == next == "|" and self._context & contexts.TABLE_TD_LINE:
                    if self._context & contexts.TABLE_CELL_OPEN:
                        return self._handle_table_cell_end()
                    self._handle_table_cell("||", "td", contexts.TABLE_TD_LINE)
                elif this == next == "|" and self._context & contexts.TABLE_TH_LINE:
                    if self._context & contexts.TABLE_CELL_OPEN:
                        return self._handle_table_cell_end()
                    self._handle_table_cell("||", "th", contexts.TABLE_TH_LINE)
                elif this == next == "!" and self._context & contexts.TABLE_TH_LINE:
                    if self._context & contexts.TABLE_CELL_OPEN:
                        return self._handle_table_cell_end()
                    self._handle_table_cell("!!", "th", contexts.TABLE_TH_LINE)
                elif this == "|" and self._context & contexts.TABLE_CELL_STYLE:
                    return self._handle_table_cell_end(reset_for_style=True)
                # on newline, clear out cell line contexts
                elif this == "\n" and self._context & contexts.TABLE_CELL_LINE_CONTEXTS:
                    self._context &= ~contexts.TABLE_CELL_LINE_CONTEXTS
                    self._emit_text(this)
                elif (self._read(-1) in ("\n", self.START) or
                        (self._read(-2) in ("\n", self.START) and self._read(-1).isspace())):
                    # Markers at the start of a line (or after leading space):
                    if this == "|" and next == "}":
                        if self._context & contexts.TABLE_CELL_OPEN:
                            return self._handle_table_cell_end()
                        if self._context & contexts.TABLE_ROW_OPEN:
                            return self._handle_table_row_end()
                        return self._handle_table_end()
                    elif this == "|" and next == "-":
                        if self._context & contexts.TABLE_CELL_OPEN:
                            return self._handle_table_cell_end()
                        if self._context & contexts.TABLE_ROW_OPEN:
                            return self._handle_table_row_end()
                        self._handle_table_row()
                    elif this == "|":
                        if self._context & contexts.TABLE_CELL_OPEN:
                            return self._handle_table_cell_end()
                        self._handle_table_cell("|", "td", contexts.TABLE_TD_LINE)
                    elif this == "!":
                        if self._context & contexts.TABLE_CELL_OPEN:
                            return self._handle_table_cell_end()
                        self._handle_table_cell("!", "th", contexts.TABLE_TH_LINE)
                    else:
                        self._emit_text(this)
                else:
                    self._emit_text(this)
            else:
                self._emit_text(this)
            self._head += 1
  1301. def tokenize(self, text, context=0, skip_style_tags=False):
  1302. """Build a list of tokens from a string of wikicode and return it."""
  1303. self._skip_style_tags = skip_style_tags
  1304. split = self.regex.split(text)
  1305. self._text = [segment for segment in split if segment]
  1306. self._head = self._global = self._depth = self._cycles = 0
  1307. try:
  1308. tokens = self._parse(context)
  1309. except BadRoute: # pragma: no cover (untestable/exceptional case)
  1310. raise ParserError("Python tokenizer exited with BadRoute")
  1311. if self._stacks: # pragma: no cover (untestable/exceptional case)
  1312. err = "Python tokenizer exited with non-empty token stack"
  1313. raise ParserError(err)
  1314. return tokens