A Python parser for MediaWiki wikicode https://mwparserfromhell.readthedocs.io/
  1. # Copyright (C) 2012-2021 Ben Kurtovic <ben.kurtovic@gmail.com>
  2. #
  3. # Permission is hereby granted, free of charge, to any person obtaining a copy
  4. # of this software and associated documentation files (the "Software"), to deal
  5. # in the Software without restriction, including without limitation the rights
  6. # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
  7. # copies of the Software, and to permit persons to whom the Software is
  8. # furnished to do so, subject to the following conditions:
  9. #
  10. # The above copyright notice and this permission notice shall be included in
  11. # all copies or substantial portions of the Software.
  12. #
  13. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  14. # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  15. # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  16. # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  17. # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  18. # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  19. # SOFTWARE.
  20. import html.entities as htmlentities
  21. from math import log
  22. import re
  23. from . import contexts, tokens
  24. from .errors import ParserError
  25. from ..definitions import (
  26. get_html_tag,
  27. is_parsable,
  28. is_single,
  29. is_single_only,
  30. is_scheme,
  31. )
  32. __all__ = ["Tokenizer"]
  33. class BadRoute(Exception):
  34. """Raised internally when the current tokenization route is invalid."""
  35. def __init__(self, context=0):
  36. super().__init__()
  37. self.context = context
  38. class _TagOpenData:
  39. """Stores data about an HTML open tag, like ``<ref name="foo">``."""
  40. CX_NAME = 1 << 0
  41. CX_ATTR_READY = 1 << 1
  42. CX_ATTR_NAME = 1 << 2
  43. CX_ATTR_VALUE = 1 << 3
  44. CX_QUOTED = 1 << 4
  45. CX_NOTE_SPACE = 1 << 5
  46. CX_NOTE_EQUALS = 1 << 6
  47. CX_NOTE_QUOTE = 1 << 7
  48. def __init__(self):
  49. self.context = self.CX_NAME
  50. self.padding_buffer = {"first": "", "before_eq": "", "after_eq": ""}
  51. self.quoter = None
  52. self.reset = 0
  53. class Tokenizer:
  54. """Creates a list of tokens from a string of wikicode."""
  55. USES_C = False
  56. START = object()
  57. END = object()
  58. MARKERS = [
  59. "{",
  60. "}",
  61. "[",
  62. "]",
  63. "<",
  64. ">",
  65. "|",
  66. "=",
  67. "&",
  68. "'",
  69. '"',
  70. "#",
  71. "*",
  72. ";",
  73. ":",
  74. "/",
  75. "-",
  76. "!",
  77. "\n",
  78. START,
  79. END,
  80. ]
  81. URISCHEME = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+.-"
  82. MAX_DEPTH = 40
  83. regex = re.compile(r"([{}\[\]<>|=&'#*;:/\\\"\-!\n])", flags=re.IGNORECASE)
  84. tag_splitter = re.compile(r"([\s\"\'\\]+)")
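# Note: ``regex`` splits the wikicode into alternating text and single-character
# marker segments. For example, re.split on "{{foo}}" yields
# ['', '{', '', '{', 'foo', '}', '', '}', '']; tokenize() then drops the empty
# strings, so self._text becomes ['{', '{', 'foo', '}', '}'] and _read() indexes
# into this list of segments rather than into raw characters.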
  85. def __init__(self):
  86. self._text = None
  87. self._head = 0
  88. self._stacks = []
  89. self._global = 0
  90. self._depth = 0
  91. self._bad_routes = set()
  92. self._skip_style_tags = False
  93. @property
  94. def _stack(self):
  95. """The current token stack."""
  96. return self._stacks[-1][0]
  97. @property
  98. def _context(self):
  99. """The current token context."""
  100. return self._stacks[-1][1]
  101. @_context.setter
  102. def _context(self, value):
  103. self._stacks[-1][1] = value
  104. @property
  105. def _textbuffer(self):
  106. """The current textbuffer."""
  107. return self._stacks[-1][2]
  108. @_textbuffer.setter
  109. def _textbuffer(self, value):
  110. self._stacks[-1][2] = value
  111. @property
  112. def _stack_ident(self):
  113. """An identifier for the current stack.
  114. This is based on the starting head position and context. Stacks with
  115. the same identifier are always parsed in the same way. This can be used
  116. to cache intermediate parsing info.
  117. """
  118. return self._stacks[-1][3]
  119. def _push(self, context=0):
  120. """Add a new token stack, context, and textbuffer to the list."""
  121. new_ident = (self._head, context)
  122. if new_ident in self._bad_routes:
  123. raise BadRoute(context)
  124. self._stacks.append([[], context, [], new_ident])
  125. self._depth += 1
  126. def _push_textbuffer(self):
  127. """Push the textbuffer onto the stack as a Text node and clear it."""
  128. if self._textbuffer:
  129. self._stack.append(tokens.Text(text="".join(self._textbuffer)))
  130. self._textbuffer = []
  131. def _pop(self, keep_context=False):
  132. """Pop the current stack/context/textbuffer, returning the stack.
  133. If *keep_context* is ``True``, then we will replace the underlying
  134. stack's context with the current stack's.
  135. """
  136. self._push_textbuffer()
  137. self._depth -= 1
  138. if keep_context:
  139. context = self._context
  140. stack = self._stacks.pop()[0]
  141. self._context = context
  142. return stack
  143. return self._stacks.pop()[0]
  144. def _can_recurse(self):
  145. """Return whether or not our max recursion depth has been exceeded."""
  146. return self._depth < self.MAX_DEPTH
  147. def _memoize_bad_route(self):
  148. """Remember that the current route (head + context at push) is invalid.
  149. This will be noticed when calling _push with the same head and context,
  150. and the route will be failed immediately.
  151. """
  152. self._bad_routes.add(self._stack_ident)
  153. def _fail_route(self):
  154. """Fail the current tokenization route.
  155. Discards the current stack/context/textbuffer and raises
  156. :exc:`.BadRoute`.
  157. """
  158. context = self._context
  159. self._memoize_bad_route()
  160. self._pop()
  161. raise BadRoute(context)
  162. def _emit(self, token):
  163. """Write a token to the end of the current token stack."""
  164. self._push_textbuffer()
  165. self._stack.append(token)
  166. def _emit_first(self, token):
  167. """Write a token to the beginning of the current token stack."""
  168. self._push_textbuffer()
  169. self._stack.insert(0, token)
  170. def _emit_text(self, text):
  171. """Write text to the current textbuffer."""
  172. self._textbuffer.append(text)
  173. def _emit_all(self, tokenlist):
  174. """Write a series of tokens to the current stack at once."""
  175. if tokenlist and isinstance(tokenlist[0], tokens.Text):
  176. self._emit_text(tokenlist.pop(0).text)
  177. self._push_textbuffer()
  178. self._stack.extend(tokenlist)
  179. def _emit_text_then_stack(self, text):
  180. """Pop the current stack, write *text*, and then write the stack."""
  181. stack = self._pop()
  182. self._emit_text(text)
  183. if stack:
  184. self._emit_all(stack)
  185. self._head -= 1
  186. def _read(self, delta=0, wrap=False, strict=False):
  187. """Read the value at a relative point in the wikicode.
  188. The value is read from :attr:`self._head <_head>` plus the value of
189. *delta* (which can be negative). If *wrap* is ``False``, a negative
190. ``self._head + delta`` will not wrap around to read from the end of
191. the string. If *strict* is ``True``, the route will be failed
  192. (with :meth:`_fail_route`) if we try to read from past the end of the
  193. string; otherwise, :attr:`self.END <END>` is returned. If we try to
  194. read from before the start of the string, :attr:`self.START <START>` is
  195. returned.
  196. """
  197. index = self._head + delta
  198. if index < 0 and (not wrap or abs(index) > len(self._text)):
  199. return self.START
  200. try:
  201. return self._text[index]
  202. except IndexError:
  203. if strict:
  204. self._fail_route()
  205. return self.END
  206. def _parse_template(self, has_content):
  207. """Parse a template at the head of the wikicode string."""
  208. reset = self._head
  209. context = contexts.TEMPLATE_NAME
  210. if has_content:
  211. context |= contexts.HAS_TEMPLATE
  212. try:
  213. template = self._parse(context)
  214. except BadRoute:
  215. self._head = reset
  216. raise
  217. self._emit_first(tokens.TemplateOpen())
  218. self._emit_all(template)
  219. self._emit(tokens.TemplateClose())
  220. def _parse_argument(self):
  221. """Parse an argument at the head of the wikicode string."""
  222. reset = self._head
  223. try:
  224. argument = self._parse(contexts.ARGUMENT_NAME)
  225. except BadRoute:
  226. self._head = reset
  227. raise
  228. self._emit_first(tokens.ArgumentOpen())
  229. self._emit_all(argument)
  230. self._emit(tokens.ArgumentClose())
  231. def _parse_template_or_argument(self):
  232. """Parse a template or argument at the head of the wikicode string."""
  233. self._head += 2
  234. braces = 2
  235. while self._read() == "{":
  236. self._head += 1
  237. braces += 1
  238. has_content = False
  239. self._push()
  240. while braces:
  241. if braces == 1:
  242. return self._emit_text_then_stack("{")
  243. if braces == 2:
  244. try:
  245. self._parse_template(has_content)
  246. except BadRoute:
  247. return self._emit_text_then_stack("{{")
  248. break
  249. try:
  250. self._parse_argument()
  251. braces -= 3
  252. except BadRoute:
  253. try:
  254. self._parse_template(has_content)
  255. braces -= 2
  256. except BadRoute:
  257. return self._emit_text_then_stack("{" * braces)
  258. if braces:
  259. has_content = True
  260. self._head += 1
  261. self._emit_all(self._pop())
  262. if self._context & contexts.FAIL_NEXT:
  263. self._context ^= contexts.FAIL_NEXT
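# Note: the brace-counting loop above disambiguates templates ({{...}}) from
# arguments ({{{...}}}): with three or more braces left it first tries to parse
# an argument (consuming three braces), falls back to a template (consuming
# two), and a final leftover brace is emitted as literal text.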
  264. def _handle_template_param(self):
  265. """Handle a template parameter at the head of the string."""
  266. if self._context & contexts.TEMPLATE_NAME:
  267. if not self._context & (contexts.HAS_TEXT | contexts.HAS_TEMPLATE):
  268. self._fail_route()
  269. self._context ^= contexts.TEMPLATE_NAME
  270. elif self._context & contexts.TEMPLATE_PARAM_VALUE:
  271. self._context ^= contexts.TEMPLATE_PARAM_VALUE
  272. else:
  273. self._emit_all(self._pop())
  274. self._context |= contexts.TEMPLATE_PARAM_KEY
  275. self._emit(tokens.TemplateParamSeparator())
  276. self._push(self._context)
  277. def _handle_template_param_value(self):
  278. """Handle a template parameter's value at the head of the string."""
  279. self._emit_all(self._pop())
  280. self._context ^= contexts.TEMPLATE_PARAM_KEY
  281. self._context |= contexts.TEMPLATE_PARAM_VALUE
  282. self._emit(tokens.TemplateParamEquals())
  283. def _handle_template_end(self):
  284. """Handle the end of a template at the head of the string."""
  285. if self._context & contexts.TEMPLATE_NAME:
  286. if not self._context & (contexts.HAS_TEXT | contexts.HAS_TEMPLATE):
  287. self._fail_route()
  288. elif self._context & contexts.TEMPLATE_PARAM_KEY:
  289. self._emit_all(self._pop())
  290. self._head += 1
  291. return self._pop()
  292. def _handle_argument_separator(self):
  293. """Handle the separator between an argument's name and default."""
  294. self._context ^= contexts.ARGUMENT_NAME
  295. self._context |= contexts.ARGUMENT_DEFAULT
  296. self._emit(tokens.ArgumentSeparator())
  297. def _handle_argument_end(self):
  298. """Handle the end of an argument at the head of the string."""
  299. self._head += 2
  300. return self._pop()
  301. def _parse_wikilink(self):
  302. """Parse an internal wikilink at the head of the wikicode string."""
  303. reset = self._head + 1
  304. self._head += 2
  305. try:
  306. # If the wikilink looks like an external link, parse it as such:
  307. link, _extra = self._really_parse_external_link(True)
  308. except BadRoute:
  309. self._head = reset + 1
  310. try:
  311. # Otherwise, actually parse it as a wikilink:
  312. wikilink = self._parse(contexts.WIKILINK_TITLE)
  313. except BadRoute:
  314. self._head = reset
  315. self._emit_text("[[")
  316. else:
  317. self._emit(tokens.WikilinkOpen())
  318. self._emit_all(wikilink)
  319. self._emit(tokens.WikilinkClose())
  320. else:
  321. if self._context & contexts.EXT_LINK_TITLE:
  322. # In this exceptional case, an external link that looks like a
  323. # wikilink inside of an external link is parsed as text:
  324. self._head = reset
  325. self._emit_text("[[")
  326. return
  327. self._emit_text("[")
  328. self._emit(tokens.ExternalLinkOpen(brackets=True))
  329. self._emit_all(link)
  330. self._emit(tokens.ExternalLinkClose())
  331. def _handle_wikilink_separator(self):
  332. """Handle the separator between a wikilink's title and its text."""
  333. self._context ^= contexts.WIKILINK_TITLE
  334. self._context |= contexts.WIKILINK_TEXT
  335. self._emit(tokens.WikilinkSeparator())
  336. def _handle_wikilink_end(self):
  337. """Handle the end of a wikilink at the head of the string."""
  338. self._head += 1
  339. return self._pop()
  340. def _parse_bracketed_uri_scheme(self):
  341. """Parse the URI scheme of a bracket-enclosed external link."""
  342. self._push(contexts.EXT_LINK_URI)
  343. if self._read() == self._read(1) == "/":
  344. self._emit_text("//")
  345. self._head += 2
  346. else:
  347. all_valid = lambda: all(char in self.URISCHEME for char in self._read())
  348. scheme = ""
  349. while self._read() is not self.END and all_valid():
  350. scheme += self._read()
  351. self._emit_text(self._read())
  352. self._head += 1
  353. if self._read() != ":":
  354. self._fail_route()
  355. self._emit_text(":")
  356. self._head += 1
  357. slashes = self._read() == self._read(1) == "/"
  358. if slashes:
  359. self._emit_text("//")
  360. self._head += 2
  361. if not is_scheme(scheme, slashes):
  362. self._fail_route()
  363. def _parse_free_uri_scheme(self):
  364. """Parse the URI scheme of a free (no brackets) external link."""
  365. scheme = []
  366. try:
  367. # We have to backtrack through the textbuffer looking for our
  368. # scheme since it was just parsed as text:
  369. for chunk in reversed(self._textbuffer):
  370. for char in reversed(chunk):
  371. # Stop at the first non-word character
  372. if re.fullmatch(r"\W", char):
  373. raise StopIteration()
  374. if char not in self.URISCHEME:
  375. raise BadRoute()
  376. scheme.append(char)
  377. except StopIteration:
  378. pass
  379. scheme = "".join(reversed(scheme))
  380. slashes = self._read() == self._read(1) == "/"
  381. if not is_scheme(scheme, slashes):
  382. raise BadRoute()
  383. self._push(self._context | contexts.EXT_LINK_URI)
  384. self._emit_text(scheme)
  385. self._emit_text(":")
  386. if slashes:
  387. self._emit_text("//")
  388. self._head += 2
  389. def _handle_free_link_text(self, punct, tail, this):
  390. """Handle text in a free ext link, including trailing punctuation."""
  391. if "(" in this and ")" in punct:
392. punct = punct[:-1] # ')' is no longer valid punctuation
  393. if this.endswith(punct):
  394. for i in range(len(this) - 1, 0, -1):
  395. if this[i - 1] not in punct:
  396. break
  397. else:
  398. i = 0
  399. stripped = this[:i]
  400. if stripped and tail:
  401. self._emit_text(tail)
  402. tail = ""
  403. tail += this[i:]
  404. this = stripped
  405. elif tail:
  406. self._emit_text(tail)
  407. tail = ""
  408. self._emit_text(this)
  409. return punct, tail
  410. def _is_uri_end(self, this, nxt):
  411. """Return whether the current head is the end of a URI."""
  412. # Built from _parse()'s end sentinels:
  413. after, ctx = self._read(2), self._context
  414. return (
  415. this in (self.END, "\n", "[", "]", "<", ">", '"')
  416. or " " in this
  417. or this == nxt == "'"
  418. or (this == "|" and ctx & contexts.TEMPLATE)
  419. or (this == "=" and ctx & (contexts.TEMPLATE_PARAM_KEY | contexts.HEADING))
  420. or (this == nxt == "}" and ctx & contexts.TEMPLATE)
  421. or (this == nxt == after == "}" and ctx & contexts.ARGUMENT)
  422. )
  423. def _really_parse_external_link(self, brackets):
  424. """Really parse an external link."""
  425. if brackets:
  426. self._parse_bracketed_uri_scheme()
  427. invalid = ("\n", " ", "]")
  428. punct = ()
  429. else:
  430. self._parse_free_uri_scheme()
  431. invalid = ("\n", " ", "[", "]")
  432. punct = tuple(",;\\.:!?)")
  433. if self._read() is self.END or self._read()[0] in invalid:
  434. self._fail_route()
  435. tail = ""
  436. while True:
  437. this, nxt = self._read(), self._read(1)
  438. if this == "&":
  439. if tail:
  440. self._emit_text(tail)
  441. tail = ""
  442. self._parse_entity()
  443. elif this == "<" and nxt == "!" and self._read(2) == self._read(3) == "-":
  444. if tail:
  445. self._emit_text(tail)
  446. tail = ""
  447. self._parse_comment()
  448. elif this == nxt == "{" and self._can_recurse():
  449. if tail:
  450. self._emit_text(tail)
  451. tail = ""
  452. self._parse_template_or_argument()
  453. elif brackets:
  454. if this is self.END or this == "\n":
  455. self._fail_route()
  456. if this == "]":
  457. return self._pop(), None
  458. if self._is_uri_end(this, nxt):
  459. if " " in this:
  460. before, after = this.split(" ", 1)
  461. self._emit_text(before)
  462. self._emit(tokens.ExternalLinkSeparator())
  463. if after:
  464. self._emit_text(after)
  465. self._head += 1
  466. else:
  467. separator = tokens.ExternalLinkSeparator()
  468. separator.suppress_space = True
  469. self._emit(separator)
  470. self._context ^= contexts.EXT_LINK_URI
  471. self._context |= contexts.EXT_LINK_TITLE
  472. return self._parse(push=False), None
  473. self._emit_text(this)
  474. else:
  475. if self._is_uri_end(this, nxt):
  476. if this is not self.END and " " in this:
  477. before, after = this.split(" ", 1)
  478. punct, tail = self._handle_free_link_text(punct, tail, before)
  479. tail += " " + after
  480. else:
  481. self._head -= 1
  482. return self._pop(), tail
  483. punct, tail = self._handle_free_link_text(punct, tail, this)
  484. self._head += 1
  485. def _remove_uri_scheme_from_textbuffer(self, scheme):
  486. """Remove the URI scheme of a new external link from the textbuffer."""
  487. length = len(scheme)
  488. while length:
  489. if length < len(self._textbuffer[-1]):
  490. self._textbuffer[-1] = self._textbuffer[-1][:-length]
  491. break
  492. length -= len(self._textbuffer[-1])
  493. self._textbuffer.pop()
  494. def _parse_external_link(self, brackets):
  495. """Parse an external link at the head of the wikicode string."""
  496. if self._context & contexts.NO_EXT_LINKS or not self._can_recurse():
  497. if not brackets and self._context & contexts.DL_TERM:
  498. self._handle_dl_term()
  499. else:
  500. self._emit_text(self._read())
  501. return
  502. reset = self._head
  503. self._head += 1
  504. try:
  505. link, extra = self._really_parse_external_link(brackets)
  506. except BadRoute:
  507. self._head = reset
  508. if not brackets and self._context & contexts.DL_TERM:
  509. self._handle_dl_term()
  510. else:
  511. self._emit_text(self._read())
  512. else:
  513. if not brackets:
  514. scheme = link[0].text.split(":", 1)[0]
  515. self._remove_uri_scheme_from_textbuffer(scheme)
  516. self._emit(tokens.ExternalLinkOpen(brackets=brackets))
  517. self._emit_all(link)
  518. self._emit(tokens.ExternalLinkClose())
  519. if extra:
  520. self._emit_text(extra)
  521. def _parse_heading(self):
  522. """Parse a section heading at the head of the wikicode string."""
  523. self._global |= contexts.GL_HEADING
  524. reset = self._head
  525. self._head += 1
  526. best = 1
  527. while self._read() == "=":
  528. best += 1
  529. self._head += 1
  530. context = contexts.HEADING_LEVEL_1 << min(best - 1, 5)
  531. try:
  532. title, level = self._parse(context)
  533. except BadRoute:
  534. self._head = reset + best - 1
  535. self._emit_text("=" * best)
  536. else:
  537. self._emit(tokens.HeadingStart(level=level))
  538. if level < best:
  539. self._emit_text("=" * (best - level))
  540. self._emit_all(title)
  541. self._emit(tokens.HeadingEnd())
  542. finally:
  543. self._global ^= contexts.GL_HEADING
  544. def _handle_heading_end(self):
  545. """Handle the end of a section heading at the head of the string."""
  546. reset = self._head
  547. self._head += 1
  548. best = 1
  549. while self._read() == "=":
  550. best += 1
  551. self._head += 1
  552. current = int(log(self._context / contexts.HEADING_LEVEL_1, 2)) + 1
  553. level = min(current, min(best, 6))
  554. try: # Try to check for a heading closure after this one
  555. after, after_level = self._parse(self._context)
  556. except BadRoute:
  557. if level < best:
  558. self._emit_text("=" * (best - level))
  559. self._head = reset + best - 1
  560. return self._pop(), level
  561. else: # Found another closure
  562. self._emit_text("=" * best)
  563. self._emit_all(after)
  564. return self._pop(), after_level
  565. def _really_parse_entity(self):
  566. """Actually parse an HTML entity and ensure that it is valid."""
  567. self._emit(tokens.HTMLEntityStart())
  568. self._head += 1
  569. this = self._read(strict=True)
  570. if this == "#":
  571. numeric = True
  572. self._emit(tokens.HTMLEntityNumeric())
  573. self._head += 1
  574. this = self._read(strict=True)
  575. if this[0].lower() == "x":
  576. hexadecimal = True
  577. self._emit(tokens.HTMLEntityHex(char=this[0]))
  578. this = this[1:]
  579. if not this:
  580. self._fail_route()
  581. else:
  582. hexadecimal = False
  583. else:
  584. numeric = hexadecimal = False
  585. valid = "0123456789abcdefABCDEF" if hexadecimal else "0123456789"
  586. if not numeric and not hexadecimal:
  587. valid += "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
  588. if not all([char in valid for char in this]):
  589. self._fail_route()
  590. self._head += 1
  591. if self._read() != ";":
  592. self._fail_route()
  593. if numeric:
  594. test = int(this, 16) if hexadecimal else int(this)
  595. if test < 1 or test > 0x10FFFF:
  596. self._fail_route()
  597. else:
  598. if this not in htmlentities.entitydefs:
  599. self._fail_route()
  600. self._emit(tokens.Text(text=this))
  601. self._emit(tokens.HTMLEntityEnd())
  602. def _parse_entity(self):
  603. """Parse an HTML entity at the head of the wikicode string."""
  604. reset = self._head
  605. try:
  606. self._push(contexts.HTML_ENTITY)
  607. self._really_parse_entity()
  608. except BadRoute:
  609. self._head = reset
  610. self._emit_text(self._read())
  611. else:
  612. self._emit_all(self._pop())
  613. def _parse_comment(self):
  614. """Parse an HTML comment at the head of the wikicode string."""
  615. self._head += 4
  616. reset = self._head - 1
  617. self._push()
  618. while True:
  619. this = self._read()
  620. if this == self.END:
  621. self._pop()
  622. self._head = reset
  623. self._emit_text("<!--")
  624. return
  625. if this == self._read(1) == "-" and self._read(2) == ">":
  626. self._emit_first(tokens.CommentStart())
  627. self._emit(tokens.CommentEnd())
  628. self._emit_all(self._pop())
  629. self._head += 2
  630. if self._context & contexts.FAIL_NEXT:
  631. # _verify_safe() sets this flag while parsing a template
  632. # or link when it encounters what might be a comment -- we
  633. # must unset it to let _verify_safe() know it was correct:
  634. self._context ^= contexts.FAIL_NEXT
  635. return
  636. self._emit_text(this)
  637. self._head += 1
  638. def _push_tag_buffer(self, data):
  639. """Write a pending tag attribute from *data* to the stack."""
  640. if data.context & data.CX_QUOTED:
  641. self._emit_first(tokens.TagAttrQuote(char=data.quoter))
  642. self._emit_all(self._pop())
  643. buf = data.padding_buffer
  644. self._emit_first(
  645. tokens.TagAttrStart(
  646. pad_first=buf["first"],
  647. pad_before_eq=buf["before_eq"],
  648. pad_after_eq=buf["after_eq"],
  649. )
  650. )
  651. self._emit_all(self._pop())
  652. for key in data.padding_buffer:
  653. data.padding_buffer[key] = ""
  654. def _handle_tag_space(self, data, text):
  655. """Handle whitespace (*text*) inside of an HTML open tag."""
  656. ctx = data.context
  657. end_of_value = ctx & data.CX_ATTR_VALUE and not ctx & (
  658. data.CX_QUOTED | data.CX_NOTE_QUOTE
  659. )
  660. if end_of_value or (ctx & data.CX_QUOTED and ctx & data.CX_NOTE_SPACE):
  661. self._push_tag_buffer(data)
  662. data.context = data.CX_ATTR_READY
  663. elif ctx & data.CX_NOTE_SPACE:
  664. data.context = data.CX_ATTR_READY
  665. elif ctx & data.CX_ATTR_NAME:
  666. data.context |= data.CX_NOTE_EQUALS
  667. data.padding_buffer["before_eq"] += text
  668. if ctx & data.CX_QUOTED and not ctx & data.CX_NOTE_SPACE:
  669. self._emit_text(text)
  670. elif data.context & data.CX_ATTR_READY:
  671. data.padding_buffer["first"] += text
  672. elif data.context & data.CX_ATTR_VALUE:
  673. data.padding_buffer["after_eq"] += text
  674. def _handle_tag_text(self, text):
  675. """Handle regular *text* inside of an HTML open tag."""
  676. nxt = self._read(1)
  677. if not self._can_recurse() or text not in self.MARKERS:
  678. self._emit_text(text)
  679. elif text == nxt == "{":
  680. self._parse_template_or_argument()
  681. elif text == nxt == "[":
  682. self._parse_wikilink()
  683. elif text == "<":
  684. self._parse_tag()
  685. else:
  686. self._emit_text(text)
  687. def _handle_tag_data(self, data, text):
  688. """Handle all sorts of *text* data inside of an HTML open tag."""
  689. for chunk in self.tag_splitter.split(text):
  690. if not chunk:
  691. continue
  692. if data.context & data.CX_NAME:
  693. if chunk in self.MARKERS or chunk.isspace():
  694. self._fail_route() # Tags must start with text, not spaces
  695. data.context = data.CX_NOTE_SPACE
  696. elif chunk.isspace():
  697. self._handle_tag_space(data, chunk)
  698. continue
  699. elif data.context & data.CX_NOTE_SPACE:
  700. if data.context & data.CX_QUOTED:
  701. data.context = data.CX_ATTR_VALUE
  702. self._memoize_bad_route()
  703. self._pop()
  704. self._head = data.reset - 1 # Will be auto-incremented
  705. return # Break early
  706. self._fail_route()
  707. elif data.context & data.CX_ATTR_READY:
  708. data.context = data.CX_ATTR_NAME
  709. self._push(contexts.TAG_ATTR)
  710. elif data.context & data.CX_ATTR_NAME:
  711. if chunk == "=":
  712. data.context = data.CX_ATTR_VALUE | data.CX_NOTE_QUOTE
  713. self._emit(tokens.TagAttrEquals())
  714. continue
  715. if data.context & data.CX_NOTE_EQUALS:
  716. self._push_tag_buffer(data)
  717. data.context = data.CX_ATTR_NAME
  718. self._push(contexts.TAG_ATTR)
  719. else: # data.context & data.CX_ATTR_VALUE assured
  720. escaped = self._read(-1) == "\\" and self._read(-2) != "\\"
  721. if data.context & data.CX_NOTE_QUOTE:
  722. data.context ^= data.CX_NOTE_QUOTE
  723. if chunk in "'\"" and not escaped:
  724. data.context |= data.CX_QUOTED
  725. data.quoter = chunk
  726. data.reset = self._head
  727. try:
  728. self._push(self._context)
  729. except BadRoute:
  730. # Already failed to parse this as a quoted string
  731. data.context = data.CX_ATTR_VALUE
  732. self._head -= 1
  733. return
  734. continue
  735. elif data.context & data.CX_QUOTED:
  736. if chunk == data.quoter and not escaped:
  737. data.context |= data.CX_NOTE_SPACE
  738. continue
  739. self._handle_tag_text(chunk)
  740. def _handle_tag_close_open(self, data, token):
  741. """Handle the closing of a open tag (``<foo>``)."""
  742. if data.context & (data.CX_ATTR_NAME | data.CX_ATTR_VALUE):
  743. self._push_tag_buffer(data)
  744. self._emit(token(padding=data.padding_buffer["first"]))
  745. self._head += 1
  746. def _handle_tag_open_close(self):
  747. """Handle the opening of a closing tag (``</foo>``)."""
  748. self._emit(tokens.TagOpenClose())
  749. self._push(contexts.TAG_CLOSE)
  750. self._head += 1
  751. def _handle_tag_close_close(self):
  752. """Handle the ending of a closing tag (``</foo>``)."""
  753. strip = lambda tok: tok.text.rstrip().lower()
  754. closing = self._pop()
  755. if len(closing) != 1 or (
  756. not isinstance(closing[0], tokens.Text)
  757. or strip(closing[0]) != strip(self._stack[1])
  758. ):
  759. self._fail_route()
  760. self._emit_all(closing)
  761. self._emit(tokens.TagCloseClose())
  762. return self._pop()
  763. def _handle_blacklisted_tag(self):
  764. """Handle the body of an HTML tag that is parser-blacklisted."""
  765. strip = lambda text: text.rstrip().lower()
  766. while True:
  767. this, nxt = self._read(), self._read(1)
  768. if this is self.END:
  769. self._fail_route()
  770. elif this == "<" and nxt == "/":
  771. self._head += 3
  772. if self._read() != ">" or (
  773. strip(self._read(-1)) != strip(self._stack[1].text)
  774. ):
  775. self._head -= 1
  776. self._emit_text("</")
  777. continue
  778. self._emit(tokens.TagOpenClose())
  779. self._emit_text(self._read(-1))
  780. self._emit(tokens.TagCloseClose())
  781. return self._pop()
  782. elif this == "&":
  783. self._parse_entity()
  784. else:
  785. self._emit_text(this)
  786. self._head += 1
  787. def _handle_single_only_tag_end(self):
  788. """Handle the end of an implicitly closing single-only HTML tag."""
  789. padding = self._stack.pop().padding
  790. self._emit(tokens.TagCloseSelfclose(padding=padding, implicit=True))
  791. self._head -= 1 # Offset displacement done by _handle_tag_close_open
  792. return self._pop()
  793. def _handle_single_tag_end(self):
  794. """Handle the stream end when inside a single-supporting HTML tag."""
  795. stack = self._stack
  796. # We need to find the index of the TagCloseOpen token corresponding to
  797. # the TagOpenOpen token located at index 0:
  798. depth = 1
  799. for index, token in enumerate(stack[2:], 2):
  800. if isinstance(token, tokens.TagOpenOpen):
  801. depth += 1
  802. elif isinstance(token, tokens.TagCloseOpen):
  803. depth -= 1
  804. if depth == 0:
  805. break
  806. elif isinstance(token, tokens.TagCloseSelfclose):
  807. depth -= 1
  808. if depth == 0: # pragma: no cover (untestable/exceptional)
  809. raise ParserError(
  810. "_handle_single_tag_end() got an unexpected TagCloseSelfclose"
  811. )
  812. else: # pragma: no cover (untestable/exceptional case)
  813. raise ParserError("_handle_single_tag_end() missed a TagCloseOpen")
  814. padding = stack[index].padding
  815. stack[index] = tokens.TagCloseSelfclose(padding=padding, implicit=True)
  816. return self._pop()
  817. def _really_parse_tag(self):
  818. """Actually parse an HTML tag, starting with the open (``<foo>``)."""
  819. data = _TagOpenData()
  820. self._push(contexts.TAG_OPEN)
  821. self._emit(tokens.TagOpenOpen())
  822. while True:
  823. this, nxt = self._read(), self._read(1)
  824. can_exit = (
  825. not data.context & (data.CX_QUOTED | data.CX_NAME)
  826. or data.context & data.CX_NOTE_SPACE
  827. )
  828. if this is self.END:
  829. if self._context & contexts.TAG_ATTR:
  830. if data.context & data.CX_QUOTED:
  831. # Unclosed attribute quote: reset, don't die
  832. data.context = data.CX_ATTR_VALUE
  833. self._memoize_bad_route()
  834. self._pop()
  835. self._head = data.reset
  836. continue
  837. self._pop()
  838. self._fail_route()
  839. elif this == ">" and can_exit:
  840. self._handle_tag_close_open(data, tokens.TagCloseOpen)
  841. self._context = contexts.TAG_BODY
  842. if is_single_only(self._stack[1].text):
  843. return self._handle_single_only_tag_end()
  844. if is_parsable(self._stack[1].text):
  845. return self._parse(push=False)
  846. return self._handle_blacklisted_tag()
  847. elif this == "/" and nxt == ">" and can_exit:
  848. self._handle_tag_close_open(data, tokens.TagCloseSelfclose)
  849. return self._pop()
  850. else:
  851. self._handle_tag_data(data, this)
  852. self._head += 1
  853. def _handle_invalid_tag_start(self):
  854. """Handle the (possible) start of an implicitly closing single tag."""
  855. reset = self._head + 1
  856. self._head += 2
  857. try:
  858. if not is_single_only(self.tag_splitter.split(self._read())[0]):
  859. raise BadRoute()
  860. tag = self._really_parse_tag()
  861. except BadRoute:
  862. self._head = reset
  863. self._emit_text("</")
  864. else:
  865. tag[0].invalid = True # Set flag of TagOpenOpen
  866. self._emit_all(tag)
  867. def _parse_tag(self):
  868. """Parse an HTML tag at the head of the wikicode string."""
  869. reset = self._head
  870. self._head += 1
  871. try:
  872. tag = self._really_parse_tag()
  873. except BadRoute:
  874. self._head = reset
  875. self._emit_text("<")
  876. else:
  877. self._emit_all(tag)
  878. def _emit_style_tag(self, tag, markup, body):
  879. """Write the body of a tag and the tokens that should surround it."""
  880. self._emit(tokens.TagOpenOpen(wiki_markup=markup))
  881. self._emit_text(tag)
  882. self._emit(tokens.TagCloseOpen())
  883. self._emit_all(body)
  884. self._emit(tokens.TagOpenClose())
  885. self._emit_text(tag)
  886. self._emit(tokens.TagCloseClose())
  887. def _parse_italics(self):
  888. """Parse wiki-style italics."""
  889. reset = self._head
  890. try:
  891. stack = self._parse(contexts.STYLE_ITALICS)
  892. except BadRoute as route:
  893. self._head = reset
  894. if route.context & contexts.STYLE_PASS_AGAIN:
  895. new_ctx = contexts.STYLE_ITALICS | contexts.STYLE_SECOND_PASS
  896. try:
  897. stack = self._parse(new_ctx)
  898. except BadRoute:
  899. self._head = reset
  900. self._emit_text("''")
  901. return
  902. else:
  903. self._emit_text("''")
  904. return
  905. self._emit_style_tag("i", "''", stack)
  906. def _parse_bold(self):
  907. """Parse wiki-style bold."""
  908. reset = self._head
  909. try:
  910. stack = self._parse(contexts.STYLE_BOLD)
  911. except BadRoute:
  912. self._head = reset
  913. if self._context & contexts.STYLE_SECOND_PASS:
  914. self._emit_text("'")
  915. return True
  916. if self._context & contexts.STYLE_ITALICS:
  917. self._context |= contexts.STYLE_PASS_AGAIN
  918. self._emit_text("'''")
  919. else:
  920. self._emit_text("'")
  921. self._parse_italics()
  922. else:
  923. self._emit_style_tag("b", "'''", stack)
  924. return False
  925. def _parse_italics_and_bold(self):
  926. """Parse wiki-style italics and bold together (i.e., five ticks)."""
  927. reset = self._head
  928. try:
  929. stack = self._parse(contexts.STYLE_BOLD)
  930. except BadRoute:
  931. self._head = reset
  932. try:
  933. stack = self._parse(contexts.STYLE_ITALICS)
  934. except BadRoute:
  935. self._head = reset
  936. self._emit_text("'''''")
  937. else:
  938. reset = self._head
  939. try:
  940. stack2 = self._parse(contexts.STYLE_BOLD)
  941. except BadRoute:
  942. self._head = reset
  943. self._emit_text("'''")
  944. self._emit_style_tag("i", "''", stack)
  945. else:
  946. self._push()
  947. self._emit_style_tag("i", "''", stack)
  948. self._emit_all(stack2)
  949. self._emit_style_tag("b", "'''", self._pop())
  950. else:
  951. reset = self._head
  952. try:
  953. stack2 = self._parse(contexts.STYLE_ITALICS)
  954. except BadRoute:
  955. self._head = reset
  956. self._emit_text("''")
  957. self._emit_style_tag("b", "'''", stack)
  958. else:
  959. self._push()
  960. self._emit_style_tag("b", "'''", stack)
  961. self._emit_all(stack2)
  962. self._emit_style_tag("i", "''", self._pop())
  963. def _parse_style(self):
  964. """Parse wiki-style formatting (``''``/``'''`` for italics/bold)."""
  965. self._head += 2
  966. ticks = 2
  967. while self._read() == "'":
  968. self._head += 1
  969. ticks += 1
  970. italics = self._context & contexts.STYLE_ITALICS
  971. bold = self._context & contexts.STYLE_BOLD
  972. if ticks > 5:
  973. self._emit_text("'" * (ticks - 5))
  974. ticks = 5
  975. elif ticks == 4:
  976. self._emit_text("'")
  977. ticks = 3
  978. if (italics and ticks in (2, 5)) or (bold and ticks in (3, 5)):
  979. if ticks == 5:
  980. self._head -= 3 if italics else 2
  981. return self._pop()
  982. if not self._can_recurse():
  983. if ticks == 3:
  984. if self._context & contexts.STYLE_SECOND_PASS:
  985. self._emit_text("'")
  986. return self._pop()
  987. if self._context & contexts.STYLE_ITALICS:
  988. self._context |= contexts.STYLE_PASS_AGAIN
  989. self._emit_text("'" * ticks)
  990. elif ticks == 2:
  991. self._parse_italics()
  992. elif ticks == 3:
  993. if self._parse_bold():
  994. return self._pop()
  995. else: # ticks == 5
  996. self._parse_italics_and_bold()
  997. self._head -= 1
  998. def _handle_list_marker(self):
  999. """Handle a list marker at the head (``#``, ``*``, ``;``, ``:``)."""
  1000. markup = self._read()
  1001. if markup == ";":
  1002. self._context |= contexts.DL_TERM
  1003. self._emit(tokens.TagOpenOpen(wiki_markup=markup))
  1004. self._emit_text(get_html_tag(markup))
  1005. self._emit(tokens.TagCloseSelfclose())
  1006. def _handle_list(self):
  1007. """Handle a wiki-style list (``#``, ``*``, ``;``, ``:``)."""
  1008. self._handle_list_marker()
  1009. while self._read(1) in ("#", "*", ";", ":"):
  1010. self._head += 1
  1011. self._handle_list_marker()
  1012. def _handle_hr(self):
  1013. """Handle a wiki-style horizontal rule (``----``) in the string."""
  1014. length = 4
  1015. self._head += 3
  1016. while self._read(1) == "-":
  1017. length += 1
  1018. self._head += 1
  1019. self._emit(tokens.TagOpenOpen(wiki_markup="-" * length))
  1020. self._emit_text("hr")
  1021. self._emit(tokens.TagCloseSelfclose())
  1022. def _handle_dl_term(self):
  1023. """Handle the term in a description list (``foo`` in ``;foo:bar``)."""
  1024. self._context ^= contexts.DL_TERM
  1025. if self._read() == ":":
  1026. self._handle_list_marker()
  1027. else:
  1028. self._emit_text("\n")
  1029. def _emit_table_tag(
  1030. self,
  1031. open_open_markup,
  1032. tag,
  1033. style,
  1034. padding,
  1035. close_open_markup,
  1036. contents,
  1037. open_close_markup,
  1038. ):
  1039. """Emit a table tag."""
  1040. self._emit(tokens.TagOpenOpen(wiki_markup=open_open_markup))
  1041. self._emit_text(tag)
  1042. if style:
  1043. self._emit_all(style)
  1044. if close_open_markup:
  1045. self._emit(
  1046. tokens.TagCloseOpen(wiki_markup=close_open_markup, padding=padding)
  1047. )
  1048. else:
  1049. self._emit(tokens.TagCloseOpen(padding=padding))
  1050. if contents:
  1051. self._emit_all(contents)
  1052. self._emit(tokens.TagOpenClose(wiki_markup=open_close_markup))
  1053. self._emit_text(tag)
  1054. self._emit(tokens.TagCloseClose())
  1055. def _handle_table_style(self, end_token):
  1056. """Handle style attributes for a table until ``end_token``."""
  1057. data = _TagOpenData()
  1058. data.context = _TagOpenData.CX_ATTR_READY
  1059. while True:
  1060. this = self._read()
  1061. can_exit = (
  1062. not data.context & data.CX_QUOTED or data.context & data.CX_NOTE_SPACE
  1063. )
  1064. if this == end_token and can_exit:
  1065. if data.context & (data.CX_ATTR_NAME | data.CX_ATTR_VALUE):
  1066. self._push_tag_buffer(data)
  1067. if this.isspace():
  1068. data.padding_buffer["first"] += this
  1069. return data.padding_buffer["first"]
  1070. if this is self.END or this == end_token:
  1071. if self._context & contexts.TAG_ATTR:
  1072. if data.context & data.CX_QUOTED:
  1073. # Unclosed attribute quote: reset, don't die
  1074. data.context = data.CX_ATTR_VALUE
  1075. self._memoize_bad_route()
  1076. self._pop()
  1077. self._head = data.reset
  1078. continue
  1079. self._pop()
  1080. self._fail_route()
  1081. else:
  1082. self._handle_tag_data(data, this)
  1083. self._head += 1
  1084. def _parse_table(self):
  1085. """Parse a wikicode table by starting with the first line."""
  1086. reset = self._head
  1087. self._head += 2
  1088. try:
  1089. self._push(contexts.TABLE_OPEN)
  1090. padding = self._handle_table_style("\n")
  1091. except BadRoute:
  1092. self._head = reset
  1093. self._emit_text("{")
  1094. return
  1095. style = self._pop()
  1096. self._head += 1
  1097. restore_point = self._stack_ident
  1098. try:
  1099. table = self._parse(contexts.TABLE_OPEN)
  1100. except BadRoute:
  1101. while self._stack_ident != restore_point:
  1102. self._memoize_bad_route()
  1103. self._pop()
  1104. self._head = reset
  1105. self._emit_text("{")
  1106. return
  1107. self._emit_table_tag("{|", "table", style, padding, None, table, "|}")
  1108. # Offset displacement done by _parse():
  1109. self._head -= 1
  1110. def _handle_table_row(self):
  1111. """Parse as style until end of the line, then continue."""
  1112. self._head += 2
  1113. if not self._can_recurse():
  1114. self._emit_text("|-")
  1115. self._head -= 1
  1116. return
  1117. self._push(contexts.TABLE_OPEN | contexts.TABLE_ROW_OPEN)
  1118. padding = self._handle_table_style("\n")
  1119. style = self._pop()
  1120. # Don't parse the style separator:
  1121. self._head += 1
  1122. row = self._parse(contexts.TABLE_OPEN | contexts.TABLE_ROW_OPEN)
  1123. self._emit_table_tag("|-", "tr", style, padding, None, row, "")
1124. # Offset displacement done by _parse():
  1125. self._head -= 1
  1126. def _handle_table_cell(self, markup, tag, line_context):
  1127. """Parse as normal syntax unless we hit a style marker, then parse
  1128. style as HTML attributes and the remainder as normal syntax."""
  1129. old_context = self._context
  1130. padding, style = "", None
  1131. self._head += len(markup)
  1132. reset = self._head
  1133. if not self._can_recurse():
  1134. self._emit_text(markup)
  1135. self._head -= 1
  1136. return
  1137. cell = self._parse(
  1138. contexts.TABLE_OPEN
  1139. | contexts.TABLE_CELL_OPEN
  1140. | line_context
  1141. | contexts.TABLE_CELL_STYLE
  1142. )
  1143. cell_context = self._context
  1144. self._context = old_context
  1145. reset_for_style = cell_context & contexts.TABLE_CELL_STYLE
  1146. if reset_for_style:
  1147. self._head = reset
  1148. self._push(contexts.TABLE_OPEN | contexts.TABLE_CELL_OPEN | line_context)
  1149. padding = self._handle_table_style("|")
  1150. style = self._pop()
  1151. # Don't parse the style separator:
  1152. self._head += 1
  1153. cell = self._parse(
  1154. contexts.TABLE_OPEN | contexts.TABLE_CELL_OPEN | line_context
  1155. )
  1156. cell_context = self._context
  1157. self._context = old_context
  1158. close_open_markup = "|" if reset_for_style else None
  1159. self._emit_table_tag(markup, tag, style, padding, close_open_markup, cell, "")
  1160. # Keep header/cell line contexts:
  1161. self._context |= cell_context & (
  1162. contexts.TABLE_TH_LINE | contexts.TABLE_TD_LINE
  1163. )
1164. # Offset displacement done by _parse():
  1165. self._head -= 1
  1166. def _handle_table_cell_end(self, reset_for_style=False):
  1167. """Returns the current context, with the TABLE_CELL_STYLE flag set if
  1168. it is necessary to reset and parse style attributes."""
  1169. if reset_for_style:
  1170. self._context |= contexts.TABLE_CELL_STYLE
  1171. else:
  1172. self._context &= ~contexts.TABLE_CELL_STYLE
  1173. return self._pop(keep_context=True)
  1174. def _handle_table_row_end(self):
  1175. """Return the stack in order to handle the table row end."""
  1176. return self._pop()
  1177. def _handle_table_end(self):
  1178. """Return the stack in order to handle the table end."""
  1179. self._head += 2
  1180. return self._pop()
  1181. def _handle_end(self):
  1182. """Handle the end of the stream of wikitext."""
  1183. if self._context & contexts.FAIL:
  1184. if self._context & contexts.TAG_BODY:
  1185. if is_single(self._stack[1].text):
  1186. return self._handle_single_tag_end()
  1187. if self._context & contexts.TABLE_CELL_OPEN:
  1188. self._pop()
  1189. if self._context & contexts.DOUBLE:
  1190. self._pop()
  1191. self._fail_route()
  1192. return self._pop()
  1193. def _verify_safe(self, this):
  1194. """Make sure we are not trying to write an invalid character."""
  1195. context = self._context
  1196. if context & contexts.FAIL_NEXT:
  1197. return False
  1198. if context & contexts.WIKILINK_TITLE:
  1199. if this in ("]", "{"):
  1200. self._context |= contexts.FAIL_NEXT
  1201. elif this in ("\n", "[", "}", ">"):
  1202. return False
  1203. elif this == "<":
  1204. if self._read(1) == "!":
  1205. self._context |= contexts.FAIL_NEXT
  1206. else:
  1207. return False
  1208. return True
  1209. if context & contexts.EXT_LINK_TITLE:
  1210. return this != "\n"
  1211. if context & contexts.TEMPLATE_NAME:
  1212. if this == "{":
  1213. self._context |= contexts.HAS_TEMPLATE | contexts.FAIL_NEXT
  1214. return True
  1215. if this == "}" or (this == "<" and self._read(1) == "!"):
  1216. self._context |= contexts.FAIL_NEXT
  1217. return True
  1218. if this in ("[", "]", "<", ">"):
  1219. return False
  1220. if this == "|":
  1221. return True
  1222. if context & contexts.HAS_TEXT:
  1223. if context & contexts.FAIL_ON_TEXT:
  1224. if this is self.END or not this.isspace():
  1225. return False
  1226. elif this == "\n":
  1227. self._context |= contexts.FAIL_ON_TEXT
  1228. elif this is self.END or not this.isspace():
  1229. self._context |= contexts.HAS_TEXT
  1230. return True
  1231. if context & contexts.TAG_CLOSE:
  1232. return this != "<"
  1233. if context & contexts.FAIL_ON_EQUALS:
  1234. if this == "=":
  1235. return False
  1236. elif context & contexts.FAIL_ON_LBRACE:
  1237. if this == "{" or (self._read(-1) == self._read(-2) == "{"):
  1238. if context & contexts.TEMPLATE:
  1239. self._context |= contexts.FAIL_ON_EQUALS
  1240. else:
  1241. self._context |= contexts.FAIL_NEXT
  1242. return True
  1243. self._context ^= contexts.FAIL_ON_LBRACE
  1244. elif context & contexts.FAIL_ON_RBRACE:
  1245. if this == "}":
  1246. self._context |= contexts.FAIL_NEXT
  1247. return True
  1248. self._context ^= contexts.FAIL_ON_RBRACE
  1249. elif this == "{":
  1250. self._context |= contexts.FAIL_ON_LBRACE
  1251. elif this == "}":
  1252. self._context |= contexts.FAIL_ON_RBRACE
  1253. return True
  1254. def _parse(self, context=0, push=True):
  1255. """Parse the wikicode string, using *context* for when to stop."""
  1256. if push:
  1257. self._push(context)
  1258. while True:
  1259. this = self._read()
  1260. if self._context & contexts.UNSAFE:
  1261. if not self._verify_safe(this):
  1262. if self._context & contexts.DOUBLE:
  1263. self._pop()
  1264. self._fail_route()
  1265. if this not in self.MARKERS:
  1266. self._emit_text(this)
  1267. self._head += 1
  1268. continue
  1269. if this is self.END:
  1270. return self._handle_end()
  1271. nxt = self._read(1)
  1272. if this == nxt == "{":
  1273. if self._can_recurse():
  1274. self._parse_template_or_argument()
  1275. else:
  1276. self._emit_text("{")
  1277. elif this == "|" and self._context & contexts.TEMPLATE:
  1278. self._handle_template_param()
  1279. elif this == "=" and self._context & contexts.TEMPLATE_PARAM_KEY:
  1280. if (
  1281. not self._global & contexts.GL_HEADING
  1282. and self._read(-1) in ("\n", self.START)
  1283. and nxt == "="
  1284. ):
  1285. self._parse_heading()
  1286. else:
  1287. self._handle_template_param_value()
  1288. elif this == nxt == "}" and self._context & contexts.TEMPLATE:
  1289. return self._handle_template_end()
  1290. elif this == "|" and self._context & contexts.ARGUMENT_NAME:
  1291. self._handle_argument_separator()
  1292. elif this == nxt == "}" and self._context & contexts.ARGUMENT:
  1293. if self._read(2) == "}":
  1294. return self._handle_argument_end()
  1295. self._emit_text("}")
  1296. elif this == nxt == "[" and self._can_recurse():
  1297. if self._context & contexts.WIKILINK_TEXT:
  1298. self._fail_route()
  1299. if not self._context & contexts.NO_WIKILINKS:
  1300. self._parse_wikilink()
  1301. else:
  1302. self._emit_text("[")
  1303. elif this == "|" and self._context & contexts.WIKILINK_TITLE:
  1304. self._handle_wikilink_separator()
  1305. elif this == nxt == "]" and self._context & contexts.WIKILINK:
  1306. return self._handle_wikilink_end()
  1307. elif this == "[":
  1308. self._parse_external_link(True)
  1309. elif this == ":" and self._read(-1) not in self.MARKERS:
  1310. self._parse_external_link(False)
  1311. elif this == "]" and self._context & contexts.EXT_LINK_TITLE:
  1312. return self._pop()
  1313. elif (
  1314. this == "="
  1315. and not self._global & contexts.GL_HEADING
  1316. and not self._context & contexts.TEMPLATE
  1317. ):
  1318. if self._read(-1) in ("\n", self.START):
  1319. self._parse_heading()
  1320. else:
  1321. self._emit_text("=")
  1322. elif this == "=" and self._context & contexts.HEADING:
  1323. return self._handle_heading_end()
  1324. elif this == "\n" and self._context & contexts.HEADING:
  1325. self._fail_route()
  1326. elif this == "&":
  1327. self._parse_entity()
  1328. elif this == "<" and nxt == "!":
  1329. if self._read(2) == self._read(3) == "-":
  1330. self._parse_comment()
  1331. else:
  1332. self._emit_text(this)
  1333. elif this == "<" and nxt == "/" and self._read(2) is not self.END:
  1334. if self._context & contexts.TAG_BODY:
  1335. self._handle_tag_open_close()
  1336. else:
  1337. self._handle_invalid_tag_start()
  1338. elif this == "<" and not self._context & contexts.TAG_CLOSE:
  1339. if self._can_recurse():
  1340. self._parse_tag()
  1341. else:
  1342. self._emit_text("<")
  1343. elif this == ">" and self._context & contexts.TAG_CLOSE:
  1344. return self._handle_tag_close_close()
  1345. elif this == nxt == "'" and not self._skip_style_tags:
  1346. result = self._parse_style()
  1347. if result is not None:
  1348. return result
  1349. elif self._read(-1) in ("\n", self.START) and this in ("#", "*", ";", ":"):
  1350. self._handle_list()
  1351. elif self._read(-1) in ("\n", self.START) and (
  1352. this == nxt == self._read(2) == self._read(3) == "-"
  1353. ):
  1354. self._handle_hr()
  1355. elif this in ("\n", ":") and self._context & contexts.DL_TERM:
  1356. self._handle_dl_term()
  1357. if this == "\n":
  1358. # Kill potential table contexts
  1359. self._context &= ~contexts.TABLE_CELL_LINE_CONTEXTS
  1360. # Start of table parsing
  1361. elif (
  1362. this == "{"
  1363. and nxt == "|"
  1364. and (
  1365. self._read(-1) in ("\n", self.START)
  1366. or (
  1367. self._read(-2) in ("\n", self.START)
  1368. and self._read(-1).isspace()
  1369. )
  1370. )
  1371. ):
  1372. if self._can_recurse():
  1373. self._parse_table()
  1374. else:
  1375. self._emit_text("{")
  1376. elif self._context & contexts.TABLE_OPEN:
  1377. if this == nxt == "|" and self._context & contexts.TABLE_TD_LINE:
  1378. if self._context & contexts.TABLE_CELL_OPEN:
  1379. return self._handle_table_cell_end()
  1380. self._handle_table_cell("||", "td", contexts.TABLE_TD_LINE)
  1381. elif this == nxt == "|" and self._context & contexts.TABLE_TH_LINE:
  1382. if self._context & contexts.TABLE_CELL_OPEN:
  1383. return self._handle_table_cell_end()
  1384. self._handle_table_cell("||", "th", contexts.TABLE_TH_LINE)
  1385. elif this == nxt == "!" and self._context & contexts.TABLE_TH_LINE:
  1386. if self._context & contexts.TABLE_CELL_OPEN:
  1387. return self._handle_table_cell_end()
  1388. self._handle_table_cell("!!", "th", contexts.TABLE_TH_LINE)
  1389. elif this == "|" and self._context & contexts.TABLE_CELL_STYLE:
  1390. return self._handle_table_cell_end(reset_for_style=True)
  1391. # on newline, clear out cell line contexts
  1392. elif this == "\n" and self._context & contexts.TABLE_CELL_LINE_CONTEXTS:
  1393. self._context &= ~contexts.TABLE_CELL_LINE_CONTEXTS
  1394. self._emit_text(this)
  1395. elif self._read(-1) in ("\n", self.START) or (
  1396. self._read(-2) in ("\n", self.START) and self._read(-1).isspace()
  1397. ):
  1398. if this == "|" and nxt == "}":
  1399. if self._context & contexts.TABLE_CELL_OPEN:
  1400. return self._handle_table_cell_end()
  1401. if self._context & contexts.TABLE_ROW_OPEN:
  1402. return self._handle_table_row_end()
  1403. return self._handle_table_end()
  1404. if this == "|" and nxt == "-":
  1405. if self._context & contexts.TABLE_CELL_OPEN:
  1406. return self._handle_table_cell_end()
  1407. if self._context & contexts.TABLE_ROW_OPEN:
  1408. return self._handle_table_row_end()
  1409. self._handle_table_row()
  1410. elif this == "|":
  1411. if self._context & contexts.TABLE_CELL_OPEN:
  1412. return self._handle_table_cell_end()
  1413. self._handle_table_cell("|", "td", contexts.TABLE_TD_LINE)
  1414. elif this == "!":
  1415. if self._context & contexts.TABLE_CELL_OPEN:
  1416. return self._handle_table_cell_end()
  1417. self._handle_table_cell("!", "th", contexts.TABLE_TH_LINE)
  1418. else:
  1419. self._emit_text(this)
  1420. else:
  1421. self._emit_text(this)
  1422. else:
  1423. self._emit_text(this)
  1424. self._head += 1
  1425. def tokenize(self, text, context=0, skip_style_tags=False):
  1426. """Build a list of tokens from a string of wikicode and return it."""
  1427. split = self.regex.split(text)
  1428. self._text = [segment for segment in split if segment]
  1429. self._head = self._global = self._depth = 0
  1430. self._bad_routes = set()
  1431. self._skip_style_tags = skip_style_tags
  1432. try:
  1433. result = self._parse(context)
  1434. except BadRoute as exc: # pragma: no cover (untestable/exceptional case)
  1435. raise ParserError("Python tokenizer exited with BadRoute") from exc
  1436. if self._stacks: # pragma: no cover (untestable/exceptional case)
  1437. err = "Python tokenizer exited with non-empty token stack"
  1438. raise ParserError(err)
  1439. return result
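
A minimal usage sketch (not part of tokenizer.py itself; it assumes the package layout of released versions, where this module is importable as mwparserfromhell.parser.tokenizer):

    from mwparserfromhell.parser.tokenizer import Tokenizer

    tokens = Tokenizer().tokenize("{{foo|bar=[[baz]]}}")
    # A flat token stream roughly like:
    #   [TemplateOpen(), Text(text="foo"), TemplateParamSeparator(),
    #    Text(text="bar"), TemplateParamEquals(), WikilinkOpen(),
    #    Text(text="baz"), WikilinkClose(), TemplateClose()]

Most callers go through mwparserfromhell.parse(), which feeds these tokens to the builder and returns a Wikicode tree instead of exposing them directly.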