A Python parser for MediaWiki wikicode https://mwparserfromhell.readthedocs.io/
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2013 Ben Kurtovic <ben.kurtovic@verizon.net>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import unicode_literals
from math import log
import re

from . import contexts, tokens
from ..compat import htmlentities
from ..definitions import (get_html_tag, is_parsable, is_single,
                           is_single_only, is_scheme)

__all__ = ["Tokenizer"]
class BadRoute(Exception):
    """Raised internally when the current tokenization route is invalid."""

    def __init__(self, context=0):
        self.context = context


class _TagOpenData(object):
    """Stores data about an HTML open tag, like ``<ref name="foo">``."""
    CX_NAME = 1 << 0
    CX_ATTR_READY = 1 << 1
    CX_ATTR_NAME = 1 << 2
    CX_ATTR_VALUE = 1 << 3
    CX_QUOTED = 1 << 4
    CX_NOTE_SPACE = 1 << 5
    CX_NOTE_EQUALS = 1 << 6
    CX_NOTE_QUOTE = 1 << 7

    def __init__(self):
        self.context = self.CX_NAME
        self.padding_buffer = {"first": "", "before_eq": "", "after_eq": ""}
        self.reset = 0
class Tokenizer(object):
    """Creates a list of tokens from a string of wikicode."""
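    # The public entry point is tokenize(); the flat token list it returns is
    # later turned into a node tree by the rest of the parser package.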
    USES_C = False
    START = object()
    END = object()
    MARKERS = ["{", "}", "[", "]", "<", ">", "|", "=", "&", "'", "#", "*", ";",
               ":", "/", "-", "\n", START, END]
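    # Safety limits checked by _can_recurse(): parsing stops recursing once
    # MAX_DEPTH stacks are nested or MAX_CYCLES stacks have been created.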
    MAX_DEPTH = 40
    MAX_CYCLES = 100000
    regex = re.compile(r"([{}\[\]<>|=&'#*;:/\\\"\-!\n])", flags=re.IGNORECASE)
    tag_splitter = re.compile(r"([\s\"\\]+)")

    def __init__(self):
        self._text = None
        self._head = 0
        self._stacks = []
        self._global = 0
        self._depth = 0
        self._cycles = 0

    @property
    def _stack(self):
        """The current token stack."""
        return self._stacks[-1][0]

    @property
    def _context(self):
        """The current token context."""
        return self._stacks[-1][1]

    @_context.setter
    def _context(self, value):
        self._stacks[-1][1] = value

    @property
    def _textbuffer(self):
        """The current textbuffer."""
        return self._stacks[-1][2]

    @_textbuffer.setter
    def _textbuffer(self, value):
        self._stacks[-1][2] = value

    def _push(self, context=0):
        """Add a new token stack, context, and textbuffer to the list."""
        self._stacks.append([[], context, []])
        self._depth += 1
        self._cycles += 1

    def _push_textbuffer(self):
        """Push the textbuffer onto the stack as a Text node and clear it."""
        if self._textbuffer:
            self._stack.append(tokens.Text(text="".join(self._textbuffer)))
            self._textbuffer = []
    def _pop(self, keep_context=False):
        """Pop the current stack/context/textbuffer, returning the stack.

        If *keep_context* is ``True``, then we will replace the underlying
        stack's context with the current stack's.
        """
        self._push_textbuffer()
        self._depth -= 1
        if keep_context:
            context = self._context
            stack = self._stacks.pop()[0]
            self._context = context
            return stack
        return self._stacks.pop()[0]

    def _can_recurse(self):
        """Return whether it is safe to recurse, i.e. whether the recursion
        depth and cycle limits have not yet been reached."""
        return self._depth < self.MAX_DEPTH and self._cycles < self.MAX_CYCLES
    def _fail_route(self):
        """Fail the current tokenization route.

        Discards the current stack/context/textbuffer and raises
        :py:exc:`~.BadRoute`.
        """
        context = self._context
        self._pop()
        raise BadRoute(context)

    def _emit(self, token):
        """Write a token to the end of the current token stack."""
        self._push_textbuffer()
        self._stack.append(token)

    def _emit_first(self, token):
        """Write a token to the beginning of the current token stack."""
        self._push_textbuffer()
        self._stack.insert(0, token)

    def _emit_text(self, text):
        """Write text to the current textbuffer."""
        self._textbuffer.append(text)

    def _emit_all(self, tokenlist):
        """Write a series of tokens to the current stack at once."""
        if tokenlist and isinstance(tokenlist[0], tokens.Text):
            self._emit_text(tokenlist.pop(0).text)
        self._push_textbuffer()
        self._stack.extend(tokenlist)

    def _emit_text_then_stack(self, text):
        """Pop the current stack, write *text*, and then write the stack."""
        stack = self._pop()
        self._emit_text(text)
        if stack:
            self._emit_all(stack)
        self._head -= 1

    def _read(self, delta=0, wrap=False, strict=False):
        """Read the value at a relative point in the wikicode.

        The value is read from :py:attr:`self._head <_head>` plus the value of
        *delta* (which can be negative). If *wrap* is ``False``, we will not
        allow attempts to read from the end of the string if ``self._head +
        delta`` is negative. If *strict* is ``True``, the route will be failed
        (with :py:meth:`_fail_route`) if we try to read from past the end of
        the string; otherwise, :py:attr:`self.END <END>` is returned. If we try
        to read from before the start of the string, :py:attr:`self.START
        <START>` is returned.
        """
        index = self._head + delta
        if index < 0 and (not wrap or abs(index) > len(self._text)):
            return self.START
        try:
            return self._text[index]
        except IndexError:
            if strict:
                self._fail_route()
            return self.END
    def _parse_template(self):
        """Parse a template at the head of the wikicode string."""
        reset = self._head
        try:
            template = self._parse(contexts.TEMPLATE_NAME)
        except BadRoute:
            self._head = reset
            raise
        self._emit_first(tokens.TemplateOpen())
        self._emit_all(template)
        self._emit(tokens.TemplateClose())

    def _parse_argument(self):
        """Parse an argument at the head of the wikicode string."""
        reset = self._head
        try:
            argument = self._parse(contexts.ARGUMENT_NAME)
        except BadRoute:
            self._head = reset
            raise
        self._emit_first(tokens.ArgumentOpen())
        self._emit_all(argument)
        self._emit(tokens.ArgumentClose())

    def _parse_template_or_argument(self):
        """Parse a template or argument at the head of the wikicode string."""
        self._head += 2
        braces = 2
        while self._read() == "{":
            self._head += 1
            braces += 1
        self._push()
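        # Consume the run of braces: an argument ({{{...}}}) accounts for
        # three of them and a template ({{...}}) for two; a single leftover
        # brace is emitted as literal text.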
        while braces:
            if braces == 1:
                return self._emit_text_then_stack("{")
            if braces == 2:
                try:
                    self._parse_template()
                except BadRoute:
                    return self._emit_text_then_stack("{{")
                break
            try:
                self._parse_argument()
                braces -= 3
            except BadRoute:
                try:
                    self._parse_template()
                    braces -= 2
                except BadRoute:
                    return self._emit_text_then_stack("{" * braces)
            if braces:
                self._head += 1

        self._emit_all(self._pop())
        if self._context & contexts.FAIL_NEXT:
            self._context ^= contexts.FAIL_NEXT
    def _handle_template_param(self):
        """Handle a template parameter at the head of the string."""
        if self._context & contexts.TEMPLATE_NAME:
            self._context ^= contexts.TEMPLATE_NAME
        elif self._context & contexts.TEMPLATE_PARAM_VALUE:
            self._context ^= contexts.TEMPLATE_PARAM_VALUE
        elif self._context & contexts.TEMPLATE_PARAM_KEY:
            self._emit_all(self._pop(keep_context=True))
        self._context |= contexts.TEMPLATE_PARAM_KEY
        self._emit(tokens.TemplateParamSeparator())
        self._push(self._context)

    def _handle_template_param_value(self):
        """Handle a template parameter's value at the head of the string."""
        self._emit_all(self._pop(keep_context=True))
        self._context ^= contexts.TEMPLATE_PARAM_KEY
        self._context |= contexts.TEMPLATE_PARAM_VALUE
        self._emit(tokens.TemplateParamEquals())

    def _handle_template_end(self):
        """Handle the end of a template at the head of the string."""
        if self._context & contexts.TEMPLATE_PARAM_KEY:
            self._emit_all(self._pop(keep_context=True))
        self._head += 1
        return self._pop()

    def _handle_argument_separator(self):
        """Handle the separator between an argument's name and default."""
        self._context ^= contexts.ARGUMENT_NAME
        self._context |= contexts.ARGUMENT_DEFAULT
        self._emit(tokens.ArgumentSeparator())

    def _handle_argument_end(self):
        """Handle the end of an argument at the head of the string."""
        self._head += 2
        return self._pop()
    def _parse_wikilink(self):
        """Parse an internal wikilink at the head of the wikicode string."""
        self._head += 2
        reset = self._head - 1
        try:
            wikilink = self._parse(contexts.WIKILINK_TITLE)
        except BadRoute:
            self._head = reset
            self._emit_text("[[")
        else:
            if self._context & contexts.FAIL_NEXT:
                self._context ^= contexts.FAIL_NEXT
            self._emit(tokens.WikilinkOpen())
            self._emit_all(wikilink)
            self._emit(tokens.WikilinkClose())

    def _handle_wikilink_separator(self):
        """Handle the separator between a wikilink's title and its text."""
        self._context ^= contexts.WIKILINK_TITLE
        self._context |= contexts.WIKILINK_TEXT
        self._emit(tokens.WikilinkSeparator())

    def _handle_wikilink_end(self):
        """Handle the end of a wikilink at the head of the string."""
        self._head += 1
        return self._pop()

    def _parse_bracketed_uri_scheme(self):
        """Parse the URI scheme of a bracket-enclosed external link."""
        self._push(contexts.EXT_LINK_URI)
        if self._read() == self._read(1) == "/":
            self._emit_text("//")
            self._head += 2
        else:
            valid = "abcdefghijklmnopqrstuvwxyz0123456789+.-"
            all_valid = lambda: all(char in valid for char in self._read())
            scheme = ""
            while self._read() is not self.END and all_valid():
                scheme += self._read()
                self._emit_text(self._read())
                self._head += 1
            if self._read() != ":":
                self._fail_route()
            self._emit_text(":")
            self._head += 1
            slashes = self._read() == self._read(1) == "/"
            if slashes:
                self._emit_text("//")
                self._head += 2
            if not is_scheme(scheme, slashes):
                self._fail_route()
    def _parse_free_uri_scheme(self):
        """Parse the URI scheme of a free (no brackets) external link."""
        valid = "abcdefghijklmnopqrstuvwxyz0123456789+.-"
        scheme = []
        try:
            # We have to backtrack through the textbuffer looking for our
            # scheme since it was just parsed as text:
            for chunk in reversed(self._textbuffer):
                for char in reversed(chunk):
                    if char.isspace() or char in self.MARKERS:
                        raise StopIteration()
                    if char not in valid:
                        raise BadRoute()
                    scheme.append(char)
        except StopIteration:
            pass
        scheme = "".join(reversed(scheme))
        slashes = self._read() == self._read(1) == "/"
        if not is_scheme(scheme, slashes):
            raise BadRoute()
        self._push(contexts.EXT_LINK_URI)
        self._emit_text(scheme)
        self._emit_text(":")
        if slashes:
            self._emit_text("//")
            self._head += 2
    def _handle_free_link_text(self, punct, tail, this):
        """Handle text in a free ext link, including trailing punctuation."""
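        # Trailing punctuation is buffered in *tail* and only flushed into the
        # link if more link text follows it; otherwise the caller emits it as
        # plain text after the link.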
        if "(" in this and ")" in punct:
            punct = punct[:-1]  # ')' is no longer valid punctuation
        if this.endswith(punct):
            for i in reversed(range(-len(this), 0)):
                if i == -len(this) or this[i - 1] not in punct:
                    break
            stripped = this[:i]
            if stripped and tail:
                self._emit_text(tail)
                tail = ""
            tail += this[i:]
            this = stripped
        elif tail:
            self._emit_text(tail)
            tail = ""
        self._emit_text(this)
        return punct, tail
    def _really_parse_external_link(self, brackets):
        """Really parse an external link."""
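        # Returns a (link stack, trailing text, head delta) triple: the
        # trailing text is punctuation to re-emit after the link, and the
        # delta adjusts the caller's head position.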
        if brackets:
            self._parse_bracketed_uri_scheme()
            invalid = ("\n", " ", "]")
        else:
            self._parse_free_uri_scheme()
            invalid = ("\n", " ", "[", "]")
        punct = tuple(",;\.:!?)")
        if self._read() is self.END or self._read()[0] in invalid:
            self._fail_route()
        tail = ""
        while True:
            this, next = self._read(), self._read(1)
            if this is self.END or this == "\n":
                if brackets:
                    self._fail_route()
                return self._pop(), tail, -1
            elif this == next == "{" and self._can_recurse():
                if tail:
                    self._emit_text(tail)
                    tail = ""
                self._parse_template_or_argument()
            elif this == "[":
                if brackets:
                    self._emit_text("[")
                else:
                    return self._pop(), tail, -1
            elif this == "]":
                return self._pop(), tail, 0 if brackets else -1
            elif this == "&":
                if tail:
                    self._emit_text(tail)
                    tail = ""
                self._parse_entity()
            elif " " in this:
                before, after = this.split(" ", 1)
                if brackets:
                    self._emit_text(before)
                    self._emit(tokens.ExternalLinkSeparator())
                    if after:
                        self._emit_text(after)
                    self._context ^= contexts.EXT_LINK_URI
                    self._context |= contexts.EXT_LINK_TITLE
                    self._head += 1
                    return self._parse(push=False), None, 0
                punct, tail = self._handle_free_link_text(punct, tail, before)
                return self._pop(), tail + " " + after, 0
            elif not brackets:
                punct, tail = self._handle_free_link_text(punct, tail, this)
            else:
                self._emit_text(this)
            self._head += 1
    def _remove_uri_scheme_from_textbuffer(self, scheme):
        """Remove the URI scheme of a new external link from the textbuffer."""
        length = len(scheme)
        while length:
            if length < len(self._textbuffer[-1]):
                self._textbuffer[-1] = self._textbuffer[-1][:-length]
                break
            length -= len(self._textbuffer[-1])
            self._textbuffer.pop()

    def _parse_external_link(self, brackets):
        """Parse an external link at the head of the wikicode string."""
        reset = self._head
        self._head += 1
        try:
            bad_context = self._context & contexts.INVALID_LINK
            if bad_context or not self._can_recurse():
                raise BadRoute()
            link, extra, delta = self._really_parse_external_link(brackets)
        except BadRoute:
            self._head = reset
            if not brackets and self._context & contexts.DL_TERM:
                self._handle_dl_term()
            else:
                self._emit_text(self._read())
        else:
            if not brackets:
                scheme = link[0].text.split(":", 1)[0]
                self._remove_uri_scheme_from_textbuffer(scheme)
            self._emit(tokens.ExternalLinkOpen(brackets=brackets))
            self._emit_all(link)
            self._emit(tokens.ExternalLinkClose())
            self._head += delta
            if extra:
                self._emit_text(extra)
    def _parse_heading(self):
        """Parse a section heading at the head of the wikicode string."""
        self._global |= contexts.GL_HEADING
        reset = self._head
        self._head += 1
        best = 1
        while self._read() == "=":
            best += 1
            self._head += 1
        context = contexts.HEADING_LEVEL_1 << min(best - 1, 5)
        try:
            title, level = self._parse(context)
        except BadRoute:
            self._head = reset + best - 1
            self._emit_text("=" * best)
        else:
            self._emit(tokens.HeadingStart(level=level))
            if level < best:
                self._emit_text("=" * (best - level))
            self._emit_all(title)
            self._emit(tokens.HeadingEnd())
        finally:
            self._global ^= contexts.GL_HEADING

    def _handle_heading_end(self):
        """Handle the end of a section heading at the head of the string."""
        reset = self._head
        self._head += 1
        best = 1
        while self._read() == "=":
            best += 1
            self._head += 1
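        # The heading contexts are consecutive bit flags, so the log base 2 of
        # the ratio to HEADING_LEVEL_1 recovers the level being closed here.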
        current = int(log(self._context / contexts.HEADING_LEVEL_1, 2)) + 1
        level = min(current, min(best, 6))
        try:  # Try to check for a heading closure after this one
            after, after_level = self._parse(self._context)
        except BadRoute:
            if level < best:
                self._emit_text("=" * (best - level))
            self._head = reset + best - 1
            return self._pop(), level
        else:  # Found another closure
            self._emit_text("=" * best)
            self._emit_all(after)
            return self._pop(), after_level
    def _really_parse_entity(self):
        """Actually parse an HTML entity and ensure that it is valid."""
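        # Entities may be named (&amp;), decimal (&#38;), or hexadecimal
        # (&#x26;); the characters allowed in the body depend on which form
        # is being read.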
        self._emit(tokens.HTMLEntityStart())
        self._head += 1

        this = self._read(strict=True)
        if this == "#":
            numeric = True
            self._emit(tokens.HTMLEntityNumeric())
            self._head += 1

            this = self._read(strict=True)
            if this[0].lower() == "x":
                hexadecimal = True
                self._emit(tokens.HTMLEntityHex(char=this[0]))
                this = this[1:]
                if not this:
                    self._fail_route()
            else:
                hexadecimal = False
        else:
            numeric = hexadecimal = False

        valid = "0123456789abcdefABCDEF" if hexadecimal else "0123456789"
        if not numeric and not hexadecimal:
            valid += "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
        if not all([char in valid for char in this]):
            self._fail_route()

        self._head += 1
        if self._read() != ";":
            self._fail_route()
        if numeric:
            test = int(this, 16) if hexadecimal else int(this)
            if test < 1 or test > 0x10FFFF:
                self._fail_route()
        else:
            if this not in htmlentities.entitydefs:
                self._fail_route()

        self._emit(tokens.Text(text=this))
        self._emit(tokens.HTMLEntityEnd())

    def _parse_entity(self):
        """Parse an HTML entity at the head of the wikicode string."""
        reset = self._head
        self._push()
        try:
            self._really_parse_entity()
        except BadRoute:
            self._head = reset
            self._emit_text(self._read())
        else:
            self._emit_all(self._pop())
    def _parse_comment(self):
        """Parse an HTML comment at the head of the wikicode string."""
        self._head += 4
        reset = self._head - 1
        self._push()
        while True:
            this = self._read()
            if this == self.END:
                self._pop()
                self._head = reset
                self._emit_text("<!--")
                return
            if this == self._read(1) == "-" and self._read(2) == ">":
                self._emit_first(tokens.CommentStart())
                self._emit(tokens.CommentEnd())
                self._emit_all(self._pop())
                self._head += 2
                return
            self._emit_text(this)
            self._head += 1
    def _push_tag_buffer(self, data):
        """Write a pending tag attribute from *data* to the stack."""
        if data.context & data.CX_QUOTED:
            self._emit_first(tokens.TagAttrQuote())
            self._emit_all(self._pop())
        buf = data.padding_buffer
        self._emit_first(tokens.TagAttrStart(pad_first=buf["first"],
            pad_before_eq=buf["before_eq"], pad_after_eq=buf["after_eq"]))
        self._emit_all(self._pop())
        data.padding_buffer = {key: "" for key in data.padding_buffer}

    def _handle_tag_space(self, data, text):
        """Handle whitespace (*text*) inside of an HTML open tag."""
        ctx = data.context
        end_of_value = ctx & data.CX_ATTR_VALUE and not ctx & (data.CX_QUOTED | data.CX_NOTE_QUOTE)
        if end_of_value or (ctx & data.CX_QUOTED and ctx & data.CX_NOTE_SPACE):
            self._push_tag_buffer(data)
            data.context = data.CX_ATTR_READY
        elif ctx & data.CX_NOTE_SPACE:
            data.context = data.CX_ATTR_READY
        elif ctx & data.CX_ATTR_NAME:
            data.context |= data.CX_NOTE_EQUALS
            data.padding_buffer["before_eq"] += text
        if ctx & data.CX_QUOTED and not ctx & data.CX_NOTE_SPACE:
            self._emit_text(text)
        elif data.context & data.CX_ATTR_READY:
            data.padding_buffer["first"] += text
        elif data.context & data.CX_ATTR_VALUE:
            data.padding_buffer["after_eq"] += text
    def _handle_tag_text(self, text):
        """Handle regular *text* inside of an HTML open tag."""
        next = self._read(1)
        if not self._can_recurse() or text not in self.MARKERS:
            self._emit_text(text)
        elif text == next == "{":
            self._parse_template_or_argument()
        elif text == next == "[":
            self._parse_wikilink()
        elif text == "<":
            self._parse_tag()
        else:
            self._emit_text(text)

    def _handle_tag_data(self, data, text):
        """Handle all sorts of *text* data inside of an HTML open tag."""
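        # The text is split around runs of whitespace, quotes, and backslashes
        # so each chunk can be fed through the tag's attribute state machine.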
        for chunk in self.tag_splitter.split(text):
            if not chunk:
                continue
            if data.context & data.CX_NAME:
                if chunk in self.MARKERS or chunk.isspace():
                    self._fail_route()  # Tags must start with text, not spaces
                data.context = data.CX_NOTE_SPACE
            elif chunk.isspace():
                self._handle_tag_space(data, chunk)
                continue
            elif data.context & data.CX_NOTE_SPACE:
                if data.context & data.CX_QUOTED:
                    data.context = data.CX_ATTR_VALUE
                    self._pop()
                    self._head = data.reset - 1  # Will be auto-incremented
                    return  # Break early
                self._fail_route()
            elif data.context & data.CX_ATTR_READY:
                data.context = data.CX_ATTR_NAME
                self._push(contexts.TAG_ATTR)
            elif data.context & data.CX_ATTR_NAME:
                if chunk == "=":
                    data.context = data.CX_ATTR_VALUE | data.CX_NOTE_QUOTE
                    self._emit(tokens.TagAttrEquals())
                    continue
                if data.context & data.CX_NOTE_EQUALS:
                    self._push_tag_buffer(data)
                    data.context = data.CX_ATTR_NAME
                    self._push(contexts.TAG_ATTR)
            elif data.context & data.CX_ATTR_VALUE:
                escaped = self._read(-1) == "\\" and self._read(-2) != "\\"
                if data.context & data.CX_NOTE_QUOTE:
                    data.context ^= data.CX_NOTE_QUOTE
                    if chunk == '"' and not escaped:
                        data.context |= data.CX_QUOTED
                        self._push(self._context)
                        data.reset = self._head
                        continue
                elif data.context & data.CX_QUOTED:
                    if chunk == '"' and not escaped:
                        data.context |= data.CX_NOTE_SPACE
                        continue
            self._handle_tag_text(chunk)
    def _handle_tag_close_open(self, data, token):
        """Handle the closing of an open tag (``<foo>``)."""
        if data.context & (data.CX_ATTR_NAME | data.CX_ATTR_VALUE):
            self._push_tag_buffer(data)
        self._emit(token(padding=data.padding_buffer["first"]))
        self._head += 1

    def _handle_tag_open_close(self):
        """Handle the opening of a closing tag (``</foo>``)."""
        self._emit(tokens.TagOpenClose())
        self._push(contexts.TAG_CLOSE)
        self._head += 1

    def _handle_tag_close_close(self):
        """Handle the ending of a closing tag (``</foo>``)."""
        strip = lambda tok: tok.text.rstrip().lower()
        closing = self._pop()
        if len(closing) != 1 or (not isinstance(closing[0], tokens.Text) or
                                 strip(closing[0]) != strip(self._stack[1])):
            self._fail_route()
        self._emit_all(closing)
        self._emit(tokens.TagCloseClose())
        return self._pop()
    def _handle_blacklisted_tag(self):
        """Handle the body of an HTML tag that is parser-blacklisted."""
        while True:
            this, next = self._read(), self._read(1)
            if this is self.END:
                self._fail_route()
            elif this == "<" and next == "/":
                self._handle_tag_open_close()
                self._head += 1
                return self._parse(push=False)
            elif this == "&":
                self._parse_entity()
            else:
                self._emit_text(this)
            self._head += 1

    def _handle_single_only_tag_end(self):
        """Handle the end of an implicitly closing single-only HTML tag."""
        padding = self._stack.pop().padding
        self._emit(tokens.TagCloseSelfclose(padding=padding, implicit=True))
        self._head -= 1  # Offset displacement done by _handle_tag_close_open
        return self._pop()

    def _handle_single_tag_end(self):
        """Handle the stream end when inside a single-supporting HTML tag."""
        gen = enumerate(self._stack)
        index = next(i for i, t in gen if isinstance(t, tokens.TagCloseOpen))
        padding = self._stack[index].padding
        token = tokens.TagCloseSelfclose(padding=padding, implicit=True)
        self._stack[index] = token
        return self._pop()
    def _really_parse_tag(self):
        """Actually parse an HTML tag, starting with the open (``<foo>``)."""
        data = _TagOpenData()
        self._push(contexts.TAG_OPEN)
        self._emit(tokens.TagOpenOpen())
        while True:
            this, next = self._read(), self._read(1)
            can_exit = (not data.context & (data.CX_QUOTED | data.CX_NAME) or
                        data.context & data.CX_NOTE_SPACE)
            if this is self.END:
                if self._context & contexts.TAG_ATTR:
                    if data.context & data.CX_QUOTED:
                        # Unclosed attribute quote: reset, don't die
                        data.context = data.CX_ATTR_VALUE
                        self._pop()
                        self._head = data.reset
                        continue
                    self._pop()
                self._fail_route()
            elif this == ">" and can_exit:
                self._handle_tag_close_open(data, tokens.TagCloseOpen)
                self._context = contexts.TAG_BODY
                if is_single_only(self._stack[1].text):
                    return self._handle_single_only_tag_end()
                if is_parsable(self._stack[1].text):
                    return self._parse(push=False)
                return self._handle_blacklisted_tag()
            elif this == "/" and next == ">" and can_exit:
                self._handle_tag_close_open(data, tokens.TagCloseSelfclose)
                return self._pop()
            else:
                self._handle_tag_data(data, this)
            self._head += 1

    def _handle_invalid_tag_start(self):
        """Handle the (possible) start of an implicitly closing single tag."""
        reset = self._head + 1
        self._head += 2
        try:
            if not is_single_only(self.tag_splitter.split(self._read())[0]):
                raise BadRoute()
            tag = self._really_parse_tag()
        except BadRoute:
            self._head = reset
            self._emit_text("</")
        else:
            tag[0].invalid = True  # Set flag of TagOpenOpen
            self._emit_all(tag)

    def _parse_tag(self):
        """Parse an HTML tag at the head of the wikicode string."""
        reset = self._head
        self._head += 1
        try:
            tag = self._really_parse_tag()
        except BadRoute:
            self._head = reset
            self._emit_text("<")
        else:
            self._emit_all(tag)
    def _emit_style_tag(self, tag, markup, body):
        """Write the body of a tag and the tokens that should surround it."""
        self._emit(tokens.TagOpenOpen(wiki_markup=markup))
        self._emit_text(tag)
        self._emit(tokens.TagCloseOpen())
        self._emit_all(body)
        self._emit(tokens.TagOpenClose())
        self._emit_text(tag)
        self._emit(tokens.TagCloseClose())

    def _parse_italics(self):
        """Parse wiki-style italics."""
        reset = self._head
        try:
            stack = self._parse(contexts.STYLE_ITALICS)
        except BadRoute as route:
            self._head = reset
            if route.context & contexts.STYLE_PASS_AGAIN:
                stack = self._parse(route.context | contexts.STYLE_SECOND_PASS)
            else:
                return self._emit_text("''")
        self._emit_style_tag("i", "''", stack)

    def _parse_bold(self):
        """Parse wiki-style bold."""
        reset = self._head
        try:
            stack = self._parse(contexts.STYLE_BOLD)
        except BadRoute:
            self._head = reset
            if self._context & contexts.STYLE_SECOND_PASS:
                self._emit_text("'")
                return True
            elif self._context & contexts.STYLE_ITALICS:
                self._context |= contexts.STYLE_PASS_AGAIN
                self._emit_text("'''")
            else:
                self._emit_text("'")
                self._parse_italics()
        else:
            self._emit_style_tag("b", "'''", stack)
    def _parse_italics_and_bold(self):
        """Parse wiki-style italics and bold together (i.e., five ticks)."""
        reset = self._head
        try:
            stack = self._parse(contexts.STYLE_BOLD)
        except BadRoute:
            self._head = reset
            try:
                stack = self._parse(contexts.STYLE_ITALICS)
            except BadRoute:
                self._head = reset
                self._emit_text("'''''")
            else:
                reset = self._head
                try:
                    stack2 = self._parse(contexts.STYLE_BOLD)
                except BadRoute:
                    self._head = reset
                    self._emit_text("'''")
                    self._emit_style_tag("i", "''", stack)
                else:
                    self._push()
                    self._emit_style_tag("i", "''", stack)
                    self._emit_all(stack2)
                    self._emit_style_tag("b", "'''", self._pop())
        else:
            reset = self._head
            try:
                stack2 = self._parse(contexts.STYLE_ITALICS)
            except BadRoute:
                self._head = reset
                self._emit_text("''")
                self._emit_style_tag("b", "'''", stack)
            else:
                self._push()
                self._emit_style_tag("b", "'''", stack)
                self._emit_all(stack2)
                self._emit_style_tag("i", "''", self._pop())
    def _parse_style(self):
        """Parse wiki-style formatting (``''``/``'''`` for italics/bold)."""
        self._head += 2
        ticks = 2
        while self._read() == "'":
            self._head += 1
            ticks += 1
        italics = self._context & contexts.STYLE_ITALICS
        bold = self._context & contexts.STYLE_BOLD
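        # Two ticks mean italics, three mean bold, and five mean both; four is
        # treated as bold preceded by a literal apostrophe, and runs longer
        # than five emit the excess apostrophes as plain text.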
        if ticks > 5:
            self._emit_text("'" * (ticks - 5))
            ticks = 5
        elif ticks == 4:
            self._emit_text("'")
            ticks = 3

        if (italics and ticks in (2, 5)) or (bold and ticks in (3, 5)):
            if ticks == 5:
                self._head -= 3 if italics else 2
            return self._pop()
        elif not self._can_recurse():
            if ticks == 3:
                if self._context & contexts.STYLE_SECOND_PASS:
                    self._emit_text("'")
                    return self._pop()
                self._context |= contexts.STYLE_PASS_AGAIN
            self._emit_text("'" * ticks)
        elif ticks == 2:
            self._parse_italics()
        elif ticks == 3:
            if self._parse_bold():
                return self._pop()
        elif ticks == 5:
            self._parse_italics_and_bold()
        self._head -= 1
    def _handle_list_marker(self):
        """Handle a list marker at the head (``#``, ``*``, ``;``, ``:``)."""
        markup = self._read()
        if markup == ";":
            self._context |= contexts.DL_TERM
        self._emit(tokens.TagOpenOpen(wiki_markup=markup))
        self._emit_text(get_html_tag(markup))
        self._emit(tokens.TagCloseSelfclose())

    def _handle_list(self):
        """Handle a wiki-style list (``#``, ``*``, ``;``, ``:``)."""
        self._handle_list_marker()
        while self._read(1) in ("#", "*", ";", ":"):
            self._head += 1
            self._handle_list_marker()

    def _handle_hr(self):
        """Handle a wiki-style horizontal rule (``----``) in the string."""
        length = 4
        self._head += 3
        while self._read(1) == "-":
            length += 1
            self._head += 1
        self._emit(tokens.TagOpenOpen(wiki_markup="-" * length))
        self._emit_text("hr")
        self._emit(tokens.TagCloseSelfclose())

    def _handle_dl_term(self):
        """Handle the term in a description list (``foo`` in ``;foo:bar``)."""
        self._context ^= contexts.DL_TERM
        if self._read() == ":":
            self._handle_list_marker()
        else:
            self._emit_text("\n")

    def _handle_end(self):
        """Handle the end of the stream of wikitext."""
        if self._context & contexts.FAIL:
            if self._context & contexts.TAG_BODY:
                if is_single(self._stack[1].text):
                    return self._handle_single_tag_end()
            if self._context & contexts.DOUBLE:
                self._pop()
            self._fail_route()
        return self._pop()
    def _verify_safe(self, this):
        """Make sure we are not trying to write an invalid character."""
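        # Returning False fails the route immediately; setting FAIL_NEXT
        # instead defers the failure until the next character is checked.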
        context = self._context
        if context & contexts.FAIL_NEXT:
            return False
        if context & contexts.WIKILINK:
            if context & contexts.WIKILINK_TEXT:
                return not (this == self._read(1) == "[")
            elif this == "]" or this == "{":
                self._context |= contexts.FAIL_NEXT
            elif this == "\n" or this == "[" or this == "}":
                return False
            return True
        elif context & contexts.EXT_LINK_TITLE:
            return this != "\n"
        elif context & contexts.TEMPLATE_NAME:
            if this == "{" or this == "}" or this == "[":
                self._context |= contexts.FAIL_NEXT
                return True
            if this == "]":
                return False
            if this == "|":
                return True
            if context & contexts.HAS_TEXT:
                if context & contexts.FAIL_ON_TEXT:
                    if this is self.END or not this.isspace():
                        return False
                else:
                    if this == "\n":
                        self._context |= contexts.FAIL_ON_TEXT
            elif this is self.END or not this.isspace():
                self._context |= contexts.HAS_TEXT
            return True
        elif context & contexts.TAG_CLOSE:
            return this != "<"
        else:
            if context & contexts.FAIL_ON_EQUALS:
                if this == "=":
                    return False
            elif context & contexts.FAIL_ON_LBRACE:
                if this == "{" or (self._read(-1) == self._read(-2) == "{"):
                    if context & contexts.TEMPLATE:
                        self._context |= contexts.FAIL_ON_EQUALS
                    else:
                        self._context |= contexts.FAIL_NEXT
                    return True
                self._context ^= contexts.FAIL_ON_LBRACE
            elif context & contexts.FAIL_ON_RBRACE:
                if this == "}":
                    if context & contexts.TEMPLATE:
                        self._context |= contexts.FAIL_ON_EQUALS
                    else:
                        self._context |= contexts.FAIL_NEXT
                    return True
                self._context ^= contexts.FAIL_ON_RBRACE
            elif this == "{":
                self._context |= contexts.FAIL_ON_LBRACE
            elif this == "}":
                self._context |= contexts.FAIL_ON_RBRACE
            return True
    def _parse(self, context=0, push=True):
        """Parse the wikicode string, using *context* for when to stop."""
        if push:
            self._push(context)
        while True:
            this = self._read()
            if self._context & contexts.UNSAFE:
                if not self._verify_safe(this):
                    if self._context & contexts.DOUBLE:
                        self._pop()
                    self._fail_route()
            if this not in self.MARKERS:
                self._emit_text(this)
                self._head += 1
                continue
            if this is self.END:
                return self._handle_end()
            next = self._read(1)
            if this == next == "{":
                if self._can_recurse():
                    self._parse_template_or_argument()
                else:
                    self._emit_text("{")
            elif this == "|" and self._context & contexts.TEMPLATE:
                self._handle_template_param()
            elif this == "=" and self._context & contexts.TEMPLATE_PARAM_KEY:
                self._handle_template_param_value()
            elif this == next == "}" and self._context & contexts.TEMPLATE:
                return self._handle_template_end()
            elif this == "|" and self._context & contexts.ARGUMENT_NAME:
                self._handle_argument_separator()
            elif this == next == "}" and self._context & contexts.ARGUMENT:
                if self._read(2) == "}":
                    return self._handle_argument_end()
                else:
                    self._emit_text("}")
            elif this == next == "[" and self._can_recurse():
                if not self._context & contexts.INVALID_LINK:
                    self._parse_wikilink()
                else:
                    self._emit_text("[")
            elif this == "|" and self._context & contexts.WIKILINK_TITLE:
                self._handle_wikilink_separator()
            elif this == next == "]" and self._context & contexts.WIKILINK:
                return self._handle_wikilink_end()
            elif this == "[":
                self._parse_external_link(True)
            elif this == ":" and self._read(-1) not in self.MARKERS:
                self._parse_external_link(False)
            elif this == "]" and self._context & contexts.EXT_LINK_TITLE:
                return self._pop()
            elif this == "=" and not self._global & contexts.GL_HEADING:
                if self._read(-1) in ("\n", self.START):
                    self._parse_heading()
                else:
                    self._emit_text("=")
            elif this == "=" and self._context & contexts.HEADING:
                return self._handle_heading_end()
            elif this == "\n" and self._context & contexts.HEADING:
                self._fail_route()
            elif this == "&":
                self._parse_entity()
            elif this == "<" and next == "!":
                if self._read(2) == self._read(3) == "-":
                    self._parse_comment()
                else:
                    self._emit_text(this)
            elif this == "<" and next == "/" and self._read(2) is not self.END:
                if self._context & contexts.TAG_BODY:
                    self._handle_tag_open_close()
                else:
                    self._handle_invalid_tag_start()
            elif this == "<" and not self._context & contexts.TAG_CLOSE:
                if self._can_recurse():
                    self._parse_tag()
                else:
                    self._emit_text("<")
            elif this == ">" and self._context & contexts.TAG_CLOSE:
                return self._handle_tag_close_close()
            elif this == next == "'":
                result = self._parse_style()
                if result is not None:
                    return result
            elif self._read(-1) in ("\n", self.START):
                if this in ("#", "*", ";", ":"):
                    self._handle_list()
                elif this == next == self._read(2) == self._read(3) == "-":
                    self._handle_hr()
                else:
                    self._emit_text(this)
            elif this in ("\n", ":") and self._context & contexts.DL_TERM:
                self._handle_dl_term()
            else:
                self._emit_text(this)
            self._head += 1
    def tokenize(self, text, context=0):
        """Build a list of tokens from a string of wikicode and return it."""
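        # The text is pre-split on the wiki markup characters in *regex*, so
        # _read() sees whole runs of plain text between individual markers.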
        split = self.regex.split(text)
        self._text = [segment for segment in split if segment]
        self._head = self._global = self._depth = self._cycles = 0
        return self._parse(context)