A Python parser for MediaWiki wikicode https://mwparserfromhell.readthedocs.io/
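The file below is the pure-Python tokenizer that powers the library. For orientation, here is a minimal usage sketch of the documented top-level API (the mwparserfromhell.parse entry point; the output comments are illustrative, not exact):

    import mwparserfromhell

    # Parse a fragment of wikicode; the Tokenizer defined below produces the
    # token stream that the builder then turns into a node tree.
    code = mwparserfromhell.parse("{{foo|bar=baz}} and [[Example|a link]]")
    templates = code.filter_templates()   # list of Template nodes
    print(templates[0].name)              # -> foo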
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2013 Ben Kurtovic <ben.kurtovic@verizon.net>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import unicode_literals
from math import log
import re
import string

from . import contexts
from . import tokens
from ..compat import htmlentities

__all__ = ["Tokenizer"]


class BadRoute(Exception):
    """Raised internally when the current tokenization route is invalid."""
    pass


class Tokenizer(object):
    """Creates a list of tokens from a string of wikicode."""
    START = object()
    END = object()
    MARKERS = ["{", "}", "[", "]", "<", ">", "|", "=", "&", "#", "*", ";", ":",
               "/", "-", "!", "\n", END]
    regex = re.compile(r"([{}\[\]<>|=&#*;:/\-!\n])", flags=re.IGNORECASE)

    def __init__(self):
        self._text = None
        self._head = 0
        self._stacks = []
        self._global = 0

    @property
    def _stack(self):
        """The current token stack."""
        return self._stacks[-1][0]

    @property
    def _context(self):
        """The current token context."""
        return self._stacks[-1][1]

    @_context.setter
    def _context(self, value):
        self._stacks[-1][1] = value

    @property
    def _textbuffer(self):
        """The current textbuffer."""
        return self._stacks[-1][2]

    @_textbuffer.setter
    def _textbuffer(self, value):
        self._stacks[-1][2] = value

    def _push(self, context=0):
        """Add a new token stack, context, and textbuffer to the list."""
        self._stacks.append([[], context, []])

    def _push_textbuffer(self):
        """Push the textbuffer onto the stack as a Text node and clear it."""
        if self._textbuffer:
            self._stack.append(tokens.Text(text="".join(self._textbuffer)))
            self._textbuffer = []
    def _pop(self, keep_context=False):
        """Pop the current stack/context/textbuffer, returning the stack.

        If *keep_context* is ``True``, then we will replace the underlying
        stack's context with the current stack's.
        """
        self._push_textbuffer()
        if keep_context:
            context = self._context
            stack = self._stacks.pop()[0]
            self._context = context
            return stack
        return self._stacks.pop()[0]

    def _fail_route(self):
        """Fail the current tokenization route.

        Discards the current stack/context/textbuffer and raises
        :py:exc:`~.BadRoute`.
        """
        self._pop()
        raise BadRoute()
    def _write(self, token):
        """Write a token to the end of the current token stack."""
        self._push_textbuffer()
        self._stack.append(token)

    def _write_first(self, token):
        """Write a token to the beginning of the current token stack."""
        self._push_textbuffer()
        self._stack.insert(0, token)

    def _write_text(self, text):
        """Write text to the current textbuffer."""
        self._textbuffer.append(text)

    def _write_all(self, tokenlist):
        """Write a series of tokens to the current stack at once."""
        if tokenlist and isinstance(tokenlist[0], tokens.Text):
            self._write_text(tokenlist.pop(0).text)
        self._push_textbuffer()
        self._stack.extend(tokenlist)

    def _write_text_then_stack(self, text):
        """Pop the current stack, write *text*, and then write the stack."""
        stack = self._pop()
        self._write_text(text)
        if stack:
            self._write_all(stack)
        self._head -= 1

    def _read(self, delta=0, wrap=False, strict=False):
        """Read the value at a relative point in the wikicode.

        The value is read from :py:attr:`self._head <_head>` plus the value of
        *delta* (which can be negative). If *wrap* is ``False``, we will not
        allow attempts to read from the end of the string if ``self._head +
        delta`` is negative. If *strict* is ``True``, the route will be failed
        (with :py:meth:`_fail_route`) if we try to read from past the end of
        the string; otherwise, :py:attr:`self.END <END>` is returned. If we try
        to read from before the start of the string, :py:attr:`self.START
        <START>` is returned.
        """
        index = self._head + delta
        if index < 0 and (not wrap or abs(index) > len(self._text)):
            return self.START
        try:
            return self._text[index]
        except IndexError:
            if strict:
                self._fail_route()
            return self.END
    def _parse_template_or_argument(self):
        """Parse a template or argument at the head of the wikicode string."""
        self._head += 2
        braces = 2
        while self._read() == "{":
            self._head += 1
            braces += 1
        self._push()
        while braces:
            if braces == 1:
                return self._write_text_then_stack("{")
            if braces == 2:
                try:
                    self._parse_template()
                except BadRoute:
                    return self._write_text_then_stack("{{")
                break
            try:
                self._parse_argument()
                braces -= 3
            except BadRoute:
                try:
                    self._parse_template()
                    braces -= 2
                except BadRoute:
                    return self._write_text_then_stack("{" * braces)
            if braces:
                self._head += 1
        self._write_all(self._pop())

    def _parse_template(self):
        """Parse a template at the head of the wikicode string."""
        reset = self._head
        try:
            template = self._parse(contexts.TEMPLATE_NAME)
        except BadRoute:
            self._head = reset
            raise
        self._write_first(tokens.TemplateOpen())
        self._write_all(template)
        self._write(tokens.TemplateClose())

    def _parse_argument(self):
        """Parse an argument at the head of the wikicode string."""
        reset = self._head
        try:
            argument = self._parse(contexts.ARGUMENT_NAME)
        except BadRoute:
            self._head = reset
            raise
        self._write_first(tokens.ArgumentOpen())
        self._write_all(argument)
        self._write(tokens.ArgumentClose())
    def _verify_safe(self, unsafes):
        """Verify that there are no unsafe characters in the current stack.

        The route will be failed if the name contains any element of *unsafes*
        in it (not merely at the beginning or end). This is used when parsing a
        template name or parameter key, which cannot contain newlines.
        """
        self._push_textbuffer()
        if self._stack:
            text = [tok for tok in self._stack if isinstance(tok, tokens.Text)]
            text = "".join([token.text for token in text]).strip()
            if text and any([unsafe in text for unsafe in unsafes]):
                self._fail_route()

    def _handle_template_param(self):
        """Handle a template parameter at the head of the string."""
        if self._context & contexts.TEMPLATE_NAME:
            self._verify_safe(["\n", "{", "}", "[", "]"])
            self._context ^= contexts.TEMPLATE_NAME
        elif self._context & contexts.TEMPLATE_PARAM_VALUE:
            self._context ^= contexts.TEMPLATE_PARAM_VALUE
        elif self._context & contexts.TEMPLATE_PARAM_KEY:
            self._write_all(self._pop(keep_context=True))
        self._context |= contexts.TEMPLATE_PARAM_KEY
        self._write(tokens.TemplateParamSeparator())
        self._push(self._context)

    def _handle_template_param_value(self):
        """Handle a template parameter's value at the head of the string."""
        try:
            self._verify_safe(["\n", "{{", "}}"])
        except BadRoute:
            self._pop()
            raise
        self._write_all(self._pop(keep_context=True))
        self._context ^= contexts.TEMPLATE_PARAM_KEY
        self._context |= contexts.TEMPLATE_PARAM_VALUE
        self._write(tokens.TemplateParamEquals())

    def _handle_template_end(self):
        """Handle the end of a template at the head of the string."""
        if self._context & contexts.TEMPLATE_NAME:
            self._verify_safe(["\n", "{", "}", "[", "]"])
        elif self._context & contexts.TEMPLATE_PARAM_KEY:
            self._write_all(self._pop(keep_context=True))
        self._head += 1
        return self._pop()

    def _handle_argument_separator(self):
        """Handle the separator between an argument's name and default."""
        self._verify_safe(["\n", "{{", "}}"])
        self._context ^= contexts.ARGUMENT_NAME
        self._context |= contexts.ARGUMENT_DEFAULT
        self._write(tokens.ArgumentSeparator())

    def _handle_argument_end(self):
        """Handle the end of an argument at the head of the string."""
        if self._context & contexts.ARGUMENT_NAME:
            self._verify_safe(["\n", "{{", "}}"])
        self._head += 2
        return self._pop()
    def _parse_wikilink(self):
        """Parse an internal wikilink at the head of the wikicode string."""
        self._head += 2
        reset = self._head - 1
        try:
            wikilink = self._parse(contexts.WIKILINK_TITLE)
        except BadRoute:
            self._head = reset
            self._write_text("[[")
        else:
            self._write(tokens.WikilinkOpen())
            self._write_all(wikilink)
            self._write(tokens.WikilinkClose())

    def _handle_wikilink_separator(self):
        """Handle the separator between a wikilink's title and its text."""
        self._verify_safe(["\n", "{", "}", "[", "]"])
        self._context ^= contexts.WIKILINK_TITLE
        self._context |= contexts.WIKILINK_TEXT
        self._write(tokens.WikilinkSeparator())

    def _handle_wikilink_end(self):
        """Handle the end of a wikilink at the head of the string."""
        if self._context & contexts.WIKILINK_TITLE:
            self._verify_safe(["\n", "{", "}", "[", "]"])
        self._head += 1
        return self._pop()

    def _parse_heading(self):
        """Parse a section heading at the head of the wikicode string."""
        self._global |= contexts.GL_HEADING
        reset = self._head
        self._head += 1
        best = 1
        while self._read() == "=":
            best += 1
            self._head += 1
        context = contexts.HEADING_LEVEL_1 << min(best - 1, 5)
        try:
            title, level = self._parse(context)
        except BadRoute:
            self._head = reset + best - 1
            self._write_text("=" * best)
        else:
            self._write(tokens.HeadingStart(level=level))
            if level < best:
                self._write_text("=" * (best - level))
            self._write_all(title)
            self._write(tokens.HeadingEnd())
        finally:
            self._global ^= contexts.GL_HEADING

    def _handle_heading_end(self):
        """Handle the end of a section heading at the head of the string."""
        reset = self._head
        self._head += 1
        best = 1
        while self._read() == "=":
            best += 1
            self._head += 1
        current = int(log(self._context / contexts.HEADING_LEVEL_1, 2)) + 1
        level = min(current, min(best, 6))
        try:  # Try to check for a heading closure after this one
            after, after_level = self._parse(self._context)
        except BadRoute:
            if level < best:
                self._write_text("=" * (best - level))
            self._head = reset + best - 1
            return self._pop(), level
        else:  # Found another closure
            self._write_text("=" * best)
            self._write_all(after)
            return self._pop(), after_level
    def _really_parse_entity(self):
        """Actually parse an HTML entity and ensure that it is valid."""
        self._write(tokens.HTMLEntityStart())
        self._head += 1
        this = self._read(strict=True)
        if this == "#":
            numeric = True
            self._write(tokens.HTMLEntityNumeric())
            self._head += 1
            this = self._read(strict=True)
            if this[0].lower() == "x":
                hexadecimal = True
                self._write(tokens.HTMLEntityHex(char=this[0]))
                this = this[1:]
                if not this:
                    self._fail_route()
            else:
                hexadecimal = False
        else:
            numeric = hexadecimal = False
        valid = string.hexdigits if hexadecimal else string.digits
        if not numeric and not hexadecimal:
            valid += string.ascii_letters
        if not all([char in valid for char in this]):
            self._fail_route()
        self._head += 1
        if self._read() != ";":
            self._fail_route()
        if numeric:
            test = int(this, 16) if hexadecimal else int(this)
            if test < 1 or test > 0x10FFFF:
                self._fail_route()
        else:
            if this not in htmlentities.entitydefs:
                self._fail_route()
        self._write(tokens.Text(text=this))
        self._write(tokens.HTMLEntityEnd())

    def _parse_entity(self):
        """Parse an HTML entity at the head of the wikicode string."""
        reset = self._head
        self._push()
        try:
            self._really_parse_entity()
        except BadRoute:
            self._head = reset
            self._write_text(self._read())
        else:
            self._write_all(self._pop())

    def _parse_comment(self):
        """Parse an HTML comment at the head of the wikicode string."""
        self._head += 4
        reset = self._head - 1
        try:
            comment = self._parse(contexts.COMMENT)
        except BadRoute:
            self._head = reset
            self._write_text("<!--")
        else:
            self._write(tokens.CommentStart())
            self._write_all(comment)
            self._write(tokens.CommentEnd())
            self._head += 2
    def _parse(self, context=0):
        """Parse the wikicode string, using *context* for when to stop."""
        self._push(context)
        while True:
            this = self._read()
            if this not in self.MARKERS:
                self._write_text(this)
                self._head += 1
                continue
            if this is self.END:
                fail = (contexts.TEMPLATE | contexts.ARGUMENT |
                        contexts.WIKILINK | contexts.HEADING |
                        contexts.COMMENT)
                if self._context & contexts.TEMPLATE_PARAM_KEY:
                    self._pop()
                if self._context & fail:
                    self._fail_route()
                return self._pop()
            next = self._read(1)
            if self._context & contexts.COMMENT:
                if this == next == "-" and self._read(2) == ">":
                    return self._pop()
                else:
                    self._write_text(this)
            elif this == next == "{":
                self._parse_template_or_argument()
            elif this == "|" and self._context & contexts.TEMPLATE:
                self._handle_template_param()
            elif this == "=" and self._context & contexts.TEMPLATE_PARAM_KEY:
                self._handle_template_param_value()
            elif this == next == "}" and self._context & contexts.TEMPLATE:
                return self._handle_template_end()
            elif this == "|" and self._context & contexts.ARGUMENT_NAME:
                self._handle_argument_separator()
            elif this == next == "}" and self._context & contexts.ARGUMENT:
                if self._read(2) == "}":
                    return self._handle_argument_end()
                else:
                    self._write_text("}")
            elif this == next == "[":
                if not self._context & contexts.WIKILINK_TITLE:
                    self._parse_wikilink()
                else:
                    self._write_text("[")
            elif this == "|" and self._context & contexts.WIKILINK_TITLE:
                self._handle_wikilink_separator()
            elif this == next == "]" and self._context & contexts.WIKILINK:
                return self._handle_wikilink_end()
            elif this == "=" and not self._global & contexts.GL_HEADING:
                if self._read(-1) in ("\n", self.START):
                    self._parse_heading()
                else:
                    self._write_text("=")
            elif this == "=" and self._context & contexts.HEADING:
                return self._handle_heading_end()
            elif this == "\n" and self._context & contexts.HEADING:
                self._fail_route()
            elif this == "&":
                self._parse_entity()
            elif this == "<" and next == "!":
                if self._read(2) == self._read(3) == "-":
                    self._parse_comment()
                else:
                    self._write_text(this)
            else:
                self._write_text(this)
            self._head += 1

    def tokenize(self, text):
        """Build a list of tokens from a string of wikicode and return it."""
        split = self.regex.split(text)
        self._text = [segment for segment in split if segment]
        return self._parse()
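As a rough illustration of what this class produces, here is a sketch of driving the tokenizer directly (normally it is only invoked internally by the parser; the module path is assumed from the relative imports above, and the token reprs are approximate):

    from mwparserfromhell.parser.tokenizer import Tokenizer

    toks = Tokenizer().tokenize("{{foo|bar=baz}}")
    # Roughly: [TemplateOpen(), Text(text="foo"), TemplateParamSeparator(),
    #           Text(text="bar"), TemplateParamEquals(), Text(text="baz"),
    #           TemplateClose()]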