A Python parser for MediaWiki wikicode https://mwparserfromhell.readthedocs.io/
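Most users never drive the tokenizer in this file directly; the usual entry point is the library's top-level parse() function, which returns a Wikicode tree. A minimal sketch of that typical usage, assuming a released version of mwparserfromhell is installed:

    import mwparserfromhell

    # Parse a snippet of wikicode into a Wikicode tree and inspect its templates.
    code = mwparserfromhell.parse("{{Infobox person|name=Ada Lovelace}} was a mathematician.")
    for template in code.filter_templates():
        print(template.name)               # Infobox person
        print(template.get("name").value)  # Ada Lovelace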

tokenizer.py 15 KiB

# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 Ben Kurtovic <ben.kurtovic@verizon.net>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

from __future__ import unicode_literals
from math import log
import re
import string

from . import contexts
from . import tokens
from ..compat import htmlentities

__all__ = ["Tokenizer"]


class BadRoute(Exception):
    """Raised internally when the current tokenization route is invalid."""
    pass


class Tokenizer(object):
    """Creates a list of tokens from a string of wikicode."""
    START = object()
    END = object()
    MARKERS = ["{", "}", "[", "]", "<", ">", "|", "=", "&", "#", "*", ";", ":",
               "/", "-", "\n", END]
    regex = re.compile(r"([{}\[\]<>|=&#*;:/\-\n])", flags=re.IGNORECASE)

    def __init__(self):
        self._text = None
        self._head = 0
        self._stacks = []
        self._global = 0

    @property
    def _stack(self):
        """The current token stack."""
        return self._stacks[-1][0]

    @property
    def _context(self):
        """The current token context."""
        return self._stacks[-1][1]

    @_context.setter
    def _context(self, value):
        self._stacks[-1][1] = value

    @property
    def _textbuffer(self):
        """The current textbuffer."""
        return self._stacks[-1][2]

    @_textbuffer.setter
    def _textbuffer(self, value):
        self._stacks[-1][2] = value

    def _push(self, context=0):
        """Add a new token stack, context, and textbuffer to the list."""
        self._stacks.append([[], context, []])

    def _push_textbuffer(self):
        """Push the textbuffer onto the stack as a Text node and clear it."""
        if self._textbuffer:
            self._stack.append(tokens.Text(text="".join(self._textbuffer)))
            self._textbuffer = []
    def _pop(self, keep_context=False):
        """Pop the current stack/context/textbuffer, returning the stack.

        If *keep_context* is ``True``, then we will replace the underlying
        stack's context with the current stack's.
        """
        self._push_textbuffer()
        if keep_context:
            context = self._context
            stack = self._stacks.pop()[0]
            self._context = context
            return stack
        return self._stacks.pop()[0]
    def _fail_route(self):
        """Fail the current tokenization route.

        Discards the current stack/context/textbuffer and raises
        :py:exc:`~.BadRoute`.
        """
        self._pop()
        raise BadRoute()

    def _write(self, token):
        """Write a token to the end of the current token stack."""
        self._push_textbuffer()
        self._stack.append(token)

    def _write_first(self, token):
        """Write a token to the beginning of the current token stack."""
        self._push_textbuffer()
        self._stack.insert(0, token)

    def _write_text(self, text):
        """Write text to the current textbuffer."""
        self._textbuffer.append(text)

    def _write_all(self, tokenlist):
        """Write a series of tokens to the current stack at once."""
        if tokenlist and isinstance(tokenlist[0], tokens.Text):
            self._write_text(tokenlist.pop(0).text)
        self._push_textbuffer()
        self._stack.extend(tokenlist)

    def _write_text_then_stack(self, text):
        """Pop the current stack, write *text*, and then write the stack."""
        stack = self._pop()
        self._write_text(text)
        if stack:
            self._write_all(stack)
        self._head -= 1

    def _read(self, delta=0, wrap=False, strict=False):
        """Read the value at a relative point in the wikicode.

        The value is read from :py:attr:`self._head <_head>` plus the value of
        *delta* (which can be negative). If *wrap* is ``False``, we will not
        allow attempts to read from the end of the string if ``self._head +
        delta`` is negative. If *strict* is ``True``, the route will be failed
        (with :py:meth:`_fail_route`) if we try to read from past the end of
        the string; otherwise, :py:attr:`self.END <END>` is returned. If we try
        to read from before the start of the string, :py:attr:`self.START
        <START>` is returned.
        """
        index = self._head + delta
        if index < 0 and (not wrap or abs(index) > len(self._text)):
            return self.START
        try:
            return self._text[index]
        except IndexError:
            if strict:
                self._fail_route()
            return self.END
    def _parse_template_or_argument(self):
        """Parse a template or argument at the head of the wikicode string."""
        self._head += 2
        braces = 2
        while self._read() == "{":
            braces += 1
            self._head += 1
        self._push()

        while braces:
            if braces == 1:
                return self._write_text_then_stack("{")
            if braces == 2:
                try:
                    self._parse_template()
                except BadRoute:
                    return self._write_text_then_stack("{{")
                break
            # With three or more braces, try an argument ({{{...}}}, which
            # consumes three braces) before falling back to a template
            # ({{...}}, which consumes two), and finally to plain text.
            try:
                self._parse_argument()
                braces -= 3
            except BadRoute:
                try:
                    self._parse_template()
                    braces -= 2
                except BadRoute:
                    return self._write_text_then_stack("{" * braces)
            if braces:
                self._head += 1

        self._write_all(self._pop())

    def _parse_template(self):
        """Parse a template at the head of the wikicode string."""
        reset = self._head
        try:
            template = self._parse(contexts.TEMPLATE_NAME)
        except BadRoute:
            self._head = reset
            raise
        else:
            self._write_first(tokens.TemplateOpen())
            self._write_all(template)
            self._write(tokens.TemplateClose())

    def _parse_argument(self):
        """Parse an argument at the head of the wikicode string."""
        reset = self._head
        try:
            argument = self._parse(contexts.ARGUMENT_NAME)
        except BadRoute:
            self._head = reset
            raise
        else:
            self._write_first(tokens.ArgumentOpen())
            self._write_all(argument)
            self._write(tokens.ArgumentClose())

    def _verify_safe(self, unsafes):
        """Verify that there are no unsafe characters in the current stack.

        The route will be failed if the name contains any element of *unsafes*
        in it (not merely at the beginning or end). This is used when parsing a
        template name or parameter key, which cannot contain newlines.
        """
        self._push_textbuffer()
        if self._stack:
            text = [tok for tok in self._stack if isinstance(tok, tokens.Text)]
            text = "".join([token.text for token in text]).strip()
            if text and any([unsafe in text for unsafe in unsafes]):
                self._fail_route()

    def _handle_template_param(self):
        """Handle a template parameter at the head of the string."""
        if self._context & contexts.TEMPLATE_NAME:
            self._verify_safe(["\n", "{", "}", "[", "]"])
            self._context ^= contexts.TEMPLATE_NAME
        elif self._context & contexts.TEMPLATE_PARAM_VALUE:
            self._context ^= contexts.TEMPLATE_PARAM_VALUE
        elif self._context & contexts.TEMPLATE_PARAM_KEY:
            self._write_all(self._pop(keep_context=True))
        self._context |= contexts.TEMPLATE_PARAM_KEY
        self._write(tokens.TemplateParamSeparator())
        self._push(self._context)

    def _handle_template_param_value(self):
        """Handle a template parameter's value at the head of the string."""
        try:
            self._verify_safe(["\n", "{{", "}}"])
        except BadRoute:
            self._pop()
            raise
        else:
            self._write_all(self._pop(keep_context=True))
        self._context ^= contexts.TEMPLATE_PARAM_KEY
        self._context |= contexts.TEMPLATE_PARAM_VALUE
        self._write(tokens.TemplateParamEquals())

    def _handle_template_end(self):
        """Handle the end of a template at the head of the string."""
        if self._context & contexts.TEMPLATE_NAME:
            self._verify_safe(["\n", "{", "}", "[", "]"])
        elif self._context & contexts.TEMPLATE_PARAM_KEY:
            self._write_all(self._pop(keep_context=True))
        self._head += 1
        return self._pop()

    def _handle_argument_separator(self):
        """Handle the separator between an argument's name and default."""
        self._verify_safe(["\n", "{{", "}}"])
        self._context ^= contexts.ARGUMENT_NAME
        self._context |= contexts.ARGUMENT_DEFAULT
        self._write(tokens.ArgumentSeparator())

    def _handle_argument_end(self):
        """Handle the end of an argument at the head of the string."""
        if self._context & contexts.ARGUMENT_NAME:
            self._verify_safe(["\n", "{{", "}}"])
        self._head += 2
        return self._pop()

    def _parse_heading(self):
        """Parse a section heading at the head of the wikicode string."""
        self._global |= contexts.GL_HEADING
        reset = self._head
        self._head += 1
        best = 1
        while self._read() == "=":
            best += 1
            self._head += 1
        context = contexts.HEADING_LEVEL_1 << min(best - 1, 5)

        try:
            title, level = self._parse(context)
        except BadRoute:
            self._head = reset + best - 1
            self._write_text("=" * best)
        else:
            self._write(tokens.HeadingStart(level=level))
            if level < best:
                self._write_text("=" * (best - level))
            self._write_all(title)
            self._write(tokens.HeadingEnd())
        finally:
            self._global ^= contexts.GL_HEADING

    def _handle_heading_end(self):
        """Handle the end of a section heading at the head of the string."""
        reset = self._head
        self._head += 1
        best = 1
        while self._read() == "=":
            best += 1
            self._head += 1
        # The heading context is a single HEADING_LEVEL_* bit; its offset from
        # HEADING_LEVEL_1 (a base-2 log) recovers the current heading level.
        current = int(log(self._context / contexts.HEADING_LEVEL_1, 2)) + 1
        level = min(current, min(best, 6))

        try:
            after, after_level = self._parse(self._context)
        except BadRoute:
            if level < best:
                self._write_text("=" * (best - level))
            self._head = reset + best - 1
            return self._pop(), level
        else:
            self._write_text("=" * best)
            self._write_all(after)
            return self._pop(), after_level
    def _really_parse_entity(self):
        """Actually parse an HTML entity and ensure that it is valid."""
        self._write(tokens.HTMLEntityStart())
        self._head += 1

        this = self._read(strict=True)
        if this == "#":
            numeric = True
            self._write(tokens.HTMLEntityNumeric())
            self._head += 1
            this = self._read(strict=True)
            if this[0].lower() == "x":
                hexadecimal = True
                self._write(tokens.HTMLEntityHex(char=this[0]))
                this = this[1:]
                if not this:
                    self._fail_route()
            else:
                hexadecimal = False
        else:
            numeric = hexadecimal = False

        valid = string.hexdigits if hexadecimal else string.digits
        if not numeric and not hexadecimal:
            valid += string.ascii_letters
        if not all([char in valid for char in this]):
            self._fail_route()

        self._head += 1
        if self._read() != ";":
            self._fail_route()
        if numeric:
            test = int(this, 16) if hexadecimal else int(this)
            if test < 1 or test > 0x10FFFF:
                self._fail_route()
        else:
            if this not in htmlentities.entitydefs:
                self._fail_route()

        self._write(tokens.Text(text=this))
        self._write(tokens.HTMLEntityEnd())

    def _parse_entity(self):
        """Parse an HTML entity at the head of the wikicode string."""
        reset = self._head
        self._push()
        try:
            self._really_parse_entity()
        except BadRoute:
            self._head = reset
            self._write_text(self._read())
        else:
            self._write_all(self._pop())
    def _parse(self, context=0):
        """Parse the wikicode string, using *context* for when to stop."""
        self._push(context)
        while True:
            this = self._read()
            if this not in self.MARKERS:
                self._write_text(this)
                self._head += 1
                continue
            if this is self.END:
                fail = contexts.TEMPLATE | contexts.ARGUMENT | contexts.HEADING
                if self._context & fail:
                    self._fail_route()
                return self._pop()
            next = self._read(1)
            if this == next == "{":
                self._parse_template_or_argument()
            elif this == "|" and self._context & contexts.TEMPLATE:
                self._handle_template_param()
            elif this == "=" and self._context & contexts.TEMPLATE_PARAM_KEY:
                self._handle_template_param_value()
            elif this == next == "}" and self._context & contexts.TEMPLATE:
                return self._handle_template_end()
            elif this == "|" and self._context & contexts.ARGUMENT_NAME:
                self._handle_argument_separator()
            elif this == next == "}" and self._context & contexts.ARGUMENT:
                if self._read(2) == "}":
                    return self._handle_argument_end()
                else:
                    self._write_text("}")
            elif this == "=" and not self._global & contexts.GL_HEADING:
                if self._read(-1) in ("\n", self.START):
                    self._parse_heading()
                else:
                    self._write_text("=")
            elif this == "=" and self._context & contexts.HEADING:
                return self._handle_heading_end()
            elif this == "\n" and self._context & contexts.HEADING:
                self._fail_route()
            elif this == "&":
                self._parse_entity()
            else:
                self._write_text(this)
            self._head += 1

    def tokenize(self, text):
        """Build a list of tokens from a string of wikicode and return it."""
        split = self.regex.split(text)
        self._text = [segment for segment in split if segment]
        return self._parse()
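
For reference, a short sketch of how this Tokenizer could be exercised on its own, assuming the surrounding mwparserfromhell package (which supplies the contexts, tokens, and compat modules imported above) is importable; the token names in the comment are the classes emitted by this file and are indicative rather than exhaustive:

    from mwparserfromhell.parser.tokenizer import Tokenizer

    # Tokenize a template with one keyword parameter. The result is a flat
    # list of token objects, roughly: TemplateOpen, Text("foo"),
    # TemplateParamSeparator, Text("bar"), TemplateParamEquals,
    # Text("baz"), TemplateClose.
    token_list = Tokenizer().tokenize("{{foo|bar=baz}}")
    for token in token_list:
        print(type(token).__name__)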