A Python parser for MediaWiki wikicode https://mwparserfromhell.readthedocs.io/
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

tokenizer.py 41 KiB

11 years ago
11 years ago
11 years ago
11 years ago
11 years ago
11 years ago
11 years ago
11 years ago
11 years ago
11 years ago
11 years ago
11 years ago
12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084
  1. # -*- coding: utf-8 -*-
  2. #
  3. # Copyright (C) 2012-2013 Ben Kurtovic <ben.kurtovic@verizon.net>
  4. #
  5. # Permission is hereby granted, free of charge, to any person obtaining a copy
  6. # of this software and associated documentation files (the "Software"), to deal
  7. # in the Software without restriction, including without limitation the rights
  8. # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
  9. # copies of the Software, and to permit persons to whom the Software is
  10. # furnished to do so, subject to the following conditions:
  11. #
  12. # The above copyright notice and this permission notice shall be included in
  13. # all copies or substantial portions of the Software.
  14. #
  15. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16. # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17. # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  18. # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  19. # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  20. # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  21. # SOFTWARE.
  22. from __future__ import unicode_literals
  23. from math import log
  24. import re
  25. from . import contexts, tokens
  26. from ..compat import htmlentities
  27. from ..definitions import (get_html_tag, is_parsable, is_single,
  28. is_single_only, is_scheme)
  29. __all__ = ["Tokenizer"]
  30. class BadRoute(Exception):
  31. """Raised internally when the current tokenization route is invalid."""
  32. def __init__(self, context=0):
  33. self.context = context
  34. class _TagOpenData(object):
  35. """Stores data about an HTML open tag, like ``<ref name="foo">``."""
  36. CX_NAME = 1 << 0
  37. CX_ATTR_READY = 1 << 1
  38. CX_ATTR_NAME = 1 << 2
  39. CX_ATTR_VALUE = 1 << 3
  40. CX_QUOTED = 1 << 4
  41. CX_NOTE_SPACE = 1 << 5
  42. CX_NOTE_EQUALS = 1 << 6
  43. CX_NOTE_QUOTE = 1 << 7
  44. def __init__(self):
  45. self.context = self.CX_NAME
  46. self.padding_buffer = {"first": "", "before_eq": "", "after_eq": ""}
  47. self.reset = 0
  48. class Tokenizer(object):
  49. """Creates a list of tokens from a string of wikicode."""
  50. USES_C = False
  51. START = object()
  52. END = object()
  53. MARKERS = ["{", "}", "[", "]", "<", ">", "|", "=", "&", "'", "#", "*", ";",
  54. ":", "/", "-", "\n", START, END]
  55. MAX_DEPTH = 40
  56. MAX_CYCLES = 100000
  57. regex = re.compile(r"([{}\[\]<>|=&'#*;:/\\\"\-!\n])", flags=re.IGNORECASE)
  58. tag_splitter = re.compile(r"([\s\"\\]+)")
  59. def __init__(self):
  60. self._text = None
  61. self._head = 0
  62. self._stacks = []
  63. self._global = 0
  64. self._depth = 0
  65. self._cycles = 0
  66. @property
  67. def _stack(self):
  68. """The current token stack."""
  69. return self._stacks[-1][0]
  70. @property
  71. def _context(self):
  72. """The current token context."""
  73. return self._stacks[-1][1]
  74. @_context.setter
  75. def _context(self, value):
  76. self._stacks[-1][1] = value
  77. @property
  78. def _textbuffer(self):
  79. """The current textbuffer."""
  80. return self._stacks[-1][2]
  81. @_textbuffer.setter
  82. def _textbuffer(self, value):
  83. self._stacks[-1][2] = value
  84. def _push(self, context=0):
  85. """Add a new token stack, context, and textbuffer to the list."""
  86. self._stacks.append([[], context, []])
  87. self._depth += 1
  88. self._cycles += 1
  89. def _push_textbuffer(self):
  90. """Push the textbuffer onto the stack as a Text node and clear it."""
  91. if self._textbuffer:
  92. self._stack.append(tokens.Text(text="".join(self._textbuffer)))
  93. self._textbuffer = []
  94. def _pop(self, keep_context=False):
  95. """Pop the current stack/context/textbuffer, returing the stack.
  96. If *keep_context* is ``True``, then we will replace the underlying
  97. stack's context with the current stack's.
  98. """
  99. self._push_textbuffer()
  100. self._depth -= 1
  101. if keep_context:
  102. context = self._context
  103. stack = self._stacks.pop()[0]
  104. self._context = context
  105. return stack
  106. return self._stacks.pop()[0]
  107. def _can_recurse(self):
  108. """Return whether or not our max recursion depth has been exceeded."""
  109. return self._depth < self.MAX_DEPTH and self._cycles < self.MAX_CYCLES
  110. def _fail_route(self):
  111. """Fail the current tokenization route.
  112. Discards the current stack/context/textbuffer and raises
  113. :py:exc:`~.BadRoute`.
  114. """
  115. context = self._context
  116. self._pop()
  117. raise BadRoute(context)
  118. def _emit(self, token):
  119. """Write a token to the end of the current token stack."""
  120. self._push_textbuffer()
  121. self._stack.append(token)
  122. def _emit_first(self, token):
  123. """Write a token to the beginning of the current token stack."""
  124. self._push_textbuffer()
  125. self._stack.insert(0, token)
  126. def _emit_text(self, text):
  127. """Write text to the current textbuffer."""
  128. self._textbuffer.append(text)
  129. def _emit_all(self, tokenlist):
  130. """Write a series of tokens to the current stack at once."""
  131. if tokenlist and isinstance(tokenlist[0], tokens.Text):
  132. self._emit_text(tokenlist.pop(0).text)
  133. self._push_textbuffer()
  134. self._stack.extend(tokenlist)
  135. def _emit_text_then_stack(self, text):
  136. """Pop the current stack, write *text*, and then write the stack."""
  137. stack = self._pop()
  138. self._emit_text(text)
  139. if stack:
  140. self._emit_all(stack)
  141. self._head -= 1
  142. def _read(self, delta=0, wrap=False, strict=False):
  143. """Read the value at a relative point in the wikicode.
  144. The value is read from :py:attr:`self._head <_head>` plus the value of
  145. *delta* (which can be negative). If *wrap* is ``False``, we will not
  146. allow attempts to read from the end of the string if ``self._head +
  147. delta`` is negative. If *strict* is ``True``, the route will be failed
  148. (with :py:meth:`_fail_route`) if we try to read from past the end of
  149. the string; otherwise, :py:attr:`self.END <END>` is returned. If we try
  150. to read from before the start of the string, :py:attr:`self.START
  151. <START>` is returned.
  152. """
  153. index = self._head + delta
  154. if index < 0 and (not wrap or abs(index) > len(self._text)):
  155. return self.START
  156. try:
  157. return self._text[index]
  158. except IndexError:
  159. if strict:
  160. self._fail_route()
  161. return self.END
  162. def _parse_template(self):
  163. """Parse a template at the head of the wikicode string."""
  164. reset = self._head
  165. try:
  166. template = self._parse(contexts.TEMPLATE_NAME)
  167. except BadRoute:
  168. self._head = reset
  169. raise
  170. self._emit_first(tokens.TemplateOpen())
  171. self._emit_all(template)
  172. self._emit(tokens.TemplateClose())
  173. def _parse_argument(self):
  174. """Parse an argument at the head of the wikicode string."""
  175. reset = self._head
  176. try:
  177. argument = self._parse(contexts.ARGUMENT_NAME)
  178. except BadRoute:
  179. self._head = reset
  180. raise
  181. self._emit_first(tokens.ArgumentOpen())
  182. self._emit_all(argument)
  183. self._emit(tokens.ArgumentClose())
  184. def _parse_template_or_argument(self):
  185. """Parse a template or argument at the head of the wikicode string."""
  186. self._head += 2
  187. braces = 2
  188. while self._read() == "{":
  189. self._head += 1
  190. braces += 1
  191. self._push()
  192. while braces:
  193. if braces == 1:
  194. return self._emit_text_then_stack("{")
  195. if braces == 2:
  196. try:
  197. self._parse_template()
  198. except BadRoute:
  199. return self._emit_text_then_stack("{{")
  200. break
  201. try:
  202. self._parse_argument()
  203. braces -= 3
  204. except BadRoute:
  205. try:
  206. self._parse_template()
  207. braces -= 2
  208. except BadRoute:
  209. return self._emit_text_then_stack("{" * braces)
  210. if braces:
  211. self._head += 1
  212. self._emit_all(self._pop())
  213. if self._context & contexts.FAIL_NEXT:
  214. self._context ^= contexts.FAIL_NEXT
  215. def _handle_template_param(self):
  216. """Handle a template parameter at the head of the string."""
  217. if self._context & contexts.TEMPLATE_NAME:
  218. self._context ^= contexts.TEMPLATE_NAME
  219. elif self._context & contexts.TEMPLATE_PARAM_VALUE:
  220. self._context ^= contexts.TEMPLATE_PARAM_VALUE
  221. elif self._context & contexts.TEMPLATE_PARAM_KEY:
  222. self._emit_all(self._pop(keep_context=True))
  223. self._context |= contexts.TEMPLATE_PARAM_KEY
  224. self._emit(tokens.TemplateParamSeparator())
  225. self._push(self._context)
  226. def _handle_template_param_value(self):
  227. """Handle a template parameter's value at the head of the string."""
  228. self._emit_all(self._pop(keep_context=True))
  229. self._context ^= contexts.TEMPLATE_PARAM_KEY
  230. self._context |= contexts.TEMPLATE_PARAM_VALUE
  231. self._emit(tokens.TemplateParamEquals())
  232. def _handle_template_end(self):
  233. """Handle the end of a template at the head of the string."""
  234. if self._context & contexts.TEMPLATE_PARAM_KEY:
  235. self._emit_all(self._pop(keep_context=True))
  236. self._head += 1
  237. return self._pop()
  238. def _handle_argument_separator(self):
  239. """Handle the separator between an argument's name and default."""
  240. self._context ^= contexts.ARGUMENT_NAME
  241. self._context |= contexts.ARGUMENT_DEFAULT
  242. self._emit(tokens.ArgumentSeparator())
  243. def _handle_argument_end(self):
  244. """Handle the end of an argument at the head of the string."""
  245. self._head += 2
  246. return self._pop()
  247. def _parse_wikilink(self):
  248. """Parse an internal wikilink at the head of the wikicode string."""
  249. self._head += 2
  250. reset = self._head - 1
  251. try:
  252. wikilink = self._parse(contexts.WIKILINK_TITLE)
  253. except BadRoute:
  254. self._head = reset
  255. self._emit_text("[[")
  256. else:
  257. if self._context & contexts.FAIL_NEXT:
  258. self._context ^= contexts.FAIL_NEXT
  259. self._emit(tokens.WikilinkOpen())
  260. self._emit_all(wikilink)
  261. self._emit(tokens.WikilinkClose())
  262. def _handle_wikilink_separator(self):
  263. """Handle the separator between a wikilink's title and its text."""
  264. self._context ^= contexts.WIKILINK_TITLE
  265. self._context |= contexts.WIKILINK_TEXT
  266. self._emit(tokens.WikilinkSeparator())
  267. def _handle_wikilink_end(self):
  268. """Handle the end of a wikilink at the head of the string."""
  269. self._head += 1
  270. return self._pop()
  271. def _really_parse_external_link(self, brackets):
  272. """Really parse an external link."""
  273. scheme_valid = "abcdefghijklmnopqrstuvwxyz0123456789+.-"
  274. if brackets:
  275. self._push(contexts.EXT_LINK_URI)
  276. if self._read() == self._read(1) == "/":
  277. self._emit_text("//")
  278. self._head += 2
  279. else:
  280. scheme = ""
  281. while all(char in scheme_valid for char in self._read()):
  282. scheme += self._read()
  283. self._emit_text(self._read())
  284. self._head += 1
  285. if self._read() != ":":
  286. self._fail_route()
  287. self._emit_text(":")
  288. self._head += 1
  289. slashes = self._read() == self._read(1) == "/"
  290. if slashes:
  291. self._emit_text("//")
  292. self._head += 2
  293. if not is_scheme(scheme, slashes):
  294. self._fail_route()
  295. else:
  296. scheme = []
  297. try:
  298. # Ugly, but we have to backtrack through the textbuffer looking
  299. # for our scheme since it was just parsed as text:
  300. for i in range(-1, -len(self._textbuffer) - 1, -1):
  301. for char in reversed(self._textbuffer[i]):
  302. if char.isspace() or char in self.MARKERS:
  303. raise StopIteration()
  304. if char not in scheme_valid:
  305. raise BadRoute()
  306. scheme.append(char)
  307. except StopIteration:
  308. pass
  309. scheme = "".join(reversed(scheme))
  310. slashes = self._read() == self._read(1) == "/"
  311. if not is_scheme(scheme, slashes):
  312. raise BadRoute()
  313. # Remove the scheme from the textbuffer, now that it's part of the
  314. # external link:
  315. length = len(scheme)
  316. while length:
  317. if length < len(self._textbuffer[-1]):
  318. self._textbuffer[-1] = self._textbuffer[-1][:-length]
  319. break
  320. length -= len(self._textbuffer[-1])
  321. self._textbuffer.pop()
  322. self._push(contexts.EXT_LINK_URI)
  323. self._emit_text(scheme)
  324. self._emit_text(":")
  325. if slashes:
  326. self._emit_text("//")
  327. self._head += 2
  328. parentheses = False
  329. while True:
  330. this, next = self._read(), self._read(1)
  331. if this is self.END or this == "\n":
  332. if brackets:
  333. self._fail_route()
  334. self._head -= 1
  335. return self._pop(), None
  336. elif this == next == "{" and self._can_recurse():
  337. self._parse_template_or_argument()
  338. elif this == "&":
  339. self._parse_entity()
  340. elif this == "]":
  341. if not brackets:
  342. self._head -= 1
  343. return self._pop(), None
  344. elif this == "(" and not brackets and not parentheses:
  345. parentheses = True
  346. self._emit_text(this)
  347. elif " " in this: ## Should be a more general whitespace check
  348. before, after = this.split(" ", 1)
  349. self._emit_text(before)
  350. if brackets:
  351. self._emit(tokens.ExternalLinkSeparator())
  352. self._emit_text(after)
  353. self._context ^= contexts.EXT_LINK_URI
  354. self._context |= contexts.EXT_LINK_TITLE
  355. self._head += 1
  356. return self._parse(push=False), None
  357. return self._pop(), " " + after
  358. else:
  359. self._emit_text(this)
  360. self._head += 1
  361. def _parse_external_link(self, brackets):
  362. """Parse an external link at the head of the wikicode string."""
  363. reset = self._head
  364. self._head += 1
  365. try:
  366. bad_context = self._context & contexts.INVALID_LINK
  367. if bad_context or not self._can_recurse():
  368. raise BadRoute()
  369. link, extra = self._really_parse_external_link(brackets)
  370. except BadRoute:
  371. self._head = reset
  372. if not brackets and self._context & contexts.DL_TERM:
  373. self._handle_dl_term()
  374. else:
  375. self._emit_text(self._read())
  376. else:
  377. self._emit(tokens.ExternalLinkOpen(brackets=brackets))
  378. self._emit_all(link)
  379. self._emit(tokens.ExternalLinkClose())
  380. if extra:
  381. self._emit_text(extra)
  382. def _parse_heading(self):
  383. """Parse a section heading at the head of the wikicode string."""
  384. self._global |= contexts.GL_HEADING
  385. reset = self._head
  386. self._head += 1
  387. best = 1
  388. while self._read() == "=":
  389. best += 1
  390. self._head += 1
  391. context = contexts.HEADING_LEVEL_1 << min(best - 1, 5)
  392. try:
  393. title, level = self._parse(context)
  394. except BadRoute:
  395. self._head = reset + best - 1
  396. self._emit_text("=" * best)
  397. else:
  398. self._emit(tokens.HeadingStart(level=level))
  399. if level < best:
  400. self._emit_text("=" * (best - level))
  401. self._emit_all(title)
  402. self._emit(tokens.HeadingEnd())
  403. finally:
  404. self._global ^= contexts.GL_HEADING
  405. def _handle_heading_end(self):
  406. """Handle the end of a section heading at the head of the string."""
  407. reset = self._head
  408. self._head += 1
  409. best = 1
  410. while self._read() == "=":
  411. best += 1
  412. self._head += 1
  413. current = int(log(self._context / contexts.HEADING_LEVEL_1, 2)) + 1
  414. level = min(current, min(best, 6))
  415. try: # Try to check for a heading closure after this one
  416. after, after_level = self._parse(self._context)
  417. except BadRoute:
  418. if level < best:
  419. self._emit_text("=" * (best - level))
  420. self._head = reset + best - 1
  421. return self._pop(), level
  422. else: # Found another closure
  423. self._emit_text("=" * best)
  424. self._emit_all(after)
  425. return self._pop(), after_level
  426. def _really_parse_entity(self):
  427. """Actually parse an HTML entity and ensure that it is valid."""
  428. self._emit(tokens.HTMLEntityStart())
  429. self._head += 1
  430. this = self._read(strict=True)
  431. if this == "#":
  432. numeric = True
  433. self._emit(tokens.HTMLEntityNumeric())
  434. self._head += 1
  435. this = self._read(strict=True)
  436. if this[0].lower() == "x":
  437. hexadecimal = True
  438. self._emit(tokens.HTMLEntityHex(char=this[0]))
  439. this = this[1:]
  440. if not this:
  441. self._fail_route()
  442. else:
  443. hexadecimal = False
  444. else:
  445. numeric = hexadecimal = False
  446. valid = "0123456789abcdefABCDEF" if hexadecimal else "0123456789"
  447. if not numeric and not hexadecimal:
  448. valid += "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
  449. if not all([char in valid for char in this]):
  450. self._fail_route()
  451. self._head += 1
  452. if self._read() != ";":
  453. self._fail_route()
  454. if numeric:
  455. test = int(this, 16) if hexadecimal else int(this)
  456. if test < 1 or test > 0x10FFFF:
  457. self._fail_route()
  458. else:
  459. if this not in htmlentities.entitydefs:
  460. self._fail_route()
  461. self._emit(tokens.Text(text=this))
  462. self._emit(tokens.HTMLEntityEnd())
  463. def _parse_entity(self):
  464. """Parse an HTML entity at the head of the wikicode string."""
  465. reset = self._head
  466. self._push()
  467. try:
  468. self._really_parse_entity()
  469. except BadRoute:
  470. self._head = reset
  471. self._emit_text(self._read())
  472. else:
  473. self._emit_all(self._pop())
  474. def _parse_comment(self):
  475. """Parse an HTML comment at the head of the wikicode string."""
  476. self._head += 4
  477. reset = self._head - 1
  478. self._push()
  479. while True:
  480. this = self._read()
  481. if this == self.END:
  482. self._pop()
  483. self._head = reset
  484. self._emit_text("<!--")
  485. return
  486. if this == self._read(1) == "-" and self._read(2) == ">":
  487. self._emit_first(tokens.CommentStart())
  488. self._emit(tokens.CommentEnd())
  489. self._emit_all(self._pop())
  490. self._head += 2
  491. return
  492. self._emit_text(this)
  493. self._head += 1
  494. def _push_tag_buffer(self, data):
  495. """Write a pending tag attribute from *data* to the stack."""
  496. if data.context & data.CX_QUOTED:
  497. self._emit_first(tokens.TagAttrQuote())
  498. self._emit_all(self._pop())
  499. buf = data.padding_buffer
  500. self._emit_first(tokens.TagAttrStart(pad_first=buf["first"],
  501. pad_before_eq=buf["before_eq"], pad_after_eq=buf["after_eq"]))
  502. self._emit_all(self._pop())
  503. data.padding_buffer = {key: "" for key in data.padding_buffer}
  504. def _handle_tag_space(self, data, text):
  505. """Handle whitespace (*text*) inside of an HTML open tag."""
  506. ctx = data.context
  507. end_of_value = ctx & data.CX_ATTR_VALUE and not ctx & (data.CX_QUOTED | data.CX_NOTE_QUOTE)
  508. if end_of_value or (ctx & data.CX_QUOTED and ctx & data.CX_NOTE_SPACE):
  509. self._push_tag_buffer(data)
  510. data.context = data.CX_ATTR_READY
  511. elif ctx & data.CX_NOTE_SPACE:
  512. data.context = data.CX_ATTR_READY
  513. elif ctx & data.CX_ATTR_NAME:
  514. data.context |= data.CX_NOTE_EQUALS
  515. data.padding_buffer["before_eq"] += text
  516. if ctx & data.CX_QUOTED and not ctx & data.CX_NOTE_SPACE:
  517. self._emit_text(text)
  518. elif data.context & data.CX_ATTR_READY:
  519. data.padding_buffer["first"] += text
  520. elif data.context & data.CX_ATTR_VALUE:
  521. data.padding_buffer["after_eq"] += text
  522. def _handle_tag_text(self, text):
  523. """Handle regular *text* inside of an HTML open tag."""
  524. next = self._read(1)
  525. if not self._can_recurse() or text not in self.MARKERS:
  526. self._emit_text(text)
  527. elif text == next == "{":
  528. self._parse_template_or_argument()
  529. elif text == next == "[":
  530. self._parse_wikilink()
  531. elif text == "<":
  532. self._parse_tag()
  533. else:
  534. self._emit_text(text)
  535. def _handle_tag_data(self, data, text):
  536. """Handle all sorts of *text* data inside of an HTML open tag."""
  537. for chunk in self.tag_splitter.split(text):
  538. if not chunk:
  539. continue
  540. if data.context & data.CX_NAME:
  541. if chunk in self.MARKERS or chunk.isspace():
  542. self._fail_route() # Tags must start with text, not spaces
  543. data.context = data.CX_NOTE_SPACE
  544. elif chunk.isspace():
  545. self._handle_tag_space(data, chunk)
  546. continue
  547. elif data.context & data.CX_NOTE_SPACE:
  548. if data.context & data.CX_QUOTED:
  549. data.context = data.CX_ATTR_VALUE
  550. self._pop()
  551. self._head = data.reset - 1 # Will be auto-incremented
  552. return # Break early
  553. self._fail_route()
  554. elif data.context & data.CX_ATTR_READY:
  555. data.context = data.CX_ATTR_NAME
  556. self._push(contexts.TAG_ATTR)
  557. elif data.context & data.CX_ATTR_NAME:
  558. if chunk == "=":
  559. data.context = data.CX_ATTR_VALUE | data.CX_NOTE_QUOTE
  560. self._emit(tokens.TagAttrEquals())
  561. continue
  562. if data.context & data.CX_NOTE_EQUALS:
  563. self._push_tag_buffer(data)
  564. data.context = data.CX_ATTR_NAME
  565. self._push(contexts.TAG_ATTR)
  566. elif data.context & data.CX_ATTR_VALUE:
  567. escaped = self._read(-1) == "\\" and self._read(-2) != "\\"
  568. if data.context & data.CX_NOTE_QUOTE:
  569. data.context ^= data.CX_NOTE_QUOTE
  570. if chunk == '"' and not escaped:
  571. data.context |= data.CX_QUOTED
  572. self._push(self._context)
  573. data.reset = self._head
  574. continue
  575. elif data.context & data.CX_QUOTED:
  576. if chunk == '"' and not escaped:
  577. data.context |= data.CX_NOTE_SPACE
  578. continue
  579. self._handle_tag_text(chunk)
  580. def _handle_tag_close_open(self, data, token):
  581. """Handle the closing of a open tag (``<foo>``)."""
  582. if data.context & (data.CX_ATTR_NAME | data.CX_ATTR_VALUE):
  583. self._push_tag_buffer(data)
  584. self._emit(token(padding=data.padding_buffer["first"]))
  585. self._head += 1
  586. def _handle_tag_open_close(self):
  587. """Handle the opening of a closing tag (``</foo>``)."""
  588. self._emit(tokens.TagOpenClose())
  589. self._push(contexts.TAG_CLOSE)
  590. self._head += 1
  591. def _handle_tag_close_close(self):
  592. """Handle the ending of a closing tag (``</foo>``)."""
  593. strip = lambda tok: tok.text.rstrip().lower()
  594. closing = self._pop()
  595. if len(closing) != 1 or (not isinstance(closing[0], tokens.Text) or
  596. strip(closing[0]) != strip(self._stack[1])):
  597. self._fail_route()
  598. self._emit_all(closing)
  599. self._emit(tokens.TagCloseClose())
  600. return self._pop()
  601. def _handle_blacklisted_tag(self):
  602. """Handle the body of an HTML tag that is parser-blacklisted."""
  603. while True:
  604. this, next = self._read(), self._read(1)
  605. if this is self.END:
  606. self._fail_route()
  607. elif this == "<" and next == "/":
  608. self._handle_tag_open_close()
  609. self._head += 1
  610. return self._parse(push=False)
  611. elif this == "&":
  612. self._parse_entity()
  613. else:
  614. self._emit_text(this)
  615. self._head += 1
  616. def _handle_single_only_tag_end(self):
  617. """Handle the end of an implicitly closing single-only HTML tag."""
  618. padding = self._stack.pop().padding
  619. self._emit(tokens.TagCloseSelfclose(padding=padding, implicit=True))
  620. self._head -= 1 # Offset displacement done by _handle_tag_close_open
  621. return self._pop()
  622. def _handle_single_tag_end(self):
  623. """Handle the stream end when inside a single-supporting HTML tag."""
  624. gen = enumerate(self._stack)
  625. index = next(i for i, t in gen if isinstance(t, tokens.TagCloseOpen))
  626. padding = self._stack[index].padding
  627. token = tokens.TagCloseSelfclose(padding=padding, implicit=True)
  628. self._stack[index] = token
  629. return self._pop()
  630. def _really_parse_tag(self):
  631. """Actually parse an HTML tag, starting with the open (``<foo>``)."""
  632. data = _TagOpenData()
  633. self._push(contexts.TAG_OPEN)
  634. self._emit(tokens.TagOpenOpen())
  635. while True:
  636. this, next = self._read(), self._read(1)
  637. can_exit = (not data.context & (data.CX_QUOTED | data.CX_NAME) or
  638. data.context & data.CX_NOTE_SPACE)
  639. if this is self.END:
  640. if self._context & contexts.TAG_ATTR:
  641. if data.context & data.CX_QUOTED:
  642. # Unclosed attribute quote: reset, don't die
  643. data.context = data.CX_ATTR_VALUE
  644. self._pop()
  645. self._head = data.reset
  646. continue
  647. self._pop()
  648. self._fail_route()
  649. elif this == ">" and can_exit:
  650. self._handle_tag_close_open(data, tokens.TagCloseOpen)
  651. self._context = contexts.TAG_BODY
  652. if is_single_only(self._stack[1].text):
  653. return self._handle_single_only_tag_end()
  654. if is_parsable(self._stack[1].text):
  655. return self._parse(push=False)
  656. return self._handle_blacklisted_tag()
  657. elif this == "/" and next == ">" and can_exit:
  658. self._handle_tag_close_open(data, tokens.TagCloseSelfclose)
  659. return self._pop()
  660. else:
  661. self._handle_tag_data(data, this)
  662. self._head += 1
  663. def _handle_invalid_tag_start(self):
  664. """Handle the (possible) start of an implicitly closing single tag."""
  665. reset = self._head + 1
  666. self._head += 2
  667. try:
  668. if not is_single_only(self.tag_splitter.split(self._read())[0]):
  669. raise BadRoute()
  670. tag = self._really_parse_tag()
  671. except BadRoute:
  672. self._head = reset
  673. self._emit_text("</")
  674. else:
  675. tag[0].invalid = True # Set flag of TagOpenOpen
  676. self._emit_all(tag)
  677. def _parse_tag(self):
  678. """Parse an HTML tag at the head of the wikicode string."""
  679. reset = self._head
  680. self._head += 1
  681. try:
  682. tag = self._really_parse_tag()
  683. except BadRoute:
  684. self._head = reset
  685. self._emit_text("<")
  686. else:
  687. self._emit_all(tag)
  688. def _emit_style_tag(self, tag, markup, body):
  689. """Write the body of a tag and the tokens that should surround it."""
  690. self._emit(tokens.TagOpenOpen(wiki_markup=markup))
  691. self._emit_text(tag)
  692. self._emit(tokens.TagCloseOpen())
  693. self._emit_all(body)
  694. self._emit(tokens.TagOpenClose())
  695. self._emit_text(tag)
  696. self._emit(tokens.TagCloseClose())
  697. def _parse_italics(self):
  698. """Parse wiki-style italics."""
  699. reset = self._head
  700. try:
  701. stack = self._parse(contexts.STYLE_ITALICS)
  702. except BadRoute as route:
  703. self._head = reset
  704. if route.context & contexts.STYLE_PASS_AGAIN:
  705. stack = self._parse(route.context | contexts.STYLE_SECOND_PASS)
  706. else:
  707. return self._emit_text("''")
  708. self._emit_style_tag("i", "''", stack)
  709. def _parse_bold(self):
  710. """Parse wiki-style bold."""
  711. reset = self._head
  712. try:
  713. stack = self._parse(contexts.STYLE_BOLD)
  714. except BadRoute:
  715. self._head = reset
  716. if self._context & contexts.STYLE_SECOND_PASS:
  717. self._emit_text("'")
  718. return True
  719. elif self._context & contexts.STYLE_ITALICS:
  720. self._context |= contexts.STYLE_PASS_AGAIN
  721. self._emit_text("'''")
  722. else:
  723. self._emit_text("'")
  724. self._parse_italics()
  725. else:
  726. self._emit_style_tag("b", "'''", stack)
    def _parse_italics_and_bold(self):
        """Parse wiki-style italics and bold together (i.e., five ticks)."""
        reset = self._head
        try:
            # First attempt: treat the run as bold opening at the outside.
            stack = self._parse(contexts.STYLE_BOLD)
        except BadRoute:
            self._head = reset
            try:
                # Second attempt: treat the run as italics opening outside.
                stack = self._parse(contexts.STYLE_ITALICS)
            except BadRoute:
                # Neither style parses: all five ticks are literal text.
                self._head = reset
                self._emit_text("'''''")
            else:
                # Italics parsed; try a bold section following it.
                reset = self._head
                try:
                    stack2 = self._parse(contexts.STYLE_BOLD)
                except BadRoute:
                    # No trailing bold: the extra ''' becomes literal text.
                    self._head = reset
                    self._emit_text("'''")
                    self._emit_style_tag("i", "''", stack)
                else:
                    # Wrap the italics and the following content in bold:
                    # push a fresh stack, fill it, then pop it as the body.
                    self._push()
                    self._emit_style_tag("i", "''", stack)
                    self._emit_all(stack2)
                    self._emit_style_tag("b", "'''", self._pop())
        else:
            # Bold parsed; try an italics section following it.
            reset = self._head
            try:
                stack2 = self._parse(contexts.STYLE_ITALICS)
            except BadRoute:
                # No trailing italics: the extra '' becomes literal text.
                self._head = reset
                self._emit_text("''")
                self._emit_style_tag("b", "'''", stack)
            else:
                # Wrap the bold and the following content in italics.
                self._push()
                self._emit_style_tag("b", "'''", stack)
                self._emit_all(stack2)
                self._emit_style_tag("i", "''", self._pop())
    def _parse_style(self):
        """Parse wiki-style formatting (``''``/``'''`` for italics/bold).

        Called with the head on the first of at least two apostrophes.
        Returns a popped stack when the ticks close the current route,
        otherwise ``None``.
        """
        self._head += 2
        ticks = 2
        # Consume any apostrophes beyond the first two.
        while self._read() == "'":
            self._head += 1
            ticks += 1
        italics = self._context & contexts.STYLE_ITALICS
        bold = self._context & contexts.STYLE_BOLD
        if ticks > 5:
            # More than five ticks: the extras are literal apostrophes.
            self._emit_text("'" * (ticks - 5))
            ticks = 5
        elif ticks == 4:
            # Four ticks: one literal apostrophe plus bold markup.
            self._emit_text("'")
            ticks = 3
        if (italics and ticks in (2, 5)) or (bold and ticks in (3, 5)):
            # The ticks close the style context we are currently inside.
            if ticks == 5:
                # Only part of the run closes this context; rewind so the
                # remaining ticks are re-read on the next iteration.
                self._head -= 3 if italics else 2
            return self._pop()
        elif not self._can_recurse():
            # Recursion limit reached: cannot open a new style context.
            if ticks == 3:
                if self._context & contexts.STYLE_SECOND_PASS:
                    self._emit_text("'")
                    return self._pop()
                self._context |= contexts.STYLE_PASS_AGAIN
            self._emit_text("'" * ticks)
        elif ticks == 2:
            self._parse_italics()
        elif ticks == 3:
            if self._parse_bold():
                return self._pop()
        elif ticks == 5:
            self._parse_italics_and_bold()
        # Compensate for the unconditional advance in the caller's loop.
        self._head -= 1
  799. def _handle_list_marker(self):
  800. """Handle a list marker at the head (``#``, ``*``, ``;``, ``:``)."""
  801. markup = self._read()
  802. if markup == ";":
  803. self._context |= contexts.DL_TERM
  804. self._emit(tokens.TagOpenOpen(wiki_markup=markup))
  805. self._emit_text(get_html_tag(markup))
  806. self._emit(tokens.TagCloseSelfclose())
  807. def _handle_list(self):
  808. """Handle a wiki-style list (``#``, ``*``, ``;``, ``:``)."""
  809. self._handle_list_marker()
  810. while self._read(1) in ("#", "*", ";", ":"):
  811. self._head += 1
  812. self._handle_list_marker()
  813. def _handle_hr(self):
  814. """Handle a wiki-style horizontal rule (``----``) in the string."""
  815. length = 4
  816. self._head += 3
  817. while self._read(1) == "-":
  818. length += 1
  819. self._head += 1
  820. self._emit(tokens.TagOpenOpen(wiki_markup="-" * length))
  821. self._emit_text("hr")
  822. self._emit(tokens.TagCloseSelfclose())
  823. def _handle_dl_term(self):
  824. """Handle the term in a description list (``foo`` in ``;foo:bar``)."""
  825. self._context ^= contexts.DL_TERM
  826. if self._read() == ":":
  827. self._handle_list_marker()
  828. else:
  829. self._emit_text("\n")
    def _handle_end(self):
        """Handle the end of the stream of wikitext.

        Fails the route if the current context cannot legally end here;
        otherwise returns the popped token stack.
        """
        if self._context & contexts.FAIL:
            if self._context & contexts.TAG_BODY:
                # _stack[1] holds the tag-name text token emitted right after
                # TagOpenOpen; single-only tags (no close tag) may end here.
                if is_single(self._stack[1].text):
                    return self._handle_single_tag_end()
            if self._context & contexts.DOUBLE:
                # Discard the inner of the two stacks before failing.
                self._pop()
            self._fail_route()
        return self._pop()
    def _verify_safe(self, this):
        """Make sure we are not trying to write an invalid character.

        *this* is the character at the head (or the END sentinel).  Returns
        ``False`` when writing it would invalidate the current route; may
        also set FAIL_* flags on the context so a later character fails.
        """
        context = self._context
        if context & contexts.FAIL_NEXT:
            # A previous character already doomed this route.
            return False
        if context & contexts.WIKILINK:
            if context & contexts.WIKILINK_TEXT:
                # Inside link text, only a nested "[[" is invalid.
                return not (this == self._read(1) == "[")
            elif this == "]" or this == "{":
                # Allowed now, but anything after it fails the route.
                self._context |= contexts.FAIL_NEXT
            elif this == "\n" or this == "[" or this == "}":
                return False
            return True
        elif context & contexts.EXT_LINK_TITLE:
            # External link titles may not span lines.
            return this != "\n"
        elif context & contexts.TEMPLATE_NAME:
            if this == "{" or this == "}" or this == "[":
                # Allowed as the last character only.
                self._context |= contexts.FAIL_NEXT
                return True
            if this == "]":
                return False
            if this == "|":
                return True
            if context & contexts.HAS_TEXT:
                if context & contexts.FAIL_ON_TEXT:
                    # After a newline in the name, only whitespace may follow.
                    if this is self.END or not this.isspace():
                        return False
                else:
                    if this == "\n":
                        self._context |= contexts.FAIL_ON_TEXT
            elif this is self.END or not this.isspace():
                # First non-space character of the template name.
                self._context |= contexts.HAS_TEXT
            return True
        elif context & contexts.TAG_CLOSE:
            return this != "<"
        else:
            if context & contexts.FAIL_ON_EQUALS:
                if this == "=":
                    return False
            elif context & contexts.FAIL_ON_LBRACE:
                # Previous character was "{"; a second brace means a nested
                # template/argument opening.
                if this == "{" or (self._read(-1) == self._read(-2) == "{"):
                    if context & contexts.TEMPLATE:
                        self._context |= contexts.FAIL_ON_EQUALS
                    else:
                        self._context |= contexts.FAIL_NEXT
                    return True
                self._context ^= contexts.FAIL_ON_LBRACE
            elif context & contexts.FAIL_ON_RBRACE:
                # Previous character was "}"; a second one closes something.
                if this == "}":
                    if context & contexts.TEMPLATE:
                        self._context |= contexts.FAIL_ON_EQUALS
                    else:
                        self._context |= contexts.FAIL_NEXT
                    return True
                self._context ^= contexts.FAIL_ON_RBRACE
            elif this == "{":
                self._context |= contexts.FAIL_ON_LBRACE
            elif this == "}":
                self._context |= contexts.FAIL_ON_RBRACE
            return True
    def _parse(self, context=0, push=True):
        """Parse the wikicode string, using *context* for when to stop.

        This is the main dispatch loop: it reads one character at a time
        and routes recognized markers to their handlers.  Handlers that
        close the current route return its token stack, which is returned
        from here; plain text is emitted directly.
        """
        if push:
            self._push(context)
        while True:
            this = self._read()
            if self._context & contexts.UNSAFE:
                # Contexts flagged UNSAFE vet every character before writing.
                if not self._verify_safe(this):
                    if self._context & contexts.DOUBLE:
                        self._pop()
                    self._fail_route()
            if this not in self.MARKERS:
                # Ordinary text: emit it and advance the head.
                self._emit_text(this)
                self._head += 1
                continue
            if this is self.END:
                return self._handle_end()
            next = self._read(1)
            if this == next == "{":
                if self._can_recurse():
                    self._parse_template_or_argument()
                else:
                    self._emit_text("{")
            elif this == "|" and self._context & contexts.TEMPLATE:
                self._handle_template_param()
            elif this == "=" and self._context & contexts.TEMPLATE_PARAM_KEY:
                self._handle_template_param_value()
            elif this == next == "}" and self._context & contexts.TEMPLATE:
                return self._handle_template_end()
            elif this == "|" and self._context & contexts.ARGUMENT_NAME:
                self._handle_argument_separator()
            elif this == next == "}" and self._context & contexts.ARGUMENT:
                # Arguments close with three braces, not two.
                if self._read(2) == "}":
                    return self._handle_argument_end()
                else:
                    self._emit_text("}")
            elif this == next == "[" and self._can_recurse():
                if not self._context & contexts.INVALID_LINK:
                    self._parse_wikilink()
                else:
                    self._emit_text("[")
            elif this == "|" and self._context & contexts.WIKILINK_TITLE:
                self._handle_wikilink_separator()
            elif this == next == "]" and self._context & contexts.WIKILINK:
                return self._handle_wikilink_end()
            elif this == "[":
                self._parse_external_link(True)
            elif this == ":" and self._read(-1) not in self.MARKERS:
                # Possible scheme-style external link (e.g. "http:...").
                self._parse_external_link(False)
            elif this == "]" and self._context & contexts.EXT_LINK_TITLE:
                return self._pop()
            elif this == "=" and not self._global & contexts.GL_HEADING:
                # Headings must begin at the start of a line.
                if self._read(-1) in ("\n", self.START):
                    self._parse_heading()
                else:
                    self._emit_text("=")
            elif this == "=" and self._context & contexts.HEADING:
                return self._handle_heading_end()
            elif this == "\n" and self._context & contexts.HEADING:
                # Headings may not span lines.
                self._fail_route()
            elif this == "&":
                self._parse_entity()
            elif this == "<" and next == "!":
                # "<!--" starts an HTML comment.
                if self._read(2) == self._read(3) == "-":
                    self._parse_comment()
                else:
                    self._emit_text(this)
            elif this == "<" and next == "/" and self._read(2) is not self.END:
                if self._context & contexts.TAG_BODY:
                    self._handle_tag_open_close()
                else:
                    self._handle_invalid_tag_start()
            elif this == "<" and not self._context & contexts.TAG_CLOSE:
                if self._can_recurse():
                    self._parse_tag()
                else:
                    self._emit_text("<")
            elif this == ">" and self._context & contexts.TAG_CLOSE:
                return self._handle_tag_close_close()
            elif this == next == "'":
                # _parse_style returns a stack when the ticks closed a route.
                result = self._parse_style()
                if result is not None:
                    return result
            elif self._read(-1) in ("\n", self.START):
                # Line-initial markup: lists and horizontal rules.
                if this in ("#", "*", ";", ":"):
                    self._handle_list()
                elif this == next == self._read(2) == self._read(3) == "-":
                    self._handle_hr()
                else:
                    self._emit_text(this)
            elif this in ("\n", ":") and self._context & contexts.DL_TERM:
                self._handle_dl_term()
            else:
                self._emit_text(this)
            self._head += 1
  995. def tokenize(self, text):
  996. """Build a list of tokens from a string of wikicode and return it."""
  997. split = self.regex.split(text)
  998. self._text = [segment for segment in split if segment]
  999. return self._parse()