A Python parser for MediaWiki wikicode https://mwparserfromhell.readthedocs.io/

tokenizer.py 55 KiB

# -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2015 Ben Kurtovic <ben.kurtovic@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

from __future__ import unicode_literals
from math import log
import re

from . import contexts, tokens, ParserError
from ..compat import htmlentities, range
from ..definitions import (get_html_tag, is_parsable, is_single,
                           is_single_only, is_scheme)

__all__ = ["Tokenizer"]


class BadRoute(Exception):
    """Raised internally when the current tokenization route is invalid."""

    def __init__(self, context=0):
        super(BadRoute, self).__init__()
        self.context = context


class _TagOpenData(object):
    """Stores data about an HTML open tag, like ``<ref name="foo">``."""
    CX_NAME = 1 << 0
    CX_ATTR_READY = 1 << 1
    CX_ATTR_NAME = 1 << 2
    CX_ATTR_VALUE = 1 << 3
    CX_QUOTED = 1 << 4
    CX_NOTE_SPACE = 1 << 5
    CX_NOTE_EQUALS = 1 << 6
    CX_NOTE_QUOTE = 1 << 7

    def __init__(self):
        self.context = self.CX_NAME
        self.padding_buffer = {"first": "", "before_eq": "", "after_eq": ""}
        self.quoter = None
        self.reset = 0


class Tokenizer(object):
    """Creates a list of tokens from a string of wikicode."""
    USES_C = False
    START = object()
    END = object()
    MARKERS = ["{", "}", "[", "]", "<", ">", "|", "=", "&", "'", "#", "*", ";",
               ":", "/", "-", "!", "\n", START, END]
    MAX_DEPTH = 40
    MAX_CYCLES = 100000
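    # MAX_DEPTH bounds how deeply tokenization routes may nest, while
    # MAX_CYCLES bounds the total number of stack pushes during a single call
    # to tokenize(); together they keep pathological input from recursing
    # forever (see _can_recurse()).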
    regex = re.compile(r"([{}\[\]<>|=&'#*;:/\\\"\-!\n])", flags=re.IGNORECASE)
    tag_splitter = re.compile(r"([\s\"\'\\]+)")

    def __init__(self):
        self._text = None
        self._head = 0
        self._stacks = []
        self._global = 0
        self._depth = 0
        self._cycles = 0

    @property
    def _stack(self):
        """The current token stack."""
        return self._stacks[-1][0]

    @property
    def _context(self):
        """The current token context."""
        return self._stacks[-1][1]

    @_context.setter
    def _context(self, value):
        self._stacks[-1][1] = value

    @property
    def _textbuffer(self):
        """The current textbuffer."""
        return self._stacks[-1][2]

    @_textbuffer.setter
    def _textbuffer(self, value):
        self._stacks[-1][2] = value

    def _push(self, context=0):
        """Add a new token stack, context, and textbuffer to the list."""
        self._stacks.append([[], context, []])
        self._depth += 1
        self._cycles += 1

    def _push_textbuffer(self):
        """Push the textbuffer onto the stack as a Text node and clear it."""
        if self._textbuffer:
            self._stack.append(tokens.Text(text="".join(self._textbuffer)))
            self._textbuffer = []

    def _pop(self, keep_context=False):
        """Pop the current stack/context/textbuffer, returning the stack.

        If *keep_context* is ``True``, then we will replace the underlying
        stack's context with the current stack's.
        """
        self._push_textbuffer()
        self._depth -= 1
        if keep_context:
            context = self._context
            stack = self._stacks.pop()[0]
            self._context = context
            return stack
        return self._stacks.pop()[0]

    def _can_recurse(self):
        """Return whether or not our max recursion depth has been exceeded."""
        return self._depth < self.MAX_DEPTH and self._cycles < self.MAX_CYCLES

    def _fail_route(self):
        """Fail the current tokenization route.

        Discards the current stack/context/textbuffer and raises
        :exc:`.BadRoute`.
        """
        context = self._context
        self._pop()
        raise BadRoute(context)

    def _emit(self, token):
        """Write a token to the end of the current token stack."""
        self._push_textbuffer()
        self._stack.append(token)

    def _emit_first(self, token):
        """Write a token to the beginning of the current token stack."""
        self._push_textbuffer()
        self._stack.insert(0, token)

    def _emit_text(self, text):
        """Write text to the current textbuffer."""
        self._textbuffer.append(text)

    def _emit_all(self, tokenlist):
        """Write a series of tokens to the current stack at once."""
        if tokenlist and isinstance(tokenlist[0], tokens.Text):
            self._emit_text(tokenlist.pop(0).text)
        self._push_textbuffer()
        self._stack.extend(tokenlist)

    def _emit_text_then_stack(self, text):
        """Pop the current stack, write *text*, and then write the stack."""
        stack = self._pop()
        self._emit_text(text)
        if stack:
            self._emit_all(stack)
        self._head -= 1

    def _read(self, delta=0, wrap=False, strict=False):
        """Read the value at a relative point in the wikicode.

        The value is read from :attr:`self._head <_head>` plus the value of
        *delta* (which can be negative). If *wrap* is ``False``, we will not
        allow attempts to read from the end of the string if ``self._head +
        delta`` is negative. If *strict* is ``True``, the route will be failed
        (with :meth:`_fail_route`) if we try to read from past the end of the
        string; otherwise, :attr:`self.END <END>` is returned. If we try to
        read from before the start of the string, :attr:`self.START <START>` is
        returned.
        """
        index = self._head + delta
        if index < 0 and (not wrap or abs(index) > len(self._text)):
            return self.START
        try:
            return self._text[index]
        except IndexError:
            if strict:
                self._fail_route()
            return self.END

    def _parse_template(self, has_content):
        """Parse a template at the head of the wikicode string."""
        reset = self._head
        context = contexts.TEMPLATE_NAME
        if has_content:
            context |= contexts.HAS_TEMPLATE
        try:
            template = self._parse(context)
        except BadRoute:
            self._head = reset
            raise
        self._emit_first(tokens.TemplateOpen())
        self._emit_all(template)
        self._emit(tokens.TemplateClose())

    def _parse_argument(self):
        """Parse an argument at the head of the wikicode string."""
        reset = self._head
        try:
            argument = self._parse(contexts.ARGUMENT_NAME)
        except BadRoute:
            self._head = reset
            raise
        self._emit_first(tokens.ArgumentOpen())
        self._emit_all(argument)
        self._emit(tokens.ArgumentClose())
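    # _parse_template_or_argument() counts the run of "{" characters, then
    # repeatedly tries to consume them as arguments ("{{{...}}}", three
    # braces) before templates ("{{...}}", two braces); any single leftover
    # brace is emitted back as plain text.  For example, "{{{foo}}}" is
    # consumed as one argument rather than as a template inside braces.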
    def _parse_template_or_argument(self):
        """Parse a template or argument at the head of the wikicode string."""
        self._head += 2
        braces = 2
        while self._read() == "{":
            self._head += 1
            braces += 1
        has_content = False
        self._push()
        while braces:
            if braces == 1:
                return self._emit_text_then_stack("{")
            if braces == 2:
                try:
                    self._parse_template(has_content)
                except BadRoute:
                    return self._emit_text_then_stack("{{")
                break
            try:
                self._parse_argument()
                braces -= 3
            except BadRoute:
                try:
                    self._parse_template(has_content)
                    braces -= 2
                except BadRoute:
                    return self._emit_text_then_stack("{" * braces)
            if braces:
                has_content = True
                self._head += 1
        self._emit_all(self._pop())
        if self._context & contexts.FAIL_NEXT:
            self._context ^= contexts.FAIL_NEXT

    def _handle_template_param(self):
        """Handle a template parameter at the head of the string."""
        if self._context & contexts.TEMPLATE_NAME:
            if not self._context & (contexts.HAS_TEXT | contexts.HAS_TEMPLATE):
                self._fail_route()
            self._context ^= contexts.TEMPLATE_NAME
        elif self._context & contexts.TEMPLATE_PARAM_VALUE:
            self._context ^= contexts.TEMPLATE_PARAM_VALUE
        else:
            self._emit_all(self._pop(keep_context=True))
        self._context |= contexts.TEMPLATE_PARAM_KEY
        self._emit(tokens.TemplateParamSeparator())
        self._push(self._context)

    def _handle_template_param_value(self):
        """Handle a template parameter's value at the head of the string."""
        self._emit_all(self._pop(keep_context=True))
        self._context ^= contexts.TEMPLATE_PARAM_KEY
        self._context |= contexts.TEMPLATE_PARAM_VALUE
        self._emit(tokens.TemplateParamEquals())

    def _handle_template_end(self):
        """Handle the end of a template at the head of the string."""
        if self._context & contexts.TEMPLATE_NAME:
            if not self._context & (contexts.HAS_TEXT | contexts.HAS_TEMPLATE):
                self._fail_route()
        elif self._context & contexts.TEMPLATE_PARAM_KEY:
            self._emit_all(self._pop(keep_context=True))
        self._head += 1
        return self._pop()

    def _handle_argument_separator(self):
        """Handle the separator between an argument's name and default."""
        self._context ^= contexts.ARGUMENT_NAME
        self._context |= contexts.ARGUMENT_DEFAULT
        self._emit(tokens.ArgumentSeparator())

    def _handle_argument_end(self):
        """Handle the end of an argument at the head of the string."""
        self._head += 2
        return self._pop()

    def _parse_wikilink(self):
        """Parse an internal wikilink at the head of the wikicode string."""
        self._head += 2
        reset = self._head - 1
        try:
            wikilink = self._parse(contexts.WIKILINK_TITLE)
        except BadRoute:
            self._head = reset
            self._emit_text("[[")
        else:
            self._emit(tokens.WikilinkOpen())
            self._emit_all(wikilink)
            self._emit(tokens.WikilinkClose())

    def _handle_wikilink_separator(self):
        """Handle the separator between a wikilink's title and its text."""
        self._context ^= contexts.WIKILINK_TITLE
        self._context |= contexts.WIKILINK_TEXT
        self._emit(tokens.WikilinkSeparator())

    def _handle_wikilink_end(self):
        """Handle the end of a wikilink at the head of the string."""
        self._head += 1
        return self._pop()

    def _parse_bracketed_uri_scheme(self):
        """Parse the URI scheme of a bracket-enclosed external link."""
        self._push(contexts.EXT_LINK_URI)
        if self._read() == self._read(1) == "/":
            self._emit_text("//")
            self._head += 2
        else:
            valid = "abcdefghijklmnopqrstuvwxyz0123456789+.-"
            all_valid = lambda: all(char in valid for char in self._read())
            scheme = ""
            while self._read() is not self.END and all_valid():
                scheme += self._read()
                self._emit_text(self._read())
                self._head += 1
            if self._read() != ":":
                self._fail_route()
            self._emit_text(":")
            self._head += 1
            slashes = self._read() == self._read(1) == "/"
            if slashes:
                self._emit_text("//")
                self._head += 2
            if not is_scheme(scheme, slashes):
                self._fail_route()

    def _parse_free_uri_scheme(self):
        """Parse the URI scheme of a free (no brackets) external link."""
        valid = "abcdefghijklmnopqrstuvwxyz0123456789+.-"
        scheme = []
        try:
            # We have to backtrack through the textbuffer looking for our
            # scheme since it was just parsed as text:
            for chunk in reversed(self._textbuffer):
                for char in reversed(chunk):
                    if char.isspace() or char in self.MARKERS:
                        raise StopIteration()
                    if char not in valid:
                        raise BadRoute()
                    scheme.append(char)
        except StopIteration:
            pass
        scheme = "".join(reversed(scheme))
        slashes = self._read() == self._read(1) == "/"
        if not is_scheme(scheme, slashes):
            raise BadRoute()
        self._push(self._context | contexts.EXT_LINK_URI)
        self._emit_text(scheme)
        self._emit_text(":")
        if slashes:
            self._emit_text("//")
            self._head += 2
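    # _handle_free_link_text() keeps trailing punctuation out of free external
    # links: in "See http://example.com/foo." the final "." becomes plain text
    # after the link.  A ")" is not stripped when the stretch of text being
    # handled also contains a "(", so "http://example.com/foo_(bar)" keeps its
    # closing parenthesis.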
    def _handle_free_link_text(self, punct, tail, this):
        """Handle text in a free ext link, including trailing punctuation."""
        if "(" in this and ")" in punct:
            punct = punct[:-1]  # ')' is no longer valid punctuation
        if this.endswith(punct):
            for i in range(len(this) - 1, 0, -1):
                if this[i - 1] not in punct:
                    break
            else:
                i = 0
            stripped = this[:i]
            if stripped and tail:
                self._emit_text(tail)
                tail = ""
            tail += this[i:]
            this = stripped
        elif tail:
            self._emit_text(tail)
            tail = ""
        self._emit_text(this)
        return punct, tail

    def _is_free_link_end(self, this, next):
        """Return whether the current head is the end of a free link."""
        # Built from _parse()'s end sentinels:
        after, ctx = self._read(2), self._context
        equal_sign_contexts = contexts.TEMPLATE_PARAM_KEY | contexts.HEADING
        return (this in (self.END, "\n", "[", "]", "<", ">") or
                this == next == "'" or
                (this == "|" and ctx & contexts.TEMPLATE) or
                (this == "=" and ctx & equal_sign_contexts) or
                (this == next == "}" and ctx & contexts.TEMPLATE) or
                (this == next == after == "}" and ctx & contexts.ARGUMENT))

    def _really_parse_external_link(self, brackets):
        """Really parse an external link."""
        if brackets:
            self._parse_bracketed_uri_scheme()
            invalid = ("\n", " ", "]")
        else:
            self._parse_free_uri_scheme()
            invalid = ("\n", " ", "[", "]")
        punct = tuple(",;\.:!?)")
        if self._read() is self.END or self._read()[0] in invalid:
            self._fail_route()
        tail = ""
        while True:
            this, next = self._read(), self._read(1)
            if this == "&":
                if tail:
                    self._emit_text(tail)
                    tail = ""
                self._parse_entity()
            elif (this == "<" and next == "!" and self._read(2) ==
                  self._read(3) == "-"):
                if tail:
                    self._emit_text(tail)
                    tail = ""
                self._parse_comment()
            elif not brackets and self._is_free_link_end(this, next):
                return self._pop(), tail, -1
            elif this is self.END or this == "\n":
                self._fail_route()
            elif this == next == "{" and self._can_recurse():
                if tail:
                    self._emit_text(tail)
                    tail = ""
                self._parse_template_or_argument()
            elif this == "]":
                return self._pop(), tail, 0
            elif " " in this:
                before, after = this.split(" ", 1)
                if brackets:
                    self._emit_text(before)
                    self._emit(tokens.ExternalLinkSeparator())
                    if after:
                        self._emit_text(after)
                    self._context ^= contexts.EXT_LINK_URI
                    self._context |= contexts.EXT_LINK_TITLE
                    self._head += 1
                    return self._parse(push=False), None, 0
                punct, tail = self._handle_free_link_text(punct, tail, before)
                return self._pop(), tail + " " + after, 0
            elif not brackets:
                punct, tail = self._handle_free_link_text(punct, tail, this)
            else:
                self._emit_text(this)
            self._head += 1

    def _remove_uri_scheme_from_textbuffer(self, scheme):
        """Remove the URI scheme of a new external link from the textbuffer."""
        length = len(scheme)
        while length:
            if length < len(self._textbuffer[-1]):
                self._textbuffer[-1] = self._textbuffer[-1][:-length]
                break
            length -= len(self._textbuffer[-1])
            self._textbuffer.pop()

    def _parse_external_link(self, brackets):
        """Parse an external link at the head of the wikicode string."""
        reset = self._head
        self._head += 1
        try:
            bad_context = self._context & contexts.NO_EXT_LINKS
            if bad_context or not self._can_recurse():
                raise BadRoute()
            link, extra, delta = self._really_parse_external_link(brackets)
        except BadRoute:
            self._head = reset
            if not brackets and self._context & contexts.DL_TERM:
                self._handle_dl_term()
            else:
                self._emit_text(self._read())
        else:
            if not brackets:
                scheme = link[0].text.split(":", 1)[0]
                self._remove_uri_scheme_from_textbuffer(scheme)
            self._emit(tokens.ExternalLinkOpen(brackets=brackets))
            self._emit_all(link)
            self._emit(tokens.ExternalLinkClose())
            self._head += delta
            if extra:
                self._emit_text(extra)

    def _parse_heading(self):
        """Parse a section heading at the head of the wikicode string."""
        self._global |= contexts.GL_HEADING
        reset = self._head
        self._head += 1
        best = 1
        while self._read() == "=":
            best += 1
            self._head += 1
        context = contexts.HEADING_LEVEL_1 << min(best - 1, 5)
        try:
            title, level = self._parse(context)
        except BadRoute:
            self._head = reset + best - 1
            self._emit_text("=" * best)
        else:
            self._emit(tokens.HeadingStart(level=level))
            if level < best:
                self._emit_text("=" * (best - level))
            self._emit_all(title)
            self._emit(tokens.HeadingEnd())
        finally:
            self._global ^= contexts.GL_HEADING

    def _handle_heading_end(self):
        """Handle the end of a section heading at the head of the string."""
        reset = self._head
        self._head += 1
        best = 1
        while self._read() == "=":
            best += 1
            self._head += 1
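        # The HEADING_LEVEL_* flags are consecutive bits, so the log2 below
        # recovers which heading level we are currently parsing; e.g. if the
        # context is HEADING_LEVEL_3 (HEADING_LEVEL_1 << 2), current works
        # out to 3.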
        current = int(log(self._context / contexts.HEADING_LEVEL_1, 2)) + 1
        level = min(current, min(best, 6))
        try:  # Try to check for a heading closure after this one
            after, after_level = self._parse(self._context)
        except BadRoute:
            if level < best:
                self._emit_text("=" * (best - level))
            self._head = reset + best - 1
            return self._pop(), level
        else:  # Found another closure
            self._emit_text("=" * best)
            self._emit_all(after)
            return self._pop(), after_level

    def _really_parse_entity(self):
        """Actually parse an HTML entity and ensure that it is valid."""
        self._emit(tokens.HTMLEntityStart())
        self._head += 1
        this = self._read(strict=True)
        if this == "#":
            numeric = True
            self._emit(tokens.HTMLEntityNumeric())
            self._head += 1
            this = self._read(strict=True)
            if this[0].lower() == "x":
                hexadecimal = True
                self._emit(tokens.HTMLEntityHex(char=this[0]))
                this = this[1:]
                if not this:
                    self._fail_route()
            else:
                hexadecimal = False
        else:
            numeric = hexadecimal = False
        valid = "0123456789abcdefABCDEF" if hexadecimal else "0123456789"
        if not numeric and not hexadecimal:
            valid += "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
        if not all([char in valid for char in this]):
            self._fail_route()
        self._head += 1
        if self._read() != ";":
            self._fail_route()
        if numeric:
            test = int(this, 16) if hexadecimal else int(this)
            if test < 1 or test > 0x10FFFF:
                self._fail_route()
        else:
            if this not in htmlentities.entitydefs:
                self._fail_route()
        self._emit(tokens.Text(text=this))
        self._emit(tokens.HTMLEntityEnd())

    def _parse_entity(self):
        """Parse an HTML entity at the head of the wikicode string."""
        reset = self._head
        self._push()
        try:
            self._really_parse_entity()
        except BadRoute:
            self._head = reset
            self._emit_text(self._read())
        else:
            self._emit_all(self._pop())

    def _parse_comment(self):
        """Parse an HTML comment at the head of the wikicode string."""
        self._head += 4
        reset = self._head - 1
        self._push()
        while True:
            this = self._read()
            if this == self.END:
                self._pop()
                self._head = reset
                self._emit_text("<!--")
                return
            if this == self._read(1) == "-" and self._read(2) == ">":
                self._emit_first(tokens.CommentStart())
                self._emit(tokens.CommentEnd())
                self._emit_all(self._pop())
                self._head += 2
                if self._context & contexts.FAIL_NEXT:
                    # _verify_safe() sets this flag while parsing a template
                    # or link when it encounters what might be a comment -- we
                    # must unset it to let _verify_safe() know it was correct:
                    self._context ^= contexts.FAIL_NEXT
                return
            self._emit_text(this)
            self._head += 1
    def _push_tag_buffer(self, data):
        """Write a pending tag attribute from *data* to the stack."""
        if data.context & data.CX_QUOTED:
            self._emit_first(tokens.TagAttrQuote(char=data.quoter))
            self._emit_all(self._pop())
        buf = data.padding_buffer
        self._emit_first(tokens.TagAttrStart(pad_first=buf["first"],
            pad_before_eq=buf["before_eq"], pad_after_eq=buf["after_eq"]))
        self._emit_all(self._pop())
        for key in data.padding_buffer:
            data.padding_buffer[key] = ""

    def _handle_tag_space(self, data, text):
        """Handle whitespace (*text*) inside of an HTML open tag."""
        ctx = data.context
        end_of_value = ctx & data.CX_ATTR_VALUE and not ctx & (data.CX_QUOTED | data.CX_NOTE_QUOTE)
        if end_of_value or (ctx & data.CX_QUOTED and ctx & data.CX_NOTE_SPACE):
            self._push_tag_buffer(data)
            data.context = data.CX_ATTR_READY
        elif ctx & data.CX_NOTE_SPACE:
            data.context = data.CX_ATTR_READY
        elif ctx & data.CX_ATTR_NAME:
            data.context |= data.CX_NOTE_EQUALS
            data.padding_buffer["before_eq"] += text
        if ctx & data.CX_QUOTED and not ctx & data.CX_NOTE_SPACE:
            self._emit_text(text)
        elif data.context & data.CX_ATTR_READY:
            data.padding_buffer["first"] += text
        elif data.context & data.CX_ATTR_VALUE:
            data.padding_buffer["after_eq"] += text

    def _handle_tag_text(self, text):
        """Handle regular *text* inside of an HTML open tag."""
        next = self._read(1)
        if not self._can_recurse() or text not in self.MARKERS:
            self._emit_text(text)
        elif text == next == "{":
            self._parse_template_or_argument()
        elif text == next == "[":
            self._parse_wikilink()
        elif text == "<":
            self._parse_tag()
        else:
            self._emit_text(text)

    def _handle_tag_data(self, data, text):
        """Handle all sorts of *text* data inside of an HTML open tag."""
        for chunk in self.tag_splitter.split(text):
            if not chunk:
                continue
            if data.context & data.CX_NAME:
                if chunk in self.MARKERS or chunk.isspace():
                    self._fail_route()  # Tags must start with text, not spaces
                data.context = data.CX_NOTE_SPACE
            elif chunk.isspace():
                self._handle_tag_space(data, chunk)
                continue
            elif data.context & data.CX_NOTE_SPACE:
                if data.context & data.CX_QUOTED:
                    data.context = data.CX_ATTR_VALUE
                    self._pop()
                    self._head = data.reset - 1  # Will be auto-incremented
                    return  # Break early
                self._fail_route()
            elif data.context & data.CX_ATTR_READY:
                data.context = data.CX_ATTR_NAME
                self._push(contexts.TAG_ATTR)
            elif data.context & data.CX_ATTR_NAME:
                if chunk == "=":
                    data.context = data.CX_ATTR_VALUE | data.CX_NOTE_QUOTE
                    self._emit(tokens.TagAttrEquals())
                    continue
                if data.context & data.CX_NOTE_EQUALS:
                    self._push_tag_buffer(data)
                    data.context = data.CX_ATTR_NAME
                    self._push(contexts.TAG_ATTR)
            else:  # data.context & data.CX_ATTR_VALUE assured
                escaped = self._read(-1) == "\\" and self._read(-2) != "\\"
                if data.context & data.CX_NOTE_QUOTE:
                    data.context ^= data.CX_NOTE_QUOTE
                    if chunk in "'\"" and not escaped:
                        data.context |= data.CX_QUOTED
                        data.quoter = chunk
                        data.reset = self._head
                        self._push(self._context)
                        continue
                elif data.context & data.CX_QUOTED:
                    if chunk == data.quoter and not escaped:
                        data.context |= data.CX_NOTE_SPACE
                        continue
            self._handle_tag_text(chunk)

    def _handle_tag_close_open(self, data, token):
        """Handle the closing of a open tag (``<foo>``)."""
        if data.context & (data.CX_ATTR_NAME | data.CX_ATTR_VALUE):
            self._push_tag_buffer(data)
        self._emit(token(padding=data.padding_buffer["first"]))
        self._head += 1

    def _handle_tag_open_close(self):
        """Handle the opening of a closing tag (``</foo>``)."""
        self._emit(tokens.TagOpenClose())
        self._push(contexts.TAG_CLOSE)
        self._head += 1

    def _handle_tag_close_close(self):
        """Handle the ending of a closing tag (``</foo>``)."""
        strip = lambda tok: tok.text.rstrip().lower()
        closing = self._pop()
        if len(closing) != 1 or (not isinstance(closing[0], tokens.Text) or
                                 strip(closing[0]) != strip(self._stack[1])):
            self._fail_route()
        self._emit_all(closing)
        self._emit(tokens.TagCloseClose())
        return self._pop()

    def _handle_blacklisted_tag(self):
        """Handle the body of an HTML tag that is parser-blacklisted."""
        strip = lambda text: text.rstrip().lower()
        while True:
            this, next = self._read(), self._read(1)
            if this is self.END:
                self._fail_route()
            elif this == "<" and next == "/":
                self._head += 3
                if self._read() != ">" or (strip(self._read(-1)) !=
                                           strip(self._stack[1].text)):
                    self._head -= 1
                    self._emit_text("</")
                    continue
                self._emit(tokens.TagOpenClose())
                self._emit_text(self._read(-1))
                self._emit(tokens.TagCloseClose())
                return self._pop()
            elif this == "&":
                self._parse_entity()
            else:
                self._emit_text(this)
            self._head += 1

    def _handle_single_only_tag_end(self):
        """Handle the end of an implicitly closing single-only HTML tag."""
        padding = self._stack.pop().padding
        self._emit(tokens.TagCloseSelfclose(padding=padding, implicit=True))
        self._head -= 1  # Offset displacement done by _handle_tag_close_open
        return self._pop()

    def _handle_single_tag_end(self):
        """Handle the stream end when inside a single-supporting HTML tag."""
        stack = self._stack
        # We need to find the index of the TagCloseOpen token corresponding to
        # the TagOpenOpen token located at index 0:
        depth = 1
        for index, token in enumerate(stack[2:], 2):
            if isinstance(token, tokens.TagOpenOpen):
                depth += 1
            elif isinstance(token, tokens.TagCloseOpen):
                depth -= 1
                if depth == 0:
                    break
        else:  # pragma: no cover (untestable/exceptional case)
            raise ParserError("_handle_single_tag_end() missed a TagCloseOpen")
        padding = stack[index].padding
        stack[index] = tokens.TagCloseSelfclose(padding=padding, implicit=True)
        return self._pop()

    def _really_parse_tag(self):
        """Actually parse an HTML tag, starting with the open (``<foo>``)."""
        data = _TagOpenData()
        self._push(contexts.TAG_OPEN)
        self._emit(tokens.TagOpenOpen())
        while True:
            this, next = self._read(), self._read(1)
            can_exit = (not data.context & (data.CX_QUOTED | data.CX_NAME) or
                        data.context & data.CX_NOTE_SPACE)
            if this is self.END:
                if self._context & contexts.TAG_ATTR:
                    if data.context & data.CX_QUOTED:
                        # Unclosed attribute quote: reset, don't die
                        data.context = data.CX_ATTR_VALUE
                        self._pop()
                        self._head = data.reset
                        continue
                    self._pop()
                self._fail_route()
            elif this == ">" and can_exit:
                self._handle_tag_close_open(data, tokens.TagCloseOpen)
                self._context = contexts.TAG_BODY
                if is_single_only(self._stack[1].text):
                    return self._handle_single_only_tag_end()
                if is_parsable(self._stack[1].text):
                    return self._parse(push=False)
                return self._handle_blacklisted_tag()
            elif this == "/" and next == ">" and can_exit:
                self._handle_tag_close_open(data, tokens.TagCloseSelfclose)
                return self._pop()
            else:
                self._handle_tag_data(data, this)
            self._head += 1
    def _handle_invalid_tag_start(self):
        """Handle the (possible) start of an implicitly closing single tag."""
        reset = self._head + 1
        self._head += 2
        try:
            if not is_single_only(self.tag_splitter.split(self._read())[0]):
                raise BadRoute()
            tag = self._really_parse_tag()
        except BadRoute:
            self._head = reset
            self._emit_text("</")
        else:
            tag[0].invalid = True  # Set flag of TagOpenOpen
            self._emit_all(tag)

    def _parse_tag(self):
        """Parse an HTML tag at the head of the wikicode string."""
        reset = self._head
        self._head += 1
        try:
            tag = self._really_parse_tag()
        except BadRoute:
            self._head = reset
            self._emit_text("<")
        else:
            self._emit_all(tag)

    def _emit_style_tag(self, tag, markup, body):
        """Write the body of a tag and the tokens that should surround it."""
        self._emit(tokens.TagOpenOpen(wiki_markup=markup))
        self._emit_text(tag)
        self._emit(tokens.TagCloseOpen())
        self._emit_all(body)
        self._emit(tokens.TagOpenClose())
        self._emit_text(tag)
        self._emit(tokens.TagCloseClose())

    def _parse_italics(self):
        """Parse wiki-style italics."""
        reset = self._head
        try:
            stack = self._parse(contexts.STYLE_ITALICS)
        except BadRoute as route:
            self._head = reset
            if route.context & contexts.STYLE_PASS_AGAIN:
                new_ctx = contexts.STYLE_ITALICS | contexts.STYLE_SECOND_PASS
                stack = self._parse(new_ctx)
            else:
                return self._emit_text("''")
        self._emit_style_tag("i", "''", stack)

    def _parse_bold(self):
        """Parse wiki-style bold."""
        reset = self._head
        try:
            stack = self._parse(contexts.STYLE_BOLD)
        except BadRoute:
            self._head = reset
            if self._context & contexts.STYLE_SECOND_PASS:
                self._emit_text("'")
                return True
            elif self._context & contexts.STYLE_ITALICS:
                self._context |= contexts.STYLE_PASS_AGAIN
                self._emit_text("'''")
            else:
                self._emit_text("'")
                self._parse_italics()
        else:
            self._emit_style_tag("b", "'''", stack)

    def _parse_italics_and_bold(self):
        """Parse wiki-style italics and bold together (i.e., five ticks)."""
        reset = self._head
        try:
            stack = self._parse(contexts.STYLE_BOLD)
        except BadRoute:
            self._head = reset
            try:
                stack = self._parse(contexts.STYLE_ITALICS)
            except BadRoute:
                self._head = reset
                self._emit_text("'''''")
            else:
                reset = self._head
                try:
                    stack2 = self._parse(contexts.STYLE_BOLD)
                except BadRoute:
                    self._head = reset
                    self._emit_text("'''")
                    self._emit_style_tag("i", "''", stack)
                else:
                    self._push()
                    self._emit_style_tag("i", "''", stack)
                    self._emit_all(stack2)
                    self._emit_style_tag("b", "'''", self._pop())
        else:
            reset = self._head
            try:
                stack2 = self._parse(contexts.STYLE_ITALICS)
            except BadRoute:
                self._head = reset
                self._emit_text("''")
                self._emit_style_tag("b", "'''", stack)
            else:
                self._push()
                self._emit_style_tag("b", "'''", stack)
                self._emit_all(stack2)
                self._emit_style_tag("i", "''", self._pop())

    def _parse_style(self):
        """Parse wiki-style formatting (``''``/``'''`` for italics/bold)."""
        self._head += 2
        ticks = 2
        while self._read() == "'":
            self._head += 1
            ticks += 1
        italics = self._context & contexts.STYLE_ITALICS
        bold = self._context & contexts.STYLE_BOLD
        if ticks > 5:
            self._emit_text("'" * (ticks - 5))
            ticks = 5
        elif ticks == 4:
            self._emit_text("'")
            ticks = 3
        if (italics and ticks in (2, 5)) or (bold and ticks in (3, 5)):
            if ticks == 5:
                self._head -= 3 if italics else 2
            return self._pop()
        elif not self._can_recurse():
            if ticks == 3:
                if self._context & contexts.STYLE_SECOND_PASS:
                    self._emit_text("'")
                    return self._pop()
                if self._context & contexts.STYLE_ITALICS:
                    self._context |= contexts.STYLE_PASS_AGAIN
            self._emit_text("'" * ticks)
        elif ticks == 2:
            self._parse_italics()
        elif ticks == 3:
            if self._parse_bold():
                return self._pop()
        else:  # ticks == 5
            self._parse_italics_and_bold()
        self._head -= 1
    def _handle_list_marker(self):
        """Handle a list marker at the head (``#``, ``*``, ``;``, ``:``)."""
        markup = self._read()
        if markup == ";":
            self._context |= contexts.DL_TERM
        self._emit(tokens.TagOpenOpen(wiki_markup=markup))
        self._emit_text(get_html_tag(markup))
        self._emit(tokens.TagCloseSelfclose())

    def _handle_list(self):
        """Handle a wiki-style list (``#``, ``*``, ``;``, ``:``)."""
        self._handle_list_marker()
        while self._read(1) in ("#", "*", ";", ":"):
            self._head += 1
            self._handle_list_marker()

    def _handle_hr(self):
        """Handle a wiki-style horizontal rule (``----``) in the string."""
        length = 4
        self._head += 3
        while self._read(1) == "-":
            length += 1
            self._head += 1
        self._emit(tokens.TagOpenOpen(wiki_markup="-" * length))
        self._emit_text("hr")
        self._emit(tokens.TagCloseSelfclose())

    def _handle_dl_term(self):
        """Handle the term in a description list (``foo`` in ``;foo:bar``)."""
        self._context ^= contexts.DL_TERM
        if self._read() == ":":
            self._handle_list_marker()
        else:
            self._emit_text("\n")

    def _emit_table_tag(self, open_open_markup, tag, style, padding,
                        close_open_markup, contents, open_close_markup):
        """Emit a table tag."""
        self._emit(tokens.TagOpenOpen(wiki_markup=open_open_markup))
        self._emit_text(tag)
        if style:
            self._emit_all(style)
        if close_open_markup:
            self._emit(tokens.TagCloseOpen(wiki_markup=close_open_markup,
                                           padding=padding))
        else:
            self._emit(tokens.TagCloseOpen(padding=padding))
        if contents:
            self._emit_all(contents)
        self._emit(tokens.TagOpenClose(wiki_markup=open_close_markup))
        self._emit_text(tag)
        self._emit(tokens.TagCloseClose())

    def _handle_table_style(self, end_token):
        """Handle style attributes for a table until ``end_token``."""
        data = _TagOpenData()
        data.context = _TagOpenData.CX_ATTR_READY
        while True:
            this = self._read()
            can_exit = (not data.context & data.CX_QUOTED or
                        data.context & data.CX_NOTE_SPACE)
            if this == end_token and can_exit:
                if data.context & (data.CX_ATTR_NAME | data.CX_ATTR_VALUE):
                    self._push_tag_buffer(data)
                if this.isspace():
                    data.padding_buffer["first"] += this
                return data.padding_buffer["first"]
            elif this is self.END or this == end_token:
                if self._context & contexts.TAG_ATTR:
                    if data.context & data.CX_QUOTED:
                        # Unclosed attribute quote: reset, don't die
                        data.context = data.CX_ATTR_VALUE
                        self._pop()
                        self._head = data.reset
                        continue
                    self._pop()
                self._fail_route()
            else:
                self._handle_tag_data(data, this)
            self._head += 1

    def _parse_table(self):
        """Parse a wikicode table by starting with the first line."""
        reset = self._head + 1
        self._head += 2
        self._push(contexts.TABLE_OPEN)
        try:
            padding = self._handle_table_style("\n")
        except BadRoute:
            self._head = reset
            self._emit_text("{|")
            return
        style = self._pop()
        self._head += 1
        try:
            table = self._parse(contexts.TABLE_OPEN)
        except BadRoute:
            self._head = reset
            self._emit_text("{|")
            return
        self._emit_table_tag("{|", "table", style, padding, None, table, "|}")
        # Offset displacement done by _parse():
        self._head -= 1

    def _handle_table_row(self):
        """Parse as style until end of the line, then continue."""
        self._head += 2
        if not self._can_recurse():
            self._emit_text("|-")
            self._head -= 1
            return
        self._push(contexts.TABLE_OPEN | contexts.TABLE_ROW_OPEN)
        try:
            padding = self._handle_table_style("\n")
        except BadRoute:
            self._pop()
            raise
        style = self._pop()
        # Don't parse the style separator:
        self._head += 1
        row = self._parse(contexts.TABLE_OPEN | contexts.TABLE_ROW_OPEN)
        self._emit_table_tag("|-", "tr", style, padding, None, row, "")
        # Offset displacement done by parse():
        self._head -= 1

    def _handle_table_cell(self, markup, tag, line_context):
        """Parse as normal syntax unless we hit a style marker, then parse
        style as HTML attributes and the remainder as normal syntax."""
        old_context = self._context
        padding, style = "", None
        self._head += len(markup)
        reset = self._head
        if not self._can_recurse():
            self._emit_text(markup)
            self._head -= 1
            return
        cell = self._parse(contexts.TABLE_OPEN | contexts.TABLE_CELL_OPEN |
                           line_context | contexts.TABLE_CELL_STYLE)
        cell_context = self._context
        self._context = old_context
        reset_for_style = cell_context & contexts.TABLE_CELL_STYLE
        if reset_for_style:
            self._head = reset
            self._push(contexts.TABLE_OPEN | contexts.TABLE_CELL_OPEN |
                       line_context)
            padding = self._handle_table_style("|")
            style = self._pop()
            # Don't parse the style separator:
            self._head += 1
            cell = self._parse(contexts.TABLE_OPEN | contexts.TABLE_CELL_OPEN |
                               line_context)
            cell_context = self._context
            self._context = old_context
        close_open_markup = "|" if reset_for_style else None
        self._emit_table_tag(markup, tag, style, padding, close_open_markup,
                             cell, "")
        # Keep header/cell line contexts:
        self._context |= cell_context & (contexts.TABLE_TH_LINE |
                                         contexts.TABLE_TD_LINE)
        # Offset displacement done by parse():
        self._head -= 1

    def _handle_table_cell_end(self, reset_for_style=False):
        """Returns the current context, with the TABLE_CELL_STYLE flag set if
        it is necessary to reset and parse style attributes."""
        if reset_for_style:
            self._context |= contexts.TABLE_CELL_STYLE
        else:
            self._context &= ~contexts.TABLE_CELL_STYLE
        return self._pop(keep_context=True)

    def _handle_table_row_end(self):
        """Return the stack in order to handle the table row end."""
        return self._pop()

    def _handle_table_end(self):
        """Return the stack in order to handle the table end."""
        self._head += 2
        return self._pop()
    def _handle_end(self):
        """Handle the end of the stream of wikitext."""
        if self._context & contexts.FAIL:
            if self._context & contexts.TAG_BODY:
                if is_single(self._stack[1].text):
                    return self._handle_single_tag_end()
            if self._context & contexts.TABLE_CELL_OPEN:
                self._pop()
            if self._context & contexts.DOUBLE:
                self._pop()
            self._fail_route()
        return self._pop()

    def _verify_safe(self, this):
        """Make sure we are not trying to write an invalid character."""
        context = self._context
        if context & contexts.FAIL_NEXT:
            return False
        if context & contexts.WIKILINK_TITLE:
            if this == "]" or this == "{":
                self._context |= contexts.FAIL_NEXT
            elif this == "\n" or this == "[" or this == "}" or this == ">":
                return False
            elif this == "<":
                if self._read(1) == "!":
                    self._context |= contexts.FAIL_NEXT
                else:
                    return False
            return True
        elif context & contexts.EXT_LINK_TITLE:
            return this != "\n"
        elif context & contexts.TEMPLATE_NAME:
            if this == "{":
                self._context |= contexts.HAS_TEMPLATE | contexts.FAIL_NEXT
                return True
            if this == "}" or (this == "<" and self._read(1) == "!"):
                self._context |= contexts.FAIL_NEXT
                return True
            if this == "[" or this == "]" or this == "<" or this == ">":
                return False
            if this == "|":
                return True
            if context & contexts.HAS_TEXT:
                if context & contexts.FAIL_ON_TEXT:
                    if this is self.END or not this.isspace():
                        return False
                elif this == "\n":
                    self._context |= contexts.FAIL_ON_TEXT
            elif this is self.END or not this.isspace():
                self._context |= contexts.HAS_TEXT
            return True
        elif context & contexts.TAG_CLOSE:
            return this != "<"
        else:
            if context & contexts.FAIL_ON_EQUALS:
                if this == "=":
                    return False
            elif context & contexts.FAIL_ON_LBRACE:
                if this == "{" or (self._read(-1) == self._read(-2) == "{"):
                    if context & contexts.TEMPLATE:
                        self._context |= contexts.FAIL_ON_EQUALS
                    else:
                        self._context |= contexts.FAIL_NEXT
                    return True
                self._context ^= contexts.FAIL_ON_LBRACE
            elif context & contexts.FAIL_ON_RBRACE:
                if this == "}":
                    self._context |= contexts.FAIL_NEXT
                    return True
                self._context ^= contexts.FAIL_ON_RBRACE
            elif this == "{":
                self._context |= contexts.FAIL_ON_LBRACE
            elif this == "}":
                self._context |= contexts.FAIL_ON_RBRACE
            return True
    def _parse(self, context=0, push=True):
        """Parse the wikicode string, using *context* for when to stop."""
        if push:
            self._push(context)
        while True:
            this = self._read()
            if self._context & contexts.UNSAFE:
                if not self._verify_safe(this):
                    if self._context & contexts.DOUBLE:
                        self._pop()
                    self._fail_route()
            if this not in self.MARKERS:
                self._emit_text(this)
                self._head += 1
                continue
            if this is self.END:
                return self._handle_end()
            next = self._read(1)
            if this == next == "{":
                if self._can_recurse():
                    self._parse_template_or_argument()
                else:
                    self._emit_text("{")
            elif this == "|" and self._context & contexts.TEMPLATE:
                self._handle_template_param()
            elif this == "=" and self._context & contexts.TEMPLATE_PARAM_KEY:
                self._handle_template_param_value()
            elif this == next == "}" and self._context & contexts.TEMPLATE:
                return self._handle_template_end()
            elif this == "|" and self._context & contexts.ARGUMENT_NAME:
                self._handle_argument_separator()
            elif this == next == "}" and self._context & contexts.ARGUMENT:
                if self._read(2) == "}":
                    return self._handle_argument_end()
                else:
                    self._emit_text("}")
            elif this == next == "[" and self._can_recurse():
                if not self._context & contexts.NO_WIKILINKS:
                    self._parse_wikilink()
                else:
                    self._emit_text("[")
            elif this == "|" and self._context & contexts.WIKILINK_TITLE:
                self._handle_wikilink_separator()
            elif this == next == "]" and self._context & contexts.WIKILINK:
                return self._handle_wikilink_end()
            elif this == "[":
                self._parse_external_link(True)
            elif this == ":" and self._read(-1) not in self.MARKERS:
                self._parse_external_link(False)
            elif this == "]" and self._context & contexts.EXT_LINK_TITLE:
                return self._pop()
            elif this == "=" and not self._global & contexts.GL_HEADING:
                if self._read(-1) in ("\n", self.START):
                    self._parse_heading()
                else:
                    self._emit_text("=")
            elif this == "=" and self._context & contexts.HEADING:
                return self._handle_heading_end()
            elif this == "\n" and self._context & contexts.HEADING:
                self._fail_route()
            elif this == "&":
                self._parse_entity()
            elif this == "<" and next == "!":
                if self._read(2) == self._read(3) == "-":
                    self._parse_comment()
                else:
                    self._emit_text(this)
            elif this == "<" and next == "/" and self._read(2) is not self.END:
                if self._context & contexts.TAG_BODY:
                    self._handle_tag_open_close()
                else:
                    self._handle_invalid_tag_start()
            elif this == "<" and not self._context & contexts.TAG_CLOSE:
                if self._can_recurse():
                    self._parse_tag()
                else:
                    self._emit_text("<")
            elif this == ">" and self._context & contexts.TAG_CLOSE:
                return self._handle_tag_close_close()
            elif this == next == "'" and not self._skip_style_tags:
                result = self._parse_style()
                if result is not None:
                    return result
            elif self._read(-1) in ("\n", self.START) and this in ("#", "*", ";", ":"):
                self._handle_list()
            elif self._read(-1) in ("\n", self.START) and this == next == self._read(2) == self._read(3) == "-":
                self._handle_hr()
            elif this in ("\n", ":") and self._context & contexts.DL_TERM:
                self._handle_dl_term()
                if this == "\n":
                    # Kill potential table contexts
                    self._context &= ~contexts.TABLE_CELL_LINE_CONTEXTS
            # Start of table parsing
            elif this == "{" and next == "|" and (self._read(-1) in ("\n", self.START) or
                    (self._read(-2) in ("\n", self.START) and self._read(-1).isspace())):
                if self._can_recurse():
                    self._parse_table()
                else:
                    self._emit_text("{|")
            elif self._context & contexts.TABLE_OPEN:
                if this == next == "|" and self._context & contexts.TABLE_TD_LINE:
                    if self._context & contexts.TABLE_CELL_OPEN:
                        return self._handle_table_cell_end()
                    self._handle_table_cell("||", "td", contexts.TABLE_TD_LINE)
                elif this == next == "|" and self._context & contexts.TABLE_TH_LINE:
                    if self._context & contexts.TABLE_CELL_OPEN:
                        return self._handle_table_cell_end()
                    self._handle_table_cell("||", "th", contexts.TABLE_TH_LINE)
                elif this == next == "!" and self._context & contexts.TABLE_TH_LINE:
                    if self._context & contexts.TABLE_CELL_OPEN:
                        return self._handle_table_cell_end()
                    self._handle_table_cell("!!", "th", contexts.TABLE_TH_LINE)
                elif this == "|" and self._context & contexts.TABLE_CELL_STYLE:
                    return self._handle_table_cell_end(reset_for_style=True)
                # on newline, clear out cell line contexts
                elif this == "\n" and self._context & contexts.TABLE_CELL_LINE_CONTEXTS:
                    self._context &= ~contexts.TABLE_CELL_LINE_CONTEXTS
                    self._emit_text(this)
                elif (self._read(-1) in ("\n", self.START) or
                        (self._read(-2) in ("\n", self.START) and self._read(-1).isspace())):
                    if this == "|" and next == "}":
                        if self._context & contexts.TABLE_CELL_OPEN:
                            return self._handle_table_cell_end()
                        if self._context & contexts.TABLE_ROW_OPEN:
                            return self._handle_table_row_end()
                        return self._handle_table_end()
                    elif this == "|" and next == "-":
                        if self._context & contexts.TABLE_CELL_OPEN:
                            return self._handle_table_cell_end()
                        if self._context & contexts.TABLE_ROW_OPEN:
                            return self._handle_table_row_end()
                        self._handle_table_row()
                    elif this == "|":
                        if self._context & contexts.TABLE_CELL_OPEN:
                            return self._handle_table_cell_end()
                        self._handle_table_cell("|", "td", contexts.TABLE_TD_LINE)
                    elif this == "!":
                        if self._context & contexts.TABLE_CELL_OPEN:
                            return self._handle_table_cell_end()
                        self._handle_table_cell("!", "th", contexts.TABLE_TH_LINE)
                    else:
                        self._emit_text(this)
                else:
                    self._emit_text(this)
            else:
                self._emit_text(this)
            self._head += 1
    def tokenize(self, text, context=0, skip_style_tags=False):
        """Build a list of tokens from a string of wikicode and return it."""
        self._skip_style_tags = skip_style_tags
        split = self.regex.split(text)
        self._text = [segment for segment in split if segment]
        self._head = self._global = self._depth = self._cycles = 0
        try:
            tokens = self._parse(context)
        except BadRoute:  # pragma: no cover (untestable/exceptional case)
            raise ParserError("Python tokenizer exited with BadRoute")
        if self._stacks:  # pragma: no cover (untestable/exceptional case)
            err = "Python tokenizer exited with non-empty token stack"
            raise ParserError(err)
        return tokens
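
A minimal usage sketch (not part of the file): the pure-Python tokenizer can be invoked directly to see the flat token stream it produces, although normal code should go through mwparserfromhell.parse() instead. The token reprs shown in the comment are illustrative, not exact.

from mwparserfromhell.parser.tokenizer import Tokenizer

# Tokenize a small piece of wikicode into a flat list of tokens.
tokens = Tokenizer().tokenize("foo {{bar}}")
# Roughly: [Text(text="foo "), TemplateOpen(), Text(text="bar"), TemplateClose()]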