
Consolidate some code in _parse_comment()

tags/v0.3
Ben Kurtovic, 11 years ago
commit d6e03ea5c5
2 changed files with 33 additions and 36 deletions
  1. mwparserfromhell/parser/contexts.py (+16, -20)
  2. mwparserfromhell/parser/tokenizer.py (+17, -16)

mwparserfromhell/parser/contexts.py (+16, -20)

@@ -60,8 +60,6 @@ Local (stack-specific) contexts:
 * :py:const:`HEADING_LEVEL_5`
 * :py:const:`HEADING_LEVEL_6`
 
-* :py:const:`COMMENT`
-
 * :py:const:`TAG`
 
 * :py:const:`TAG_OPEN`
@@ -123,28 +121,26 @@ HEADING_LEVEL_6 = 1 << 12
 HEADING = (HEADING_LEVEL_1 + HEADING_LEVEL_2 + HEADING_LEVEL_3 +
            HEADING_LEVEL_4 + HEADING_LEVEL_5 + HEADING_LEVEL_6)
 
-COMMENT = 1 << 13
-
-TAG_OPEN = 1 << 14
-TAG_ATTR = 1 << 15
-TAG_BODY = 1 << 16
-TAG_CLOSE = 1 << 17
+TAG_OPEN = 1 << 13
+TAG_ATTR = 1 << 14
+TAG_BODY = 1 << 15
+TAG_CLOSE = 1 << 16
 TAG = TAG_OPEN + TAG_ATTR + TAG_BODY + TAG_CLOSE
 
-STYLE_ITALICS = 1 << 18
-STYLE_BOLD = 1 << 19
-STYLE_PASS_AGAIN = 1 << 20
-STYLE_SECOND_PASS = 1 << 21
+STYLE_ITALICS = 1 << 17
+STYLE_BOLD = 1 << 18
+STYLE_PASS_AGAIN = 1 << 19
+STYLE_SECOND_PASS = 1 << 20
 STYLE = STYLE_ITALICS + STYLE_BOLD + STYLE_PASS_AGAIN + STYLE_SECOND_PASS
 
-DL_TERM = 1 << 22
+DL_TERM = 1 << 21
 
-HAS_TEXT = 1 << 23
-FAIL_ON_TEXT = 1 << 24
-FAIL_NEXT = 1 << 25
-FAIL_ON_LBRACE = 1 << 26
-FAIL_ON_RBRACE = 1 << 27
-FAIL_ON_EQUALS = 1 << 28
+HAS_TEXT = 1 << 22
+FAIL_ON_TEXT = 1 << 23
+FAIL_NEXT = 1 << 24
+FAIL_ON_LBRACE = 1 << 25
+FAIL_ON_RBRACE = 1 << 26
+FAIL_ON_EQUALS = 1 << 27
 SAFETY_CHECK = (HAS_TEXT + FAIL_ON_TEXT + FAIL_NEXT + FAIL_ON_LBRACE +
                 FAIL_ON_RBRACE + FAIL_ON_EQUALS)

@@ -154,7 +150,7 @@ GL_HEADING = 1 << 0

# Aggregate contexts:

-FAIL = TEMPLATE + ARGUMENT + WIKILINK + HEADING + COMMENT + TAG + STYLE
+FAIL = TEMPLATE + ARGUMENT + WIKILINK + HEADING + TAG + STYLE
 UNSAFE = (TEMPLATE_NAME + WIKILINK_TITLE + TEMPLATE_PARAM_KEY + ARGUMENT_NAME +
           TAG_CLOSE)
 DOUBLE = TEMPLATE_PARAM_KEY + TAG_CLOSE
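
Aside (not part of the commit): each constant in contexts.py is a distinct power of two, so dropping COMMENT simply frees bit 13 and every later flag slides down by one; callers that test flags bitwise are unaffected. A minimal sketch of how these flags are combined and checked, using only names visible in the diff above:

# Minimal sketch, not part of the commit; names are from parser/contexts.py above.
from mwparserfromhell.parser import contexts

context = contexts.TAG_OPEN | contexts.TAG_ATTR   # individual flags OR together

# Aggregates such as TAG, STYLE and FAIL are sums of disjoint bits,
# so a membership test is a single bitwise AND:
assert context & contexts.TAG           # inside some part of a tag
assert not (context & contexts.STYLE)   # not inside italic/bold markup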

mwparserfromhell/parser/tokenizer.py (+17, -16)

@@ -417,16 +417,22 @@ class Tokenizer(object):
"""Parse an HTML comment at the head of the wikicode string."""
self._head += 4
reset = self._head - 1
try:
comment = self._parse(contexts.COMMENT)
except BadRoute:
self._head = reset
self._emit_text("<!--")
else:
self._emit(tokens.CommentStart())
self._emit_all(comment)
self._emit(tokens.CommentEnd())
self._head += 2
self._push()
while True:
this = self._read()
if this == self.END:
self._pop()
self._head = reset
self._emit_text("<!--")
return
if this == self._read(1) == "-" and self._read(2) == ">":
self._emit_first(tokens.CommentStart())
self._emit(tokens.CommentEnd())
self._emit_all(self._pop())
self._head += 2
return
self._emit_text(this)
self._head += 1

def _push_tag_buffer(self, data):
"""Write a pending tag attribute from *data* to the stack."""
@@ -871,12 +877,7 @@ class Tokenizer(object):
         if this is self.END:
             return self._handle_end()
         next = self._read(1)
-        if self._context & contexts.COMMENT:
-            if this == next == "-" and self._read(2) == ">":
-                return self._pop()
-            else:
-                self._emit_text(this)
-        elif this == next == "{":
+        if this == next == "{":
             if self._can_recurse():
                 self._parse_template_or_argument()
             else:

