Browse Source

_write(text=True) -> _write_text(); __eq__ for Tokens

tags/v0.1
Ben Kurtovic 12 years ago
parent
commit
664b6e39ec
2 changed files with 15 additions and 10 deletions
  1. +10
    -10
      mwparserfromhell/parser/tokenizer.py
  2. +5
    -0
      mwparserfromhell/parser/tokens.py

+ 10
- 10
mwparserfromhell/parser/tokenizer.py View File

@@ -75,12 +75,12 @@ class Tokenizer(object):
self._push_textbuffer()
return self._stacks.pop()[0]


def _write(self, data, text=False):
if text:
self._textbuffer.append(data)
return
def _write(self, token):
self._push_textbuffer()
self._stack.append(data)
self._stack.append(token)

# Append raw text to the current text buffer (new helper split out of the
# old _write(data, text=True) form; see commit title).
def _write_text(self, text):
self._textbuffer.append(text)


def _write_all(self, tokenlist):
self._push_textbuffer()
@@ -102,7 +102,7 @@ class Tokenizer(object):
template = self._parse(contexts.TEMPLATE_NAME)
except BadRoute:
self._head = reset
self._write(self._read(), text=True)
self._write_text(self._read())
else:
self._write(tokens.TemplateOpen())
self._write_all(template)
@@ -137,8 +137,8 @@ class Tokenizer(object):
return self._pop()


def _parse_entity(self):
self._push()
try:
self._push()
self._write(tokens.HTMLEntityStart())
this = self._read(1)
if this is self.END:
@@ -172,7 +172,7 @@ class Tokenizer(object):
self._write(tokens.Text(text=text))
self._write(tokens.HTMLEntityEnd())
except BadRoute:
self._write(self._read(), text=True)
self._write_text(self._read())
else:
self._write_all(self._pop())
self._head += 2
@@ -182,7 +182,7 @@ class Tokenizer(object):
while True:
this = self._read()
if this not in self.SENTINELS:
self._write(this, text=True)
self._write_text(this)
self._head += 1
continue
if this is self.END:
@@ -201,7 +201,7 @@ class Tokenizer(object):
elif this == "&":
self._parse_entity()
else:
self._write(this, text=True)
self._write_text(this)
self._head += 1


def tokenize(self, text):


+ 5
- 0
mwparserfromhell/parser/tokens.py View File

@@ -35,6 +35,11 @@ class Token(object):
args.append(key + "=" + repr(value))
return u"{0}({1})".format(type(self).__name__, u", ".join(args))


# Two Tokens compare equal when `other` is an instance of this token's class
# (isinstance check, so subclass instances of type(self) also qualify) and
# both carry identical keyword attributes (_kwargs); anything else is unequal.
# NOTE(review): no matching __hash__ is added alongside __eq__ — under
# Python 3 that would make Token instances unhashable; confirm intent.
def __eq__(self, other):
if isinstance(other, type(self)):
return self._kwargs == other._kwargs
return False

def __getattr__(self, key):
return self._kwargs[key]




Loading…
Cancel
Save