
Implement Tokenizer_push_tag_buffer()

tags/v0.3
Ben Kurtovic 11 years ago
parent commit e636bf77cf
2 changed files with 67 additions and 8 deletions
  1. +64 -5 mwparserfromhell/parser/tokenizer.c
  2. +3 -3 mwparserfromhell/parser/tokenizer.h

+64 -5 mwparserfromhell/parser/tokenizer.c

@@ -1231,11 +1231,10 @@ Tokenizer_really_parse_tag(Tokenizer* self)
 
     if (!data)
         return NULL;
-    data->padding_first = Textbuffer_new();
-    data->padding_before_eq = Textbuffer_new();
-    data->padding_after_eq = Textbuffer_new();
-    if (!data->padding_first || !data->padding_before_eq ||
-        !data->padding_after_eq) {
+    data->pad_first = Textbuffer_new();
+    data->pad_before_eq = Textbuffer_new();
+    data->pad_after_eq = Textbuffer_new();
+    if (!data->pad_first || !data->pad_before_eq || !data->pad_after_eq) {
         free(data);
         return NULL;
     }
@@ -1318,6 +1317,66 @@ Tokenizer_really_parse_tag(Tokenizer* self)
static int
Tokenizer_push_tag_buffer(Tokenizer* self, TagOpenData* data)
{
    PyObject *token, *tokens, *kwargs, *pad_first, *pad_before_eq,
             *pad_after_eq;

    if (data->context & TAG_QUOTED) {
        token = PyObject_CallObject(TagAttrQuote, NULL);
        if (!token)
            return -1;
        if (Tokenizer_emit_first(self, token)) {
            Py_DECREF(token);
            return -1;
        }
        Py_DECREF(token);
        tokens = Tokenizer_pop(self);
        if (!tokens)
            return -1;
        if (Tokenizer_emit_all(self, tokens)) {
            Py_DECREF(tokens);
            return -1;
        }
        Py_DECREF(tokens);
    }
    pad_first = Textbuffer_render(data->pad_first);
    pad_before_eq = Textbuffer_render(data->pad_before_eq);
    pad_after_eq = Textbuffer_render(data->pad_after_eq);
    if (!pad_first || !pad_before_eq || !pad_after_eq)
        return -1;
    kwargs = PyDict_New();
    if (!kwargs)
        return -1;
    PyDict_SetItemString(kwargs, "pad_first", pad_first);
    PyDict_SetItemString(kwargs, "pad_before_eq", pad_before_eq);
    PyDict_SetItemString(kwargs, "pad_after_eq", pad_after_eq);
    Py_DECREF(pad_first);
    Py_DECREF(pad_before_eq);
    Py_DECREF(pad_after_eq);
    token = PyObject_Call(TagAttrStart, NOARGS, kwargs);
    Py_DECREF(kwargs);
    if (!token)
        return -1;
    if (Tokenizer_emit_first(self, token)) {
        Py_DECREF(token);
        return -1;
    }
    Py_DECREF(token);
    tokens = Tokenizer_pop(self);
    if (!tokens)
        return -1;
    if (Tokenizer_emit_all(self, tokens)) {
        Py_DECREF(tokens);
        return -1;
    }
    Py_DECREF(tokens);
    Textbuffer_dealloc(data->pad_first);
    Textbuffer_dealloc(data->pad_before_eq);
    Textbuffer_dealloc(data->pad_after_eq);
    data->pad_first = Textbuffer_new();
    data->pad_before_eq = Textbuffer_new();
    data->pad_after_eq = Textbuffer_new();
    if (!data->pad_first || !data->pad_before_eq || !data->pad_after_eq)
        return -1;
    return 0;
}
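
As an aside for readers less used to the CPython C API, the middle of the new function follows a common pattern: pack the rendered padding strings into a kwargs dict, call the token class with an empty positional tuple, and drop every temporary reference on both the success and the error path. Below is a minimal sketch of that pattern in isolation; the helper name make_padded_token and the freshly built empty tuple are illustrative assumptions (the module itself reuses its shared NOARGS tuple), and unlike the hunk above the sketch also checks the PyDict_SetItemString() return values.

    #include <Python.h>

    /* Hypothetical helper, illustration only: builds a token object the same
       way the new function builds TagAttrStart. PyDict_SetItemString() does
       not steal references, so the pad strings remain owned by the caller. */
    static PyObject*
    make_padded_token(PyObject* token_class, PyObject* pad_first,
                      PyObject* pad_before_eq, PyObject* pad_after_eq)
    {
        PyObject *kwargs, *args, *token;

        kwargs = PyDict_New();
        if (!kwargs)
            return NULL;
        if (PyDict_SetItemString(kwargs, "pad_first", pad_first) ||
            PyDict_SetItemString(kwargs, "pad_before_eq", pad_before_eq) ||
            PyDict_SetItemString(kwargs, "pad_after_eq", pad_after_eq)) {
            Py_DECREF(kwargs);
            return NULL;
        }
        args = PyTuple_New(0);      /* stands in for the module's NOARGS tuple */
        if (!args) {
            Py_DECREF(kwargs);
            return NULL;
        }
        token = PyObject_Call(token_class, args, kwargs);
        Py_DECREF(args);
        Py_DECREF(kwargs);
        return token;               /* new reference, or NULL with an exception set */
    }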



+3 -3 mwparserfromhell/parser/tokenizer.h

@@ -177,9 +177,9 @@ typedef struct {
 
 typedef struct {
     int context;
-    struct Textbuffer* padding_first;
-    struct Textbuffer* padding_before_eq;
-    struct Textbuffer* padding_after_eq;
+    struct Textbuffer* pad_first;
+    struct Textbuffer* pad_before_eq;
+    struct Textbuffer* pad_after_eq;
     Py_ssize_t reset;
 } TagOpenData;
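
Judging from the keyword names passed to TagAttrStart, pad_first appears to hold the whitespace before the attribute name, pad_before_eq the run before the '=', and pad_after_eq the run after it; Tokenizer_push_tag_buffer() flushes all three once per attribute and then starts fresh buffers. The following hypothetical helper, not part of the commit and assuming only the Textbuffer_new()/Textbuffer_dealloc() signatures visible in the hunks above, shows that recycle step on its own.

    /* Illustration only: recycle the pad buffers of a TagOpenData after the
       pending attribute has been written out, mirroring the tail of
       Tokenizer_push_tag_buffer(). Returns 0 on success, -1 if any of the
       fresh buffers could not be allocated. */
    static int
    reset_tag_padding(TagOpenData* data)
    {
        Textbuffer_dealloc(data->pad_first);
        Textbuffer_dealloc(data->pad_before_eq);
        Textbuffer_dealloc(data->pad_after_eq);
        data->pad_first = Textbuffer_new();
        data->pad_before_eq = Textbuffer_new();
        data->pad_after_eq = Textbuffer_new();
        if (!data->pad_first || !data->pad_before_eq || !data->pad_after_eq)
            return -1;
        return 0;
    }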


