@@ -1163,7 +1163,7 @@ Tokenizer_verify_safe(Tokenizer* self, int context, Py_UNICODE data)
     else if (context & (LC_TEMPLATE_PARAM_KEY | LC_ARGUMENT_NAME)) {
         if (context & LC_FAIL_ON_EQUALS) {
             if (data == *"=") {
-                self->topstack->context |= LC_FAIL_NEXT;
+                Tokenizer_fail_route(self);
                 return;
             }
         }
@@ -1195,7 +1195,12 @@ Tokenizer_verify_safe(Tokenizer* self, int context, Py_UNICODE data)
     if (context & LC_HAS_TEXT) {
         if (context & LC_FAIL_ON_TEXT) {
             if (!Py_UNICODE_ISSPACE(data)) {
-                Tokenizer_fail_route(self);
+                if (context & LC_TEMPLATE_PARAM_KEY) {
+                    self->topstack->context ^= LC_FAIL_ON_TEXT;
+                    self->topstack->context |= LC_FAIL_ON_EQUALS;
+                }
+                else
+                    Tokenizer_fail_route(self);
                 return;
             }
         }
@@ -1220,6 +1225,7 @@ Tokenizer_parse(Tokenizer* self, int context)
                                   LC_TEMPLATE_PARAM_KEY | LC_ARGUMENT_NAME);
     int this_context, is_marker, i;
     Py_UNICODE this, next, next_next, last;
+    PyObject *trash;
 
     if (Tokenizer_push(self, context))
         return NULL;
@@ -1228,8 +1234,13 @@ Tokenizer_parse(Tokenizer* self, int context)
         this_context = self->topstack->context;
         if (this_context & unsafe_contexts) {
             Tokenizer_verify_safe(self, this_context, this);
-            if (BAD_ROUTE)
+            if (BAD_ROUTE) {
+                if (this_context & LC_TEMPLATE_PARAM_KEY) {
+                    trash = Tokenizer_pop(self);
+                    Py_XDECREF(trash);
+                }
                 return NULL;
+            }
         }
         is_marker = 0;
         for (i = 0; i < NUM_MARKERS; i++) {
@@ -1245,7 +1256,7 @@ Tokenizer_parse(Tokenizer* self, int context)
         }
         if (this == *"") {
             if (this_context & LC_TEMPLATE_PARAM_KEY) {
-                PyObject* trash = Tokenizer_pop(self);
+                trash = Tokenizer_pop(self);
                 Py_XDECREF(trash);
             }
             if (this_context & fail_contexts)
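
Below is a standalone sketch (not part of the patch itself) of the context-flag handling the new LC_TEMPLATE_PARAM_KEY branch relies on: LC_FAIL_ON_TEXT is cleared with ^= and LC_FAIL_ON_EQUALS is set with |=, so the route only fails later if an "=" actually arrives. The flag values below are illustrative stand-ins chosen for this example, not the library's real definitions.

    #include <stdio.h>

    /* Illustrative stand-ins for the real LC_* context bits. */
    #define LC_FAIL_ON_TEXT   (1 << 0)
    #define LC_FAIL_ON_EQUALS (1 << 1)

    int main(void)
    {
        int context = LC_FAIL_ON_TEXT;

        /* What the new branch does instead of calling Tokenizer_fail_route():
           swap the "fail on any further text" rule for "fail only on =". */
        context ^= LC_FAIL_ON_TEXT;
        context |= LC_FAIL_ON_EQUALS;

        printf("FAIL_ON_TEXT set:   %d\n", (context & LC_FAIL_ON_TEXT) != 0);
        printf("FAIL_ON_EQUALS set: %d\n", (context & LC_FAIL_ON_EQUALS) != 0);
        return 0;
    }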