Explorar el Código
Increase MAX_DEPTH to 100 (#314)
* Increase MAX_DEPTH to 100
* Update nested_templates_and_style_tags to cover new limit
tags/v0.6.6
David Ebbo
hace 10 meses
committed by
GitHub
No se encontró ninguna clave conocida en la base de datos para esta firma
ID de clave GPG: 4AEE18F83AFDEB23
Se han modificado 3 ficheros con 4 adiciones y 4 borrados
-
src/mwparserfromhell/parser/ctokenizer/tok_support.h
-
src/mwparserfromhell/parser/tokenizer.py
-
tests/tokenizer/integration.mwtest
|
|
@@ -49,7 +49,7 @@ Py_UCS4 Tokenizer_read_backwards(Tokenizer *, Py_ssize_t); |
|
|
|
|
|
|
|
/* Macros */ |
|
|
|
|
|
|
|
-#define MAX_DEPTH 40
+#define MAX_DEPTH 100
|
|
|
#define Tokenizer_CAN_RECURSE(self) (self->depth < MAX_DEPTH) |
|
|
|
#define Tokenizer_IS_CURRENT_STACK(self, id) \ |
|
|
|
(self->topstack->ident.head == (id).head && \ |
|
|
|
|
|
@@ -92,7 +92,7 @@ class Tokenizer: |
|
|
|
END, |
|
|
|
] |
|
|
|
URISCHEME = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+.-" |
|
|
|
-    MAX_DEPTH = 40
+    MAX_DEPTH = 100
|
|
|
regex = re.compile(r"([{}\[\]<>|=&'#*;:/\\\"\-!\n])", flags=re.IGNORECASE) |
|
|
|
tag_splitter = re.compile(r"([\s\"\'\\]+)") |
|
|
|
|
|
|
|