瀏覽代碼
Increase MAX_DEPTH to 100 (#314)
* Increase MAX_DEPTH to 100
* Update nested_templates_and_style_tags to cover new limit
tags/v0.6.6
David Ebbo
10 月之前
committed by
GitHub
沒有發現已知的金鑰在資料庫的簽署中
GPG 金鑰 ID: 4AEE18F83AFDEB23
共有
3 個檔案被更改,包括
4 行新增 和
4 行刪除
-
src/mwparserfromhell/parser/ctokenizer/tok_support.h
-
src/mwparserfromhell/parser/tokenizer.py
-
tests/tokenizer/integration.mwtest
|
|
@@ -49,7 +49,7 @@ Py_UCS4 Tokenizer_read_backwards(Tokenizer *, Py_ssize_t); |
|
|
|
|
|
|
|
/* Macros */ |
|
|
|
|
|
|
|
#define MAX_DEPTH 40 |
|
|
|
#define MAX_DEPTH 100 |
|
|
|
#define Tokenizer_CAN_RECURSE(self) (self->depth < MAX_DEPTH) |
|
|
|
#define Tokenizer_IS_CURRENT_STACK(self, id) \ |
|
|
|
(self->topstack->ident.head == (id).head && \ |
|
|
|
|
|
@@ -92,7 +92,7 @@ class Tokenizer: |
|
|
|
END, |
|
|
|
] |
|
|
|
URISCHEME = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+.-" |
|
|
|
MAX_DEPTH = 40 |
|
|
|
MAX_DEPTH = 100 |
|
|
|
regex = re.compile(r"([{}\[\]<>|=&'#*;:/\\\"\-!\n])", flags=re.IGNORECASE) |
|
|
|
tag_splitter = re.compile(r"([\s\"\'\\]+)") |
|
|
|
|
|
|
|