
Increase MAX_DEPTH to 100 (#314)

* Increase MAX_DEPTH to 100

* Update nested_templates_and_style_tags to cover new limit
Tag: v0.6.6
Author: David Ebbo, 10 months ago (committed by GitHub)
Commit: 4acb885b9f
3 changed files with 4 additions and 4 deletions:
  1. src/mwparserfromhell/parser/ctokenizer/tok_support.h (+1, -1)
  2. src/mwparserfromhell/parser/tokenizer.py (+1, -1)
  3. tests/tokenizer/integration.mwtest (+2, -2)

src/mwparserfromhell/parser/ctokenizer/tok_support.h (+1, -1)

@@ -49,7 +49,7 @@ Py_UCS4 Tokenizer_read_backwards(Tokenizer *, Py_ssize_t);
 
 /* Macros */
 
-#define MAX_DEPTH 40
+#define MAX_DEPTH 100
 #define Tokenizer_CAN_RECURSE(self) (self->depth < MAX_DEPTH)
 #define Tokenizer_IS_CURRENT_STACK(self, id) \
     (self->topstack->ident.head == (id).head && \
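The macro above is the whole guard: before opening a new nested scope, the tokenizer checks Tokenizer_CAN_RECURSE, and once depth reaches MAX_DEPTH it stops recursing and treats deeper markup as plain text. A minimal Python sketch of that pattern (hypothetical names, not the library's actual control flow):

    MAX_DEPTH = 100

    def parse_nested(text, depth=0):
        # Analogue of Tokenizer_CAN_RECURSE: refuse to open another nested
        # scope once the cap is hit, so pathological input cannot exhaust
        # the C stack; deeper markup is kept as plain text instead.
        if depth >= MAX_DEPTH:
            return text
        if text.startswith("{{") and text.endswith("}}"):
            return ["template", parse_nested(text[2:-2], depth + 1)]
        return text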


src/mwparserfromhell/parser/tokenizer.py (+1, -1)

@@ -92,7 +92,7 @@ class Tokenizer:
         END,
     ]
     URISCHEME = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+.-"
-    MAX_DEPTH = 40
+    MAX_DEPTH = 100
     regex = re.compile(r"([{}\[\]<>|=&'#*;:/\\\"\-!\n])", flags=re.IGNORECASE)
     tag_splitter = re.compile(r"([\s\"\'\\]+)")
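The pure-Python tokenizer mirrors the C constant, so both paths share the raised cap. For scale, a rough usage sketch of what the new limit permits (the nesting level at which the cap actually bites depends on how many internal stacks each construct pushes, so the counts here are illustrative):

    import mwparserfromhell

    # 50 levels of template nesting: past the old cap of 40, within the new 100.
    depth = 50
    text = "{{a|" * depth + "x" + "}}" * depth
    code = mwparserfromhell.parse(text)
    print(len(code.filter_templates(recursive=True)))  # expect 50 under the new limit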




tests/tokenizer/integration.mwtest (+2, -2)

File diff suppressed because it is too large.

