@@ -196,6 +196,23 @@ int Tokenizer_check_route(Tokenizer* self, uint64_t context)
 }

 /*
+    Free the tokenizer's bad route cache tree. Intended to be called by the
+    main tokenizer function after parsing is finished.
+*/
+void Tokenizer_free_bad_route_tree(Tokenizer *self)
+{
+    struct avl_tree_node *cur = avl_tree_first_in_postorder(self->bad_routes);
+    struct avl_tree_node *parent;
+    while (cur) {
+        route_tree_node *node = avl_tree_entry(cur, route_tree_node, node);
+        parent = avl_get_parent(cur);
+        free(node);
+        cur = avl_tree_next_in_postorder(cur, parent);
+    }
+    self->bad_routes = NULL;
+}
+
+/*
     Write a token to the current token stack.
 */
 int Tokenizer_emit_token(Tokenizer* self, PyObject* token, int first)
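
Aside (not part of the patch): the helper added above walks the bad-route tree in post-order and captures each node's parent pointer before freeing it, so children are released before their parent and the post-order successor is derived from the saved parent link rather than from the freed node's fields. Below is a minimal standalone sketch of the same pattern, assuming the bundled avl_tree library ("avl_tree.h") whose functions appear in this diff (avl_tree_first_in_postorder, avl_tree_next_in_postorder, avl_get_parent, avl_tree_entry, avl_tree_insert); the int_node struct, int_node_cmp comparator, and free_int_tree helper are hypothetical names invented for illustration.

/*
    Standalone sketch, not part of the patch: the post-order free pattern
    from Tokenizer_free_bad_route_tree applied to a hypothetical tree of
    int-keyed entries.
*/
#include <stdio.h>
#include <stdlib.h>
#include "avl_tree.h"

typedef struct {
    struct avl_tree_node node;   /* embedded AVL node, like route_tree_node */
    int key;                     /* illustrative payload */
} int_node;

static int int_node_cmp(const struct avl_tree_node *a,
                        const struct avl_tree_node *b)
{
    int ka = avl_tree_entry(a, int_node, node)->key;
    int kb = avl_tree_entry(b, int_node, node)->key;
    return (ka > kb) - (ka < kb);
}

static void free_int_tree(struct avl_tree_node **root)
{
    struct avl_tree_node *cur = avl_tree_first_in_postorder(*root);
    struct avl_tree_node *parent;

    while (cur) {
        int_node *entry = avl_tree_entry(cur, int_node, node);
        /* Capture the parent link before free(), mirroring the patched
           function: the next node is computed from 'parent', not from the
           freed node's fields. */
        parent = avl_get_parent(cur);
        free(entry);
        cur = avl_tree_next_in_postorder(cur, parent);
    }
    *root = NULL;   /* mirrors self->bad_routes = NULL */
}

int main(void)
{
    struct avl_tree_node *root = NULL;
    int i;

    for (i = 0; i < 16; i++) {
        int_node *entry = malloc(sizeof *entry);
        entry->key = i;
        avl_tree_insert(&root, &entry->node, int_node_cmp);
    }
    free_int_tree(&root);
    printf("root after free: %p\n", (void *) root);
    return 0;
}

Because the traversal is post-order, every node is visited only after both of its subtrees, which is what makes it safe to free entries while iterating.
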
@@ -33,6 +33,7 @@ PyObject* Tokenizer_pop(Tokenizer*);
 PyObject* Tokenizer_pop_keeping_context(Tokenizer*);
 void* Tokenizer_fail_route(Tokenizer*);
 int Tokenizer_check_route(Tokenizer*, uint64_t);
+void Tokenizer_free_bad_route_tree(Tokenizer*);
 int Tokenizer_emit_token(Tokenizer*, PyObject*, int);
 int Tokenizer_emit_token_kwargs(Tokenizer*, PyObject*, PyObject*, int);
@@ -22,6 +22,7 @@ SOFTWARE.
 #include "tokenizer.h"
 #include "tok_parse.h"
+#include "tok_support.h"
 #include "tokens.h"

 /* Globals */
@@ -165,10 +166,7 @@ static PyObject* Tokenizer_tokenize(Tokenizer* self, PyObject* args)
     tokens = Tokenizer_parse(self, context, 1);

-    route_tree_node *n;
-    avl_tree_for_each_in_postorder(n, self->bad_routes, route_tree_node, node)
-        free(n);
-    self->bad_routes = NULL;
+    Tokenizer_free_bad_route_tree(self);

     if (!tokens || self->topstack) {
         Py_XDECREF(tokens);