diff --git a/mwparserfromhell/parser/tokenizer.c b/mwparserfromhell/parser/tokenizer.c index b3ad3ec..f6eea84 100644 --- a/mwparserfromhell/parser/tokenizer.c +++ b/mwparserfromhell/parser/tokenizer.c @@ -2311,11 +2311,7 @@ static int load_entitydefs(void) PyObject *string; #endif -#ifdef IS_PY3K - tempmod = PyImport_ImportModule("html.entities"); -#else - tempmod = PyImport_ImportModule("htmlentitydefs"); -#endif + tempmod = PyImport_ImportModule(ENTITYDEFS_MODULE); if (!tempmod) return -1; defmap = PyObject_GetAttrString(tempmod, "entitydefs"); @@ -2353,7 +2349,7 @@ static int load_tokens(void) *globals = PyEval_GetGlobals(), *locals = PyEval_GetLocals(), *fromlist = PyList_New(1), - *modname = PyBytes_FromString("tokens"); + *modname = IMPORT_NAME_FUNC("tokens"); char *name = "mwparserfromhell.parser"; if (!fromlist || !modname) @@ -2413,7 +2409,7 @@ static int load_tag_defs(void) *globals = PyEval_GetGlobals(), *locals = PyEval_GetLocals(), *fromlist = PyList_New(1), - *modname = PyBytes_FromString("tag_defs"); + *modname = IMPORT_NAME_FUNC("tag_defs"); char *name = "mwparserfromhell"; if (!fromlist || !modname) @@ -2428,24 +2424,14 @@ static int load_tag_defs(void) return 0; } -#ifdef IS_PY3K - #define INIT_ERROR return NULL - PyMODINIT_FUNC PyInit__tokenizer(void) -#else - #define INIT_ERROR return - PyMODINIT_FUNC init_tokenizer(void) -#endif +PyMODINIT_FUNC INIT_FUNC_NAME(void) { PyObject *module; TokenizerType.tp_new = PyType_GenericNew; if (PyType_Ready(&TokenizerType) < 0) INIT_ERROR; -#ifdef IS_PY3K - module = PyModule_Create(&module_def); -#else - module = Py_InitModule("_tokenizer", NULL); -#endif + module = CREATE_MODULE; if (!module) INIT_ERROR; Py_INCREF(&TokenizerType); diff --git a/mwparserfromhell/parser/tokenizer.h b/mwparserfromhell/parser/tokenizer.h index 2bf6973..1229688 100644 --- a/mwparserfromhell/parser/tokenizer.h +++ b/mwparserfromhell/parser/tokenizer.h @@ -252,6 +252,23 @@ static PyObject* Tokenizer_parse(Tokenizer*, int, int); 
static PyObject* Tokenizer_tokenize(Tokenizer*, PyObject*); +/* Macros for Python 2/3 compatibility: */ + +#ifdef IS_PY3K + #define IMPORT_NAME_FUNC PyUnicode_FromString + #define CREATE_MODULE PyModule_Create(&module_def) + #define ENTITYDEFS_MODULE "html.entities" + #define INIT_FUNC_NAME PyInit__tokenizer + #define INIT_ERROR return NULL +#else + #define IMPORT_NAME_FUNC PyBytes_FromString + #define CREATE_MODULE Py_InitModule("_tokenizer", NULL) + #define ENTITYDEFS_MODULE "htmlentitydefs" + #define INIT_FUNC_NAME init_tokenizer + #define INIT_ERROR return +#endif + + /* More structs for creating the Tokenizer type: */ static PyMethodDef Tokenizer_methods[] = {