A Python parser for MediaWiki wikicode https://mwparserfromhell.readthedocs.io/
/*
Copyright (C) 2012-2017 Ben Kurtovic <ben.kurtovic@gmail.com>

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
#include "tokenizer.h"
#include "tok_parse.h"
#include "tok_support.h"
#include "tokens.h"

/* Globals */

int route_state;
uint64_t route_context;

char** entitydefs;

PyObject* NOARGS;
PyObject* definitions;

static PyObject* ParserError;

/* Forward declarations */

static int load_exceptions(void);
/*
    Create a new tokenizer object.
*/
static PyObject*
Tokenizer_new(PyTypeObject* type, PyObject* args, PyObject* kwds)
{
    Tokenizer* self = (Tokenizer*) type->tp_alloc(type, 0);
    return (PyObject*) self;
}
/*
    Deallocate the given tokenizer's text field.
*/
static void dealloc_tokenizer_text(TokenizerInput* text)
{
    Py_XDECREF(text->object);
}
/*
    Deallocate the given tokenizer object.
*/
static void Tokenizer_dealloc(Tokenizer* self)
{
    Stack *this = self->topstack, *next;

    dealloc_tokenizer_text(&self->text);
    /* Walk the linked list of parse stacks, releasing each frame. */
    while (this) {
        Py_DECREF(this->stack);
        Textbuffer_dealloc(this->textbuffer);
        next = this->next;
        free(this);
        this = next;
    }
    Py_TYPE(self)->tp_free((PyObject*) self);
}
/*
    Initialize a new tokenizer instance's text field.
*/
static void init_tokenizer_text(TokenizerInput* text)
{
    /* Use None as a placeholder so dealloc_tokenizer_text() can always
       safely release whatever is stored here. */
    text->object = Py_None;
    Py_INCREF(Py_None);
    text->length = 0;
    text->kind = PyUnicode_WCHAR_KIND;
    text->data = NULL;
}
/*
    Initialize a new tokenizer instance by setting instance attributes.
*/
static int Tokenizer_init(Tokenizer* self, PyObject* args, PyObject* kwds)
{
    static char* kwlist[] = {NULL};

    if (!PyArg_ParseTupleAndKeywords(args, kwds, "", kwlist))
        return -1;
    init_tokenizer_text(&self->text);
    self->topstack = NULL;
    self->head = self->global = self->depth = 0;
    self->route_context = self->route_state = 0;
    self->bad_routes = NULL;
    self->skip_style_tags = 0;
    return 0;
}
/*
    Load input text into the tokenizer. Steals the caller's reference to
    input and caches its PEP 393 representation for fast reads.
*/
static int load_tokenizer_text(TokenizerInput* text, PyObject *input)
{
    dealloc_tokenizer_text(text);
    text->object = input;

    if (PyUnicode_READY(input) < 0)
        return -1;
    text->kind = PyUnicode_KIND(input);
    text->data = PyUnicode_DATA(input);
    text->length = PyUnicode_GET_LENGTH(input);
    return 0;
}
/*
    Build a list of tokens from a string of wikicode and return it.
*/
static PyObject* Tokenizer_tokenize(Tokenizer* self, PyObject* args)
{
    PyObject *input, *tokens;
    unsigned long long context = 0;
    int skip_style_tags = 0;

    /* "K" matches the 64-bit context bitfield; a plain "i" would write only
       an int's worth of bytes into it. */
    if (PyArg_ParseTuple(args, "U|Ki", &input, &context, &skip_style_tags)) {
        Py_INCREF(input);
        if (load_tokenizer_text(&self->text, input))
            return NULL;
    }
    else {
        const char *encoded;
        Py_ssize_t size;

        /* Failed to parse a Unicode object; try a string instead. */
        PyErr_Clear();
        if (!PyArg_ParseTuple(args, "s#|Ki", &encoded, &size, &context,
                              &skip_style_tags))
            return NULL;
        if (!(input = PyUnicode_FromStringAndSize(encoded, size)))
            return NULL;
        if (load_tokenizer_text(&self->text, input))
            return NULL;
    }
    self->head = self->global = self->depth = 0;
    self->skip_style_tags = skip_style_tags;
    self->bad_routes = NULL;

    tokens = Tokenizer_parse(self, context, 1);
    Tokenizer_free_bad_route_tree(self);

    if (!tokens || self->topstack) {
        Py_XDECREF(tokens);
        if (PyErr_Occurred())
            return NULL;
        if (!ParserError && load_exceptions() < 0)
            return NULL;
        if (BAD_ROUTE) {
            RESET_ROUTE();
            PyErr_SetString(ParserError, "C tokenizer exited with BAD_ROUTE");
        }
        else if (self->topstack)
            PyErr_SetString(ParserError,
                            "C tokenizer exited with non-empty token stack");
        else
            PyErr_SetString(ParserError, "C tokenizer exited unexpectedly");
        return NULL;
    }
    return tokens;
}
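
/*
    Usage sketch (illustrative, not part of the original file): from C, the
    tokenize method above can be driven through the abstract object API;
    "tokenizer" is assumed to be an instantiated CTokenizer.
*/
static PyObject* example_call_tokenize(PyObject* tokenizer, PyObject* text)
{
    /* Equivalent to tokenizer.tokenize(text, 0, 0) in Python; returns a
       new reference to the token list, or NULL with an exception set. */
    return PyObject_CallMethod(tokenizer, "tokenize", "(Oii)", text, 0, 0);
}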
/*
    Load the names of all HTML character entities into the global
    entitydefs array.
*/
static int load_entities(void)
{
    PyObject *tempmod, *defmap, *deflist;
    unsigned numdefs, i;
    PyObject *string;

    tempmod = PyImport_ImportModule("html.entities");
    if (!tempmod)
        return -1;
    defmap = PyObject_GetAttrString(tempmod, "entitydefs");
    if (!defmap)
        return -1;
    Py_DECREF(tempmod);
    deflist = PyDict_Keys(defmap);
    if (!deflist)
        return -1;
    Py_DECREF(defmap);

    numdefs = (unsigned) PyList_GET_SIZE(deflist);
    /* calloc() zeroes the extra final slot, NULL-terminating the array. */
    entitydefs = calloc(numdefs + 1, sizeof(char*));
    if (!entitydefs)
        return -1;
    for (i = 0; i < numdefs; i++) {
        string = PyUnicode_AsASCIIString(PyList_GET_ITEM(deflist, i));
        if (!string)
            return -1;
        /* The bytes object is deliberately never released: entitydefs[i]
           points into its buffer, which must stay alive for the process. */
        entitydefs[i] = PyBytes_AsString(string);
        if (!entitydefs[i])
            return -1;
    }
    Py_DECREF(deflist);
    return 0;
}
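
/*
    Illustrative sketch (not part of the original file): because entitydefs
    is NULL-terminated, a membership test is a simple linear scan. Assumes
    <string.h> is available for strcmp().
*/
static int example_is_known_entity(const char* name)
{
    char** def;

    for (def = entitydefs; *def; def++) {
        if (strcmp(*def, name) == 0)
            return 1;
    }
    return 0;
}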
/*
    Emulate "from mwparserfromhell.parser import tokens" and register the
    token types with the C tokenizer.
*/
static int load_tokens(void)
{
    PyObject *tempmod, *tokens,
             *globals = PyEval_GetGlobals(),
             *locals = PyEval_GetLocals(),
             *fromlist = PyList_New(1),
             *modname = PyUnicode_FromString("tokens");
    char *name = "mwparserfromhell.parser";

    if (!fromlist || !modname)
        return -1;
    PyList_SET_ITEM(fromlist, 0, modname);
    tempmod = PyImport_ImportModuleLevel(name, globals, locals, fromlist, 0);
    Py_DECREF(fromlist);
    if (!tempmod)
        return -1;
    tokens = PyObject_GetAttrString(tempmod, "tokens");
    Py_DECREF(tempmod);
    load_tokens_from_module(tokens);
    Py_DECREF(tokens);
    return 0;
}
/*
    Emulate "from mwparserfromhell import definitions" and cache the module
    in the definitions global.
*/
static int load_defs(void)
{
    PyObject *tempmod,
             *globals = PyEval_GetGlobals(),
             *locals = PyEval_GetLocals(),
             *fromlist = PyList_New(1),
             *modname = PyUnicode_FromString("definitions");
    char *name = "mwparserfromhell";

    if (!fromlist || !modname)
        return -1;
    PyList_SET_ITEM(fromlist, 0, modname);
    tempmod = PyImport_ImportModuleLevel(name, globals, locals, fromlist, 0);
    Py_DECREF(fromlist);
    if (!tempmod)
        return -1;
    definitions = PyObject_GetAttrString(tempmod, "definitions");
    Py_DECREF(tempmod);
    return 0;
}
/*
    Emulate "from mwparserfromhell import parser" and cache
    parser.ParserError, which is raised on tokenizer failure.
*/
static int load_exceptions(void)
{
    PyObject *tempmod, *parsermod,
             *globals = PyEval_GetGlobals(),
             *locals = PyEval_GetLocals(),
             *fromlist = PyList_New(1),
             *modname = PyUnicode_FromString("parser");
    char *name = "mwparserfromhell";

    if (!fromlist || !modname)
        return -1;
    PyList_SET_ITEM(fromlist, 0, modname);
    tempmod = PyImport_ImportModuleLevel(name, globals, locals, fromlist, 0);
    Py_DECREF(fromlist);
    if (!tempmod)
        return -1;
    parsermod = PyObject_GetAttrString(tempmod, "parser");
    Py_DECREF(tempmod);
    ParserError = PyObject_GetAttrString(parsermod, "ParserError");
    Py_DECREF(parsermod);
    return 0;
}
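
/*
    The three loaders above share one pattern: emulate
    "from <package> import <name>" via PyImport_ImportModuleLevel(), then
    take a single attribute off the imported module. A generic form of the
    pattern (hypothetical helper, not in the original file):
*/
static PyObject* example_import_from(const char* package, const char* name)
{
    PyObject *fromlist, *modname, *module, *obj;

    fromlist = PyList_New(1);
    modname = PyUnicode_FromString(name);
    if (!fromlist || !modname) {
        Py_XDECREF(fromlist);
        Py_XDECREF(modname);
        return NULL;
    }
    PyList_SET_ITEM(fromlist, 0, modname);  /* steals modname's reference */
    module = PyImport_ImportModuleLevel(package, PyEval_GetGlobals(),
                                        PyEval_GetLocals(), fromlist, 0);
    Py_DECREF(fromlist);
    if (!module)
        return NULL;
    obj = PyObject_GetAttrString(module, name);
    Py_DECREF(module);
    return obj;  /* new reference, or NULL with an exception set */
}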
PyMODINIT_FUNC PyInit__tokenizer(void)
{
    PyObject *module;

    TokenizerType.tp_new = PyType_GenericNew;
    if (PyType_Ready(&TokenizerType) < 0)
        return NULL;
    module = PyModule_Create(&module_def);
    if (!module)
        return NULL;
    Py_INCREF(&TokenizerType);
    PyModule_AddObject(module, "CTokenizer", (PyObject*) &TokenizerType);
    Py_INCREF(Py_True);
    PyDict_SetItemString(TokenizerType.tp_dict, "USES_C", Py_True);
    NOARGS = PyTuple_New(0);
    if (!NOARGS || load_entities() || load_tokens() || load_defs())
        return NULL;
    return module;
}
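
/*
    Embedding sketch (illustrative; the dotted module path is an assumption
    based on the package layout, not confirmed by this file): verify that
    the C extension loaded by checking the USES_C flag installed on
    CTokenizer above.
*/
static int example_ctokenizer_available(void)
{
    PyObject *module, *cls, *uses_c;
    int result;

    module = PyImport_ImportModule("mwparserfromhell.parser._tokenizer");
    if (!module)
        return -1;
    cls = PyObject_GetAttrString(module, "CTokenizer");
    Py_DECREF(module);
    if (!cls)
        return -1;
    uses_c = PyObject_GetAttrString(cls, "USES_C");
    Py_DECREF(cls);
    if (!uses_c)
        return -1;
    result = PyObject_IsTrue(uses_c);
    Py_DECREF(uses_c);
    return result;  /* 1 if the C tokenizer is active, -1 on error */
}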