A Python parser for MediaWiki wikicode https://mwparserfromhell.readthedocs.io/
/*
Copyright (C) 2012-2017 Ben Kurtovic <ben.kurtovic@gmail.com>

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

#include "tokenizer.h"
#include "tok_parse.h"
#include "tok_support.h"
#include "tokens.h"

/* Globals */
int route_state;
uint64_t route_context;
char **entitydefs;

PyObject *NOARGS;
PyObject *definitions;

static PyObject *ParserError;

/* Forward declarations */
static int load_exceptions(void);

/*
    Create a new tokenizer object.
*/
static PyObject *
Tokenizer_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
    Tokenizer *self = (Tokenizer *) type->tp_alloc(type, 0);
    return (PyObject *) self;
}

/*
    Deallocate the given tokenizer's text field.
*/
static void
dealloc_tokenizer_text(TokenizerInput *text)
{
    Py_XDECREF(text->object);
}

/*
    Deallocate the given tokenizer object.
*/
static void
Tokenizer_dealloc(Tokenizer *self)
{
    Stack *this = self->topstack, *next;

    dealloc_tokenizer_text(&self->text);
    while (this) {
        Py_DECREF(this->stack);
        Textbuffer_dealloc(this->textbuffer);
        next = this->next;
        free(this);
        this = next;
    }
    Py_TYPE(self)->tp_free((PyObject *) self);
}

/*
    Initialize a new tokenizer instance's text field.
*/
static void
init_tokenizer_text(TokenizerInput *text)
{
    text->object = Py_None;
    Py_INCREF(Py_None);
    text->length = 0;
    text->kind = PyUnicode_WCHAR_KIND;
    text->data = NULL;
}

/*
    Initialize a new tokenizer instance by setting instance attributes.
*/
static int
Tokenizer_init(Tokenizer *self, PyObject *args, PyObject *kwds)
{
    static char *kwlist[] = {NULL};

    if (!PyArg_ParseTupleAndKeywords(args, kwds, "", kwlist)) {
        return -1;
    }
    init_tokenizer_text(&self->text);
    self->topstack = NULL;
    self->head = self->global = self->depth = 0;
    self->route_context = self->route_state = 0;
    self->bad_routes = NULL;
    self->skip_style_tags = 0;
    return 0;
}

/*
    Load input text into the tokenizer.
*/
static int
load_tokenizer_text(TokenizerInput *text, PyObject *input)
{
    dealloc_tokenizer_text(text);
    text->object = input;
    if (PyUnicode_READY(input) < 0) {
        return -1;
    }
    text->kind = PyUnicode_KIND(input);
    text->data = PyUnicode_DATA(input);
    text->length = PyUnicode_GET_LENGTH(input);
    return 0;
}

/*
    Build a list of tokens from a string of wikicode and return it.
*/
static PyObject *
Tokenizer_tokenize(Tokenizer *self, PyObject *args)
{
    PyObject *input, *tokens;
    unsigned long long context = 0;
    int skip_style_tags = 0;

    if (PyArg_ParseTuple(args, "U|Kp", &input, &context, &skip_style_tags)) {
        Py_INCREF(input);
        if (load_tokenizer_text(&self->text, input)) {
            return NULL;
        }
    } else {
        const char *encoded;
        Py_ssize_t size;

        /* Failed to parse a Unicode object; try a string instead. */
        PyErr_Clear();
        if (!PyArg_ParseTuple(
                args, "s#|Kp", &encoded, &size, &context, &skip_style_tags)) {
            return NULL;
        }
        if (!(input = PyUnicode_FromStringAndSize(encoded, size))) {
            return NULL;
        }
        if (load_tokenizer_text(&self->text, input)) {
            return NULL;
        }
    }
    self->head = self->global = self->depth = 0;
    self->skip_style_tags = skip_style_tags;
    self->bad_routes = NULL;

    tokens = Tokenizer_parse(self, context, 1);
    Tokenizer_free_bad_route_tree(self);

    if (!tokens || self->topstack) {
        Py_XDECREF(tokens);
        if (PyErr_Occurred()) {
            return NULL;
        }
        if (!ParserError && load_exceptions() < 0) {
            return NULL;
        }
        if (BAD_ROUTE) {
            RESET_ROUTE();
            PyErr_SetString(ParserError, "C tokenizer exited with BAD_ROUTE");
        } else if (self->topstack) {
            PyErr_SetString(ParserError,
                            "C tokenizer exited with non-empty token stack");
        } else {
            PyErr_SetString(ParserError, "C tokenizer exited unexpectedly");
        }
        return NULL;
    }
    return tokens;
}
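
/*
    Illustrative usage (a sketch; the real call site lives in the package's
    Python layer): this method backs CTokenizer.tokenize(), whose optional
    arguments map onto the "|Kp" part of the format strings above:

        from mwparserfromhell.parser._tokenizer import CTokenizer
        tokens = CTokenizer().tokenize("{{foo|bar}}", 0, False)
*/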

/*
    Load the HTML entity definitions from Python's html.entities module into
    the global entitydefs array.
*/
static int
load_entities(void)
{
    PyObject *tempmod, *defmap, *deflist;
    unsigned numdefs, i;
    PyObject *string;

    tempmod = PyImport_ImportModule("html.entities");
    if (!tempmod) {
        return -1;
    }
    defmap = PyObject_GetAttrString(tempmod, "entitydefs");
    if (!defmap) {
        return -1;
    }
    Py_DECREF(tempmod);
    deflist = PyDict_Keys(defmap);
    if (!deflist) {
        return -1;
    }
    Py_DECREF(defmap);
    numdefs = (unsigned) PyList_GET_SIZE(deflist);
    entitydefs = calloc(numdefs + 1, sizeof(char *));
    if (!entitydefs) {
        return -1;
    }
    for (i = 0; i < numdefs; i++) {
        string = PyUnicode_AsASCIIString(PyList_GET_ITEM(deflist, i));
        if (!string) {
            return -1;
        }
        entitydefs[i] = PyBytes_AsString(string);
        if (!entitydefs[i]) {
            return -1;
        }
    }
    Py_DECREF(deflist);
    return 0;
}
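
/*
    Sketch (hypothetical helper, not part of this file): because the array is
    calloc'd with one extra slot above, it is NULL-terminated and can be
    scanned like this, assuming <string.h>:

        static int
        is_known_entity(const char *name)
        {
            char **def;
            for (def = entitydefs; *def; def++) {
                if (strcmp(*def, name) == 0) {
                    return 1;
                }
            }
            return 0;
        }
*/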

/*
    Import mwparserfromhell.parser.tokens and hand the module off to
    load_tokens_from_module() so the C-level token types get cached.
*/
static int
load_tokens(void)
{
    PyObject *tempmod, *tokens;
    PyObject *globals = PyEval_GetGlobals(), *locals = PyEval_GetLocals(),
             *fromlist = PyList_New(1), *modname = PyUnicode_FromString("tokens");
    char *name = "mwparserfromhell.parser";

    if (!fromlist || !modname) {
        return -1;
    }
    PyList_SET_ITEM(fromlist, 0, modname);
    tempmod = PyImport_ImportModuleLevel(name, globals, locals, fromlist, 0);
    Py_DECREF(fromlist);
    if (!tempmod) {
        return -1;
    }
    tokens = PyObject_GetAttrString(tempmod, "tokens");
    Py_DECREF(tempmod);
    load_tokens_from_module(tokens);
    Py_DECREF(tokens);
    return 0;
}

/*
    Import mwparserfromhell.definitions and store it in the definitions
    global for use by the tokenizer.
*/
static int
load_defs(void)
{
    PyObject *tempmod;
    PyObject *globals = PyEval_GetGlobals(), *locals = PyEval_GetLocals(),
             *fromlist = PyList_New(1),
             *modname = PyUnicode_FromString("definitions");
    char *name = "mwparserfromhell";

    if (!fromlist || !modname) {
        return -1;
    }
    PyList_SET_ITEM(fromlist, 0, modname);
    tempmod = PyImport_ImportModuleLevel(name, globals, locals, fromlist, 0);
    Py_DECREF(fromlist);
    if (!tempmod) {
        return -1;
    }
    definitions = PyObject_GetAttrString(tempmod, "definitions");
    Py_DECREF(tempmod);
    return 0;
}

/*
    Import mwparserfromhell.parser and cache its ParserError exception type.
*/
static int
load_exceptions(void)
{
    PyObject *tempmod, *parsermod;
    PyObject *globals = PyEval_GetGlobals(), *locals = PyEval_GetLocals(),
             *fromlist = PyList_New(1), *modname = PyUnicode_FromString("parser");
    char *name = "mwparserfromhell";

    if (!fromlist || !modname) {
        return -1;
    }
    PyList_SET_ITEM(fromlist, 0, modname);
    tempmod = PyImport_ImportModuleLevel(name, globals, locals, fromlist, 0);
    Py_DECREF(fromlist);
    if (!tempmod) {
        return -1;
    }
    parsermod = PyObject_GetAttrString(tempmod, "parser");
    Py_DECREF(tempmod);
    ParserError = PyObject_GetAttrString(parsermod, "ParserError");
    Py_DECREF(parsermod);
    return 0;
}
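
/*
    For orientation (a sketch of the Python-side contract, not code from this
    file): the exception cached here is the one users catch as
    mwparserfromhell.parser.ParserError:

        import mwparserfromhell
        try:
            mwparserfromhell.parse(text)
        except mwparserfromhell.parser.ParserError as exc:
            print("parser bug:", exc)
*/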

/*
    Initialize the _tokenizer extension module: ready the Tokenizer type,
    expose it as CTokenizer, and load the supporting Python objects.
*/
PyMODINIT_FUNC
PyInit__tokenizer(void)
{
    PyObject *module;

    TokenizerType.tp_new = PyType_GenericNew;
    if (PyType_Ready(&TokenizerType) < 0) {
        return NULL;
    }
    module = PyModule_Create(&module_def);
    if (!module) {
        return NULL;
    }
    Py_INCREF(&TokenizerType);
    PyModule_AddObject(module, "CTokenizer", (PyObject *) &TokenizerType);
    Py_INCREF(Py_True);
    PyDict_SetItemString(TokenizerType.tp_dict, "USES_C", Py_True);
    NOARGS = PyTuple_New(0);
    if (!NOARGS || load_entities() || load_tokens() || load_defs()) {
        return NULL;
    }
    return module;
}
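
/*
    Illustrative check from Python (a sketch, assuming the extension built
    successfully; USES_C is the Py_True set on the type dict above):

        from mwparserfromhell.parser._tokenizer import CTokenizer
        assert CTokenizer.USES_C
*/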