A Python parser for MediaWiki wikicode https://mwparserfromhell.readthedocs.io/
/*
Tokenizer Header File for MWParserFromHell
Copyright (C) 2012 Ben Kurtovic <ben.kurtovic@verizon.net>

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
#ifndef PY_SSIZE_T_CLEAN
#define PY_SSIZE_T_CLEAN
#endif

#include <Python.h>
#include <math.h>
#include <structmember.h>

#if PY_MAJOR_VERSION >= 3
#define IS_PY3K
#endif

#define malloc PyObject_Malloc
#define free PyObject_Free
static const char* MARKERS[] = {
    "{", "}", "[", "]", "<", ">", "|", "=", "&", "#", "*", ";", ":", "/", "-",
    "!", "\n", ""};

#define NUM_MARKERS 18
#define TEXTBUFFER_BLOCKSIZE 1024
#define MAX_ENTITY_SIZE 8

static int route_state = 0;
#define BAD_ROUTE     (route_state)
#define FAIL_ROUTE()  (route_state = 1)
#define RESET_ROUTE() (route_state = 0)
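/*
    The route_state flag implements cheap backtracking. A sketch of the
    intended pattern, inferred from these macros rather than defined in this
    header: a parsing routine calls FAIL_ROUTE() when it hits a dead end, and
    the caller checks BAD_ROUTE, resets it, discards the speculative stack,
    and falls back to emitting the input as plain text:

        Tokenizer_parse_template(self);
        if (BAD_ROUTE) {
            RESET_ROUTE();
            self->head = reset;  // `reset` is a hypothetical saved position
            Tokenizer_write_text(self, *"{");
        }
*/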
static char** entitydefs;

static PyObject* EMPTY;
static PyObject* NOARGS;
static PyObject* tokens;
/* Tokens */

static PyObject* Text;

static PyObject* TemplateOpen;
static PyObject* TemplateParamSeparator;
static PyObject* TemplateParamEquals;
static PyObject* TemplateClose;

static PyObject* ArgumentOpen;
static PyObject* ArgumentSeparator;
static PyObject* ArgumentClose;

static PyObject* WikilinkOpen;
static PyObject* WikilinkSeparator;
static PyObject* WikilinkClose;

static PyObject* HTMLEntityStart;
static PyObject* HTMLEntityNumeric;
static PyObject* HTMLEntityHex;
static PyObject* HTMLEntityEnd;

static PyObject* HeadingStart;
static PyObject* HeadingEnd;

static PyObject* CommentStart;
static PyObject* CommentEnd;

static PyObject* TagOpenOpen;
static PyObject* TagAttrStart;
static PyObject* TagAttrEquals;
static PyObject* TagAttrQuote;
static PyObject* TagCloseOpen;
static PyObject* TagCloseSelfclose;
static PyObject* TagOpenClose;
static PyObject* TagCloseClose;
/* Local contexts: */

#define LC_TEMPLATE             0x00007
#define LC_TEMPLATE_NAME        0x00001
#define LC_TEMPLATE_PARAM_KEY   0x00002
#define LC_TEMPLATE_PARAM_VALUE 0x00004

#define LC_ARGUMENT         0x00018
#define LC_ARGUMENT_NAME    0x00008
#define LC_ARGUMENT_DEFAULT 0x00010

#define LC_WIKILINK       0x00060
#define LC_WIKILINK_TITLE 0x00020
#define LC_WIKILINK_TEXT  0x00040

#define LC_HEADING         0x01F80
#define LC_HEADING_LEVEL_1 0x00080
#define LC_HEADING_LEVEL_2 0x00100
#define LC_HEADING_LEVEL_3 0x00200
#define LC_HEADING_LEVEL_4 0x00400
#define LC_HEADING_LEVEL_5 0x00800
#define LC_HEADING_LEVEL_6 0x01000

#define LC_COMMENT 0x02000

#define LC_HAS_TEXT       0x04000
#define LC_FAIL_ON_TEXT   0x08000
#define LC_FAIL_NEXT      0x10000
#define LC_FAIL_ON_LBRACE 0x20000
#define LC_FAIL_ON_RBRACE 0x40000
#define LC_FAIL_ON_EQUALS 0x80000
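/*
    Each aggregate mask above is the bitwise OR of its members, e.g.
    LC_TEMPLATE == LC_TEMPLATE_NAME | LC_TEMPLATE_PARAM_KEY |
    LC_TEMPLATE_PARAM_VALUE (0x1 | 0x2 | 0x4 == 0x7), so a single test
    covers a whole family of sub-contexts:

        if (self->topstack->context & LC_TEMPLATE) {
            // inside a template name, param key, or param value
        }
*/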
/* Global contexts: */

#define GL_HEADING 0x1
/* Miscellaneous structs: */

struct Textbuffer {
    Py_ssize_t size;
    Py_UNICODE* data;
    struct Textbuffer* next;
};
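/*
    The text buffer is a linked list of fixed-size blocks of
    TEXTBUFFER_BLOCKSIZE Py_UNICODE characters; `size` counts the characters
    used so far in the current block. Presumably a fresh block is chained on
    when one fills, so single characters can be appended without
    reallocation, and Textbuffer_render() later joins the blocks into one
    Python string.
*/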
struct Stack {
    PyObject* stack;
    int context;
    struct Textbuffer* textbuffer;
    struct Stack* next;
};
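/*
    Each Stack node pairs a Python list of tokens under construction
    (`stack`) with its local context flags and pending text buffer; `next`
    points to the enclosing node. Tokenizer_push()/Tokenizer_pop() grow and
    shrink this chain, which is what makes the speculative routes sketched
    above cheap to abandon.
*/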
typedef struct {
    PyObject* title;
    int level;
} HeadingData;
/* Tokenizer object definition: */

typedef struct {
    PyObject_HEAD
    PyObject* text;         /* text to tokenize */
    struct Stack* topstack; /* topmost stack */
    Py_ssize_t head;        /* current position in text */
    Py_ssize_t length;      /* length of text */
    int global;             /* global context */
} Tokenizer;

/* Macros for accessing Tokenizer data: */

#define Tokenizer_READ(self, delta) \
    (*PyUnicode_AS_UNICODE(Tokenizer_read(self, delta)))
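/*
    Typical use of the macro (a sketch): Tokenizer_read() returns a
    one-character unicode object at position self->head + delta, and
    Tokenizer_READ dereferences its raw Py_UNICODE storage:

        Py_UNICODE this = Tokenizer_READ(self, 0);
        Py_UNICODE next = Tokenizer_READ(self, 1);
*/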
/* Function prototypes: */

static PyObject* Tokenizer_new(PyTypeObject*, PyObject*, PyObject*);
static struct Textbuffer* Textbuffer_new(void);
static void Tokenizer_dealloc(Tokenizer*);
static void Textbuffer_dealloc(struct Textbuffer*);
static int Tokenizer_init(Tokenizer*, PyObject*, PyObject*);
static int Tokenizer_push(Tokenizer*, int);
static PyObject* Textbuffer_render(struct Textbuffer*);
static int Tokenizer_push_textbuffer(Tokenizer*);
static void Tokenizer_delete_top_of_stack(Tokenizer*);
static PyObject* Tokenizer_pop(Tokenizer*);
static PyObject* Tokenizer_pop_keeping_context(Tokenizer*);
static void* Tokenizer_fail_route(Tokenizer*);
static int Tokenizer_write(Tokenizer*, PyObject*);
static int Tokenizer_write_first(Tokenizer*, PyObject*);
static int Tokenizer_write_text(Tokenizer*, Py_UNICODE);
static int Tokenizer_write_all(Tokenizer*, PyObject*);
static int Tokenizer_write_text_then_stack(Tokenizer*, const char*);
static PyObject* Tokenizer_read(Tokenizer*, Py_ssize_t);
static PyObject* Tokenizer_read_backwards(Tokenizer*, Py_ssize_t);
static int Tokenizer_parse_template_or_argument(Tokenizer*);
static int Tokenizer_parse_template(Tokenizer*);
static int Tokenizer_parse_argument(Tokenizer*);
static int Tokenizer_handle_template_param(Tokenizer*);
static int Tokenizer_handle_template_param_value(Tokenizer*);
static PyObject* Tokenizer_handle_template_end(Tokenizer*);
static int Tokenizer_handle_argument_separator(Tokenizer*);
static PyObject* Tokenizer_handle_argument_end(Tokenizer*);
static int Tokenizer_parse_wikilink(Tokenizer*);
static int Tokenizer_handle_wikilink_separator(Tokenizer*);
static PyObject* Tokenizer_handle_wikilink_end(Tokenizer*);
static int Tokenizer_parse_heading(Tokenizer*);
static HeadingData* Tokenizer_handle_heading_end(Tokenizer*);
static int Tokenizer_really_parse_entity(Tokenizer*);
static int Tokenizer_parse_entity(Tokenizer*);
static int Tokenizer_parse_comment(Tokenizer*);
static void Tokenizer_verify_safe(Tokenizer*, int, Py_UNICODE);
static PyObject* Tokenizer_parse(Tokenizer*, int);
static PyObject* Tokenizer_tokenize(Tokenizer*, PyObject*);
/* More structs for creating the Tokenizer type: */

static PyMethodDef
Tokenizer_methods[] = {
    {"tokenize", (PyCFunction) Tokenizer_tokenize, METH_VARARGS,
     "Build a list of tokens from a string of wikicode and return it."},
    {NULL}
};
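/*
    From Python, the type is expected to be used roughly like this (a
    sketch; the import path is an assumption based on tp_name below):

        from _tokenizer import CTokenizer
        tokens = CTokenizer().tokenize(u"{{foo|bar}}")
*/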
static PyMemberDef
Tokenizer_members[] = {
    {NULL}
};

static PyMethodDef
module_methods[] = {
    {NULL}
};
static PyTypeObject
TokenizerType = {
    PyObject_HEAD_INIT(NULL)
    0,                                              /* ob_size */
    "_tokenizer.CTokenizer",                        /* tp_name */
    sizeof(Tokenizer),                              /* tp_basicsize */
    0,                                              /* tp_itemsize */
    (destructor) Tokenizer_dealloc,                 /* tp_dealloc */
    0,                                              /* tp_print */
    0,                                              /* tp_getattr */
    0,                                              /* tp_setattr */
    0,                                              /* tp_compare */
    0,                                              /* tp_repr */
    0,                                              /* tp_as_number */
    0,                                              /* tp_as_sequence */
    0,                                              /* tp_as_mapping */
    0,                                              /* tp_hash */
    0,                                              /* tp_call */
    0,                                              /* tp_str */
    0,                                              /* tp_getattro */
    0,                                              /* tp_setattro */
    0,                                              /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT,                             /* tp_flags */
    "Creates a list of tokens from a string of wikicode.", /* tp_doc */
    0,                                              /* tp_traverse */
    0,                                              /* tp_clear */
    0,                                              /* tp_richcompare */
    0,                                              /* tp_weaklistoffset */
    0,                                              /* tp_iter */
    0,                                              /* tp_iternext */
    Tokenizer_methods,                              /* tp_methods */
    Tokenizer_members,                              /* tp_members */
    0,                                              /* tp_getset */
    0,                                              /* tp_base */
    0,                                              /* tp_dict */
    0,                                              /* tp_descr_get */
    0,                                              /* tp_descr_set */
    0,                                              /* tp_dictoffset */
    (initproc) Tokenizer_init,                      /* tp_init */
    0,                                              /* tp_alloc */
    Tokenizer_new,                                  /* tp_new */
};
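/*
    This header declares no module init function; a minimal Python 2 sketch
    of what the accompanying .c file presumably provides (the name
    init_tokenizer is an assumption) would be:

        PyMODINIT_FUNC init_tokenizer(void)
        {
            PyObject* module;

            if (PyType_Ready(&TokenizerType) < 0)
                return;
            module = Py_InitModule("_tokenizer", module_methods);
            Py_INCREF(&TokenizerType);
            PyModule_AddObject(module, "CTokenizer",
                               (PyObject*) &TokenizerType);
        }

    Note that the PyObject_HEAD_INIT(NULL)/ob_size layout above is the
    Python 2 form; a Python 3 build (see IS_PY3K) would instead use
    PyVarObject_HEAD_INIT and a PyModuleDef.
*/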