@@ -2783,12 +2783,6 @@ Tokenizer_handle_table_cell(Tokenizer* self, const char *markup,
                               line_context))
         return -1;
     padding = Tokenizer_parse_as_table_style(self, '|', 0);
-    if (BAD_ROUTE) {
-        trash = Tokenizer_pop(self);
-        Py_XDECREF(trash);
-        self->head = reset;
-        return 0;
-    }
     if (!padding)
         return -1;
     style = Tokenizer_pop(self);
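For context, the deleted C block is the CTokenizer's usual failed-route cleanup after a speculative parse: pop and discard the token stack pushed for the attempt, rewind the read head to the saved reset point, and bail out without signalling an error. A minimal Python sketch of that recovery pattern, using hypothetical names rather than the project's actual tokenizer API:

```python
class BadRoute(Exception):
    """Signals that a speculative parse path turned out to be invalid."""


class SketchTokenizer:
    """Illustrative only -- not the real Tokenizer class."""

    def __init__(self, text):
        self.text = text
        self.head = 0     # current read position in the input
        self.stacks = []  # one token list per speculative parse

    def _push(self):
        self.stacks.append([])

    def _pop(self):
        return self.stacks.pop()

    def _parse_as_table_style(self, stop):
        raise BadRoute()  # stand-in for the real style parser

    def _handle_table_cell(self):
        reset = self.head  # where the cell markup started
        self._push()
        try:
            padding = self._parse_as_table_style("|")
        except BadRoute:
            # The cleanup the removed C block performed: discard the
            # speculative stack, rewind the head, and return without
            # raising a parser error.
            self._pop()
            self.head = reset
            return None
        style = self._pop()
        return padding, style
```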
@@ -1325,7 +1325,7 @@ class Tokenizer(object):
             elif this in ("\n", ":") and self._context & contexts.DL_TERM:
                 self._handle_dl_term()
                 if this == "\n":
-                    # kill potential table contexts
+                    # Kill potential table contexts
                     self._context &= ~contexts.TABLE_CELL_LINE_CONTEXTS
             # Start of table parsing
             elif this == "{" and next == "|" and (self._read(-1) in ("\n", self.START) or
@@ -447,6 +447,13 @@ output: [TagOpenOpen(wiki_markup=":"), Text(text="dd"), TagCloseSelfclose(), Tag
 ---
 
+name: dt_dd_mix4
+label: another example of correct dt/dd usage, with a trigger for a specific parse route
+input: ";foo]:bar"
+output: [TagOpenOpen(wiki_markup=";"), Text(text="dt"), TagCloseSelfclose(), Text(text="foo]"), TagOpenOpen(wiki_markup=":"), Text(text="dd"), TagCloseSelfclose(), Text(text="bar")]
+
+---
+
 name: ul_ol_dt_dd_mix
 label: an assortment of uls, ols, dds, and dts
 input: ";:#*foo\n:#*;foo\n#*;:foo\n*;:#foo"