diff --git a/mwparserfromhell/parser/tokenizer.c b/mwparserfromhell/parser/tokenizer.c
index 07d3988..1bc1f14 100644
--- a/mwparserfromhell/parser/tokenizer.c
+++ b/mwparserfromhell/parser/tokenizer.c
@@ -2272,7 +2272,8 @@ static PyObject* Tokenizer_parse_style(Tokenizer* self)
                     return NULL;
                 return Tokenizer_pop(self);
             }
-            self->topstack->context |= LC_STYLE_PASS_AGAIN;
+            if (context & LC_STYLE_ITALICS)
+                self->topstack->context |= LC_STYLE_PASS_AGAIN;
         }
         for (i = 0; i < ticks; i++) {
             if (Tokenizer_emit_char(self, *"'"))
diff --git a/mwparserfromhell/parser/tokenizer.py b/mwparserfromhell/parser/tokenizer.py
index 1061b9f..8fae729 100644
--- a/mwparserfromhell/parser/tokenizer.py
+++ b/mwparserfromhell/parser/tokenizer.py
@@ -823,7 +823,8 @@ class Tokenizer(object):
         except BadRoute as route:
             self._head = reset
             if route.context & contexts.STYLE_PASS_AGAIN:
-                stack = self._parse(route.context | contexts.STYLE_SECOND_PASS)
+                new_ctx = contexts.STYLE_ITALICS | contexts.STYLE_SECOND_PASS
+                stack = self._parse(new_ctx)
             else:
                 return self._emit_text("''")
         self._emit_style_tag("i", "''", stack)
@@ -912,7 +913,8 @@ class Tokenizer(object):
                 if self._context & contexts.STYLE_SECOND_PASS:
                     self._emit_text("'")
                     return self._pop()
-                self._context |= contexts.STYLE_PASS_AGAIN
+                if self._context & contexts.STYLE_ITALICS:
+                    self._context |= contexts.STYLE_PASS_AGAIN
             self._emit_text("'" * ticks)
         elif ticks == 2:
            self._parse_italics()
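
For context, the patch makes both tokenizers request a second italics pass only when the failed route was itself an italics route, rather than setting STYLE_PASS_AGAIN / LC_STYLE_PASS_AGAIN unconditionally. Below is a minimal sketch (not part of the patch or the project's test suite) of exercising the affected apostrophe handling through the library's public API; the input string is an assumption chosen to hit unclosed italics markup with a nested bold run.

    # Sketch only: pokes at the apostrophe/italics handling this diff touches.
    # The wikitext below is an assumed example, not a project test case.
    import mwparserfromhell

    # An unclosed italics run forces the tokenizer to abandon its first
    # STYLE_ITALICS route; the PASS_AGAIN / SECOND_PASS contexts govern the retry.
    wikitext = "''italic '''and bold''' but never closed"
    wikicode = mwparserfromhell.parse(wikitext)

    print(wikicode)                # round-trips the original text unchanged
    print(wikicode.filter_tags())  # inspect which style tags were actually emitted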