diff --git a/README.rst b/README.rst
index cf2715f..65474cf 100644
--- a/README.rst
+++ b/README.rst
@@ -189,11 +189,13 @@ Python 3 code (via the API_):
     API_URL = "https://en.wikipedia.org/w/api.php"
 
     def parse(title):
-        data = {"action": "query", "prop": "revisions", "rvlimit": 1,
-                "rvprop": "content", "format": "json", "titles": title}
+        data = {"action": "query", "prop": "revisions", "rvprop": "content",
+                "rvslots": "main", "rvlimit": 1, "titles": title,
+                "format": "json", "formatversion": "2"}
         raw = urlopen(API_URL, urlencode(data).encode()).read()
         res = json.loads(raw)
-        text = list(res["query"]["pages"].values())[0]["revisions"][0]["*"]
+        revision = res["query"]["pages"][0]["revisions"][0]
+        text = revision["slots"]["main"]["content"]
         return mwparserfromhell.parse(text)
 
 .. _MediaWiki: http://mediawiki.org
diff --git a/mwparserfromhell/nodes/tag.py b/mwparserfromhell/nodes/tag.py
index c6b88e3..70a2876 100644
--- a/mwparserfromhell/nodes/tag.py
+++ b/mwparserfromhell/nodes/tag.py
@@ -300,7 +300,10 @@ class Tag(Node):
         return attr
 
     def remove(self, name):
-        """Remove all attributes with the given *name*."""
+        """Remove all attributes with the given *name*.
+
+        Raises :exc:`ValueError` if none were found.
+        """
         attrs = [attr for attr in self.attributes if attr.name == name.strip()]
         if not attrs:
             raise ValueError(name)
diff --git a/tests/test_docs.py b/tests/test_docs.py
index bc4da1d..ef596d6 100644
--- a/tests/test_docs.py
+++ b/tests/test_docs.py
@@ -114,14 +114,16 @@ class TestDocs(unittest.TestCase):
         url1 = "https://en.wikipedia.org/w/api.php"
         url2 = "https://en.wikipedia.org/w/index.php?title={0}&action=raw"
         title = "Test"
-        data = {"action": "query", "prop": "revisions", "rvlimit": 1,
-                "rvprop": "content", "format": "json", "titles": title}
+        data = {"action": "query", "prop": "revisions", "rvprop": "content",
+                "rvslots": "main", "rvlimit": 1, "titles": title,
+                "format": "json", "formatversion": "2"}
         try:
             raw = urlopen(url1, urlencode(data).encode("utf8")).read()
         except IOError:
             self.skipTest("cannot continue because of unsuccessful web call")
         res = json.loads(raw.decode("utf8"))
-        text = list(res["query"]["pages"].values())[0]["revisions"][0]["*"]
+        revision = res["query"]["pages"][0]["revisions"][0]
+        text = revision["slots"]["main"]["content"]
         try:
             expected = urlopen(url2.format(title)).read().decode("utf8")
         except IOError:
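
Background on the query change: with "formatversion": "2" the MediaWiki API returns query.pages as a JSON list rather than a title-keyed object, and with "rvslots": "main" the wikitext sits under revisions[0].slots.main.content instead of the old "*" key, which is why the list(...values())[0]["*"] lookup is replaced in both the README and the test. A minimal sketch of the new extraction, run against an illustrative response (the field values below are made up, not real API output):

    # Illustrative formatversion=2 response shape; "pages" is a list and the
    # wikitext lives under revisions[0]["slots"]["main"]["content"].
    res = {
        "query": {
            "pages": [
                {
                    "title": "Test",
                    "revisions": [
                        {"slots": {"main": {"content": "wikitext goes here"}}}
                    ],
                }
            ]
        }
    }

    # Same extraction the patch introduces.
    revision = res["query"]["pages"][0]["revisions"][0]
    text = revision["slots"]["main"]["content"]  # -> "wikitext goes here"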