@@ -189,11 +189,13 @@ Python 3 code (via the API_):

     API_URL = "https://en.wikipedia.org/w/api.php"

     def parse(title):
-        data = {"action": "query", "prop": "revisions", "rvlimit": 1,
-                "rvprop": "content", "format": "json", "titles": title}
+        data = {"action": "query", "prop": "revisions", "rvprop": "content",
+                "rvslots": "main", "rvlimit": 1, "titles": title,
+                "format": "json", "formatversion": "2"}
         raw = urlopen(API_URL, urlencode(data).encode()).read()
         res = json.loads(raw)
-        text = list(res["query"]["pages"].values())[0]["revisions"][0]["*"]
+        revision = res["query"]["pages"][0]["revisions"][0]
+        text = revision["slots"]["main"]["content"]
         return mwparserfromhell.parse(text)

 .. _MediaWiki: http://mediawiki.org
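
Why the indexing changes: with the legacy response format, "pages" is an object keyed by page ID and the wikitext sits under the "*" key, while formatversion=2 returns "pages" as a list and nests the text under the requested slot. A minimal offline sketch of the two access patterns (the payloads below are abbreviated and illustrative, not verbatim API output):

    import json

    # Abbreviated, illustrative payloads -- not verbatim MediaWiki API output.
    legacy = json.loads(
        '{"query": {"pages": {"12345": {"revisions": [{"*": "wikitext"}]}}}}')
    modern = json.loads(
        '{"query": {"pages": [{"revisions":'
        ' [{"slots": {"main": {"content": "wikitext"}}}]}]}}')

    # Old access pattern: pages keyed by page ID, content under "*".
    old_text = list(legacy["query"]["pages"].values())[0]["revisions"][0]["*"]

    # New access pattern (formatversion=2): pages is a list, content lives in the slot.
    revision = modern["query"]["pages"][0]["revisions"][0]
    new_text = revision["slots"]["main"]["content"]

    assert old_text == new_text == "wikitext"
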
@@ -300,7 +300,10 @@ class Tag(Node):
         return attr

     def remove(self, name):
-        """Remove all attributes with the given *name*."""
+        """Remove all attributes with the given *name*.
+
+        Raises :exc:`ValueError` if none were found.
+        """
         attrs = [attr for attr in self.attributes if attr.name == name.strip()]
         if not attrs:
             raise ValueError(name)
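
The docstring now states the behaviour the hunk already shows: remove() raises ValueError when no attribute matches the given name. A short, hedged usage sketch (assuming the span tag below parses to a Tag node, as ordinary HTML tags in wikitext do):

    import mwparserfromhell

    code = mwparserfromhell.parse('<span class="foo">text</span>')
    tag = code.filter_tags()[0]

    tag.remove("class")      # strips the matching attribute from the tag
    print(code)              # the class attribute is gone from the wikitext

    try:
        tag.remove("class")  # no attribute named "class" is left
    except ValueError as err:
        print("nothing to remove:", err)
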
@@ -114,14 +114,16 @@ class TestDocs(unittest.TestCase):
         url1 = "https://en.wikipedia.org/w/api.php"
         url2 = "https://en.wikipedia.org/w/index.php?title={0}&action=raw"
         title = "Test"
-        data = {"action": "query", "prop": "revisions", "rvlimit": 1,
-                "rvprop": "content", "format": "json", "titles": title}
+        data = {"action": "query", "prop": "revisions", "rvprop": "content",
+                "rvslots": "main", "rvlimit": 1, "titles": title,
+                "format": "json", "formatversion": "2"}
         try:
             raw = urlopen(url1, urlencode(data).encode("utf8")).read()
         except IOError:
             self.skipTest("cannot continue because of unsuccessful web call")
         res = json.loads(raw.decode("utf8"))
-        text = list(res["query"]["pages"].values())[0]["revisions"][0]["*"]
+        revision = res["query"]["pages"][0]["revisions"][0]
+        text = revision["slots"]["main"]["content"]
         try:
             expected = urlopen(url2.format(title)).read().decode("utf8")
         except IOError:
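
For reference, the updated test logic pulled out as a standalone sketch: fetch the same page once through the API (new formatversion=2 shape) and once via index.php?action=raw, then check that parsing round-trips to the raw text. The URLs, title, and comparison mirror the hunk above; running it needs network access, which the test skips on when unavailable.

    import json
    from urllib.parse import urlencode
    from urllib.request import urlopen

    import mwparserfromhell

    url1 = "https://en.wikipedia.org/w/api.php"
    url2 = "https://en.wikipedia.org/w/index.php?title={0}&action=raw"
    title = "Test"

    # Same request parameters as the updated test.
    data = {"action": "query", "prop": "revisions", "rvprop": "content",
            "rvslots": "main", "rvlimit": 1, "titles": title,
            "format": "json", "formatversion": "2"}

    raw = urlopen(url1, urlencode(data).encode("utf8")).read()
    res = json.loads(raw.decode("utf8"))
    revision = res["query"]["pages"][0]["revisions"][0]
    text = revision["slots"]["main"]["content"]

    expected = urlopen(url2.format(title)).read().decode("utf8")

    # Parsing and re-stringifying the API content should match the raw fetch.
    assert str(mwparserfromhell.parse(text)) == expected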