@@ -37,18 +37,19 @@ __email__ = "ben.kurtovic@verizon.net"
 __release__ = False
 if not __release__:
-    def _add_git_commit_id_to_version_string(version):
+    def _get_git_commit_id():
         """Return the ID of the git HEAD commit."""
         from git import Repo
         from os.path import split, dirname
         path = split(dirname(__file__))[0]
         commit_id = Repo(path).head.object.hexsha
-        return version + ".git+" + commit_id[:8]
+        return commit_id[:8]
     try:
-        __version__ = _add_git_commit_id_to_version_string(__version__)
+        __version__ += ".git+" + _get_git_commit_id()
     except Exception:
         pass
     finally:
-        del _add_git_commit_id_to_version_string
+        del _get_git_commit_id
 from earwigbot import bot
 from earwigbot import commands
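
Note on the hunk above: the renamed helper now returns only the abbreviated commit hash, and the caller appends it to the version string itself. A rough standalone sketch of the same idea, assuming GitPython is installed and the package lives inside a git checkout (the base version string and the hash in the comment are placeholders):

    from git import Repo

    def _get_git_commit_id(path="."):
        # First eight characters of the SHA-1 of the commit HEAD points to.
        return Repo(path).head.object.hexsha[:8]

    __version__ = "0.1.dev"
    __version__ += ".git+" + _get_git_commit_id()  # e.g. "0.1.dev.git+1a2b3c4d"
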
@@ -23,7 +23,6 @@
 from urllib import quote_plus
+from earwigbot import exceptions
 from earwigbot import wiki
 from earwigbot.commands import BaseCommand
 class Command(BaseCommand):
@@ -47,7 +46,7 @@ class Command(BaseCommand):
         try:
             count = user.editcount
-        except earwigbot.UserNotFoundError:
+        except exceptions.UserNotFoundError:
             msg = "the user \x0302{0}\x0301 does not exist."
             self.reply(data, msg.format(name))
             return
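
The same rename runs through the next few hunks: exception classes are now reached through the earwigbot.exceptions module rather than as attributes of the top-level earwigbot package. A minimal sketch of the new catch pattern; it assumes site is a wiki.Site whose get_user() returns a User object with the editcount property seen above (get_user() and the helper name are assumptions, not part of the diff):

    from earwigbot import exceptions

    def safe_editcount(site, name):
        # Return the user's edit count, or None if the account doesn't exist.
        user = site.get_user(name)
        try:
            return user.editcount
        except exceptions.UserNotFoundError:
            return None
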
@@ -23,7 +23,6 @@
 import time
+from earwigbot import exceptions
 from earwigbot import wiki
 from earwigbot.commands import BaseCommand
 class Command(BaseCommand):
@@ -21,7 +21,6 @@
 # SOFTWARE.
+from earwigbot import exceptions
 from earwigbot import wiki
 from earwigbot.commands import BaseCommand
 class Command(BaseCommand):
@@ -79,7 +79,7 @@ class _ResourceManager(object):
         """
         f, path, desc = imp.find_module(name, [path])
         try:
-            module = imp.load_module(name, f, path, desc)
+            module = imp.load_module(name, f, path, desc)
         except Exception:
             e = "Couldn't load module {0} (from {1})"
             self.logger.exception(e.format(name, path))
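
The resource manager wraps imp.load_module() so a command or task module that fails to import is logged and skipped instead of taking the bot down. A rough self-contained sketch of the same Python 2 imp pattern (the function name and logger are illustrative, not part of the diff), which also closes the file handle that imp.find_module() leaves open:

    import imp
    import logging

    logger = logging.getLogger("loader")

    def load_resource_module(name, directory):
        # Locate the module file in the given directory, then import it.
        f, path, desc = imp.find_module(name, [directory])
        try:
            return imp.load_module(name, f, path, desc)
        except Exception:
            logger.exception("Couldn't load module %s (from %s)", name, path)
            return None
        finally:
            if f:
                f.close()  # find_module() can leave a file object open
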
@@ -107,9 +107,9 @@ class BaseTask(object):
         Otherwise, we'll use our default site.
         """
         if not site:
-            try:
-                site = self.site
-            except AttributeError:
+            if hasattr(self, "site"):
+                site = getattr(self, "site")
+            else:
                 site = self.bot.wiki.get_site()
         try:
@@ -91,7 +91,7 @@ class Task(BaseTask):
     def update(self, num_days):
         self.logger.info("Updating past {0} days".format(num_days))
         generator = self.backwards_cat_iterator()
-        for d in xrange(num_days):
+        for i in xrange(num_days):
             category = generator.next()
             date = category.title.split("/")[-1]
             self.update_date(date, category)
@@ -102,7 +102,7 @@ class Task(BaseTask):
         self.logger.info("Generating chart for past {0} days".format(num_days))
         data = OrderedDict()
         generator = self.backwards_cat_iterator()
-        for d in xrange(num_days):
+        for i in xrange(num_days):
             category = generator.next()
             date = category.title.split("/")[-1]
             data[date] = self.get_date_counts(date)
@@ -110,7 +110,7 @@ class Page(CopyrightMixin):
         if prefix != title:  # ignore a page that's titled "Category" or "User"
             try:
                 self._namespace = self._site.namespace_name_to_id(prefix)
-            except NamespaceNotFoundError:
+            except exceptions.NamespaceNotFoundError:
                 self._namespace = 0
         else:
             self._namespace = 0
@@ -191,10 +191,10 @@ class Page(CopyrightMixin):
         Assuming the API is sound, this should not raise any exceptions.
         """
         if not result:
-            params = {"action": "query", "rvprop": "user", "intoken": "edit",
-                      "prop": "info|revisions", "rvlimit": 1, "rvdir": "newer",
-                      "titles": self._title, "inprop": "protection|url"}
-            result = self._site._api_query(params)
+            query = self._site.api_query
+            result = query(action="query", rvprop="user", intoken="edit",
+                           prop="info|revisions", rvlimit=1, rvdir="newer",
+                           titles=self._title, inprop="protection|url")
         res = result["query"]["pages"].values()[0]
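
This and the following Page, Site, and User hunks all make the same switch: callers go through the public Site.api_query() and pass MediaWiki API parameters as keyword arguments instead of assembling a params dict for the private _api_query(). A hedged sketch of the new call style, assuming site is an earwigbot wiki.Site (for example from bot.wiki.get_site()) and using a placeholder title:

    def fetch_page_info(site, title):
        # Keyword arguments map one-to-one onto MediaWiki API parameters.
        result = site.api_query(action="query", prop="info|revisions",
                                rvprop="content|timestamp", rvlimit=1,
                                titles=title)
        # Python 2: dict.values() returns a list, so [0] is the only page.
        return result["query"]["pages"].values()[0]
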
@@ -255,9 +255,9 @@ class Page(CopyrightMixin):
         want to force content reloading.
         """
         if not result:
-            params = {"action": "query", "prop": "revisions", "rvlimit": 1,
-                      "rvprop": "content|timestamp", "titles": self._title}
-            result = self._site._api_query(params)
+            query = self._site.api_query
+            result = query(action="query", prop="revisions", rvlimit=1,
+                           rvprop="content|timestamp", titles=self._title)
         res = result["query"]["pages"].values()[0]
         try:
@@ -302,8 +302,8 @@ class Page(CopyrightMixin):
         # Try the API query, catching most errors with our handler:
         try:
-            result = self._site._api_query(params)
-        except SiteAPIError as error:
+            result = self._site.api_query(**params)
+        except exceptions.SiteAPIError as error:
             if not hasattr(error, "code"):
                 raise  # We can only handle errors with a code attribute
             result = self._handle_edit_errors(error, params, tries)
@@ -609,10 +609,10 @@ class Page(CopyrightMixin):
         if self._exists == 0:
             # Kill two birds with one stone by doing an API query for both our
             # attributes and our page content:
-            params = {"action": "query", "rvlimit": 1, "titles": self._title,
-                      "prop": "info|revisions", "inprop": "protection|url",
-                      "intoken": "edit", "rvprop": "content|timestamp"}
-            result = self._site._api_query(params)
+            query = self._site.api_query
+            result = query(action="query", rvlimit=1, titles=self._title,
+                           prop="info|revisions", inprop="protection|url",
+                           intoken="edit", rvprop="content|timestamp")
             self._load_attributes(result=result)
             self._assert_existence()
             self._load_content(result=result)
@@ -122,6 +122,7 @@ class Site(object):
         self._wait_between_queries = wait_between_queries
         self._max_retries = 5
         self._last_query_time = 0
+        self._api_lock = Lock()
         # Attributes used for SQL queries:
         self._sql_data = sql
@@ -278,7 +279,7 @@ class Site(object):
             return self._api_query(params, tries=tries, wait=wait*3)
         else:  # Some unknown error occurred
             e = 'API query failed: got error "{0}"; server says: "{1}".'
-            error = earwigbot.SiteAPIError(e.format(code, info))
+            error = exceptions.SiteAPIError(e.format(code, info))
             error.code, error.info = code, info
             raise error
@@ -297,17 +298,16 @@ class Site(object):
         attrs = [self._name, self._project, self._lang, self._base_url,
                  self._article_path, self._script_path]
-        params = {"action": "query", "meta": "siteinfo"}
+        params = {"action": "query", "meta": "siteinfo", "siprop": "general"}
         if not self._namespaces or force:
-            params["siprop"] = "general|namespaces|namespacealiases"
-            result = self._api_query(params)
+            params["siprop"] += "|namespaces|namespacealiases"
+            result = self.api_query(**params)
             self._load_namespaces(result)
         elif all(attrs):  # Everything is already specified and we're not told
             return        # to force a reload, so do nothing
         else:  # We're only loading attributes other than _namespaces
-            params["siprop"] = "general"
-            result = self._api_query(params)
+            result = self.api_query(**params)
         res = result["query"]["general"]
         self._name = res["wikiid"]
@@ -402,8 +402,7 @@ class Site(object):
         username argument) when cookie lookup fails, probably indicating that
         we are logged out.
         """
-        params = {"action": "query", "meta": "userinfo"}
-        result = self._api_query(params)
+        result = self.api_query(action="query", meta="userinfo")
         return result["query"]["userinfo"]["name"]
     def _get_username(self):
@@ -456,12 +455,14 @@ class Site(object):
         loop if MediaWiki isn't acting right.
         """
         name, password = login
-        params = {"action": "login", "lgname": name, "lgpassword": password}
         if token:
-            params["lgtoken"] = token
-        result = self._api_query(params)
-        res = result["login"]["result"]
+            result = self.api_query(action="login", lgname=name,
+                                    lgpassword=password, lgtoken=token)
+        else:
+            result = self.api_query(action="login", lgname=name,
+                                    lgpassword=password)
+        res = result["login"]["result"]
         if res == "Success":
             self._save_cookiejar()
         elif res == "NeedToken" and attempt == 0:
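
For context, MediaWiki's pre-1.27 action=login is a two-step handshake: the first request typically answers NeedToken, and the request is repeated with lgtoken set. A rough sketch of the flow this hunk implements, rewritten as a free function in the new keyword-argument style (the helper name and return values are illustrative):

    def do_login(site, name, password, token=None, attempt=0):
        if token:
            result = site.api_query(action="login", lgname=name,
                                    lgpassword=password, lgtoken=token)
        else:
            result = site.api_query(action="login", lgname=name,
                                    lgpassword=password)
        res = result["login"]["result"]
        if res == "Success":
            return True
        if res == "NeedToken" and attempt == 0:
            # Retry once, echoing back the token the API handed us.
            return do_login(site, name, password,
                            result["login"]["token"], attempt + 1)
        return False
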
@@ -487,8 +488,7 @@ class Site(object):
         cookiejar (which probably contains now-invalidated cookies) and try to
         save it, if it supports that sort of thing.
         """
-        params = {"action": "logout"}
-        self._api_query(params)
+        self.api_query(action="logout")
         self._cookiejar.clear()
         self._save_cookiejar()
@@ -574,7 +574,8 @@ class Site(object):
         There is helpful MediaWiki API documentation at `MediaWiki.org
         <http://www.mediawiki.org/wiki/API>`_.
         """
-        return self._api_query(kwargs)
+        with self._api_lock:
+            return self._api_query(kwargs)
     def sql_query(self, query, params=(), plain_query=False, dict_cursor=False,
                   cursor_class=None, show_table=False):
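
The with self._api_lock block above pairs with the _api_lock = Lock() line added to Site.__init__ earlier (presumably threading.Lock): only one thread at a time issues an API request through a given Site, so concurrent IRC commands and tasks can't interleave requests over the shared cookiejar and throttling state. A minimal self-contained sketch of the pattern, not the real Site class:

    from threading import Lock

    class ThreadSafeSite(object):
        # Illustrative only: serialize access to a non-thread-safe backend.
        def __init__(self):
            self._api_lock = Lock()

        def _api_query(self, params):
            # Stand-in for the real HTTP request to the MediaWiki API.
            return {"echo": params}

        def api_query(self, **kwargs):
            with self._api_lock:  # one API request per Site at a time
                return self._api_query(kwargs)
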
@@ -105,9 +105,9 @@ class User(object):
         Normally, this is called by _get_attribute() when a requested attribute
         is not defined. This defines it.
         """
-        params = {"action": "query", "list": "users", "ususers": self._name,
-                  "usprop": "blockinfo|groups|rights|editcount|registration|emailable|gender"}
-        result = self._site._api_query(params)
+        props = "blockinfo|groups|rights|editcount|registration|emailable|gender"
+        result = self._site.api_query(action="query", list="users",
+                                      ususers=self._name, usprop=props)
         res = result["query"]["users"][0]
         # normalize our username in case it was entered oddly