Additional IRC commands and bot tasks for EarwigBot https://en.wikipedia.org/wiki/User:EarwigBot
Non puoi selezionare più di 25 argomenti Gli argomenti devono iniziare con una lettera o un numero, possono includere trattini ('-') e possono essere lunghi fino a 35 caratteri.

721 righe
30 KiB

  1. # -*- coding: utf-8 -*-
  2. #
  3. # Copyright (C) 2009-2013 Ben Kurtovic <ben.kurtovic@verizon.net>
  4. #
  5. # Permission is hereby granted, free of charge, to any person obtaining a copy
  6. # of this software and associated documentation files (the "Software"), to deal
  7. # in the Software without restriction, including without limitation the rights
  8. # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
  9. # copies of the Software, and to permit persons to whom the Software is
  10. # furnished to do so, subject to the following conditions:
  11. #
  12. # The above copyright notice and this permission notice shall be included in
  13. # all copies or substantial portions of the Software.
  14. #
  15. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16. # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17. # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  18. # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  19. # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  20. # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  21. # SOFTWARE.
  22. from datetime import datetime
  23. import re
  24. from os.path import expanduser
  25. from threading import Lock
  26. from time import sleep
  27. import oursql
  28. from earwigbot import exceptions
  29. from earwigbot import wiki
  30. from earwigbot.tasks import Task
  31. class AFCStatistics(Task):
  32. """A task to generate statistics for WikiProject Articles for Creation.
  33. Statistics are stored in a MySQL database ("u_earwig_afc_statistics")
  34. accessed with oursql. Statistics are synchronied with the live database
  35. every four minutes and saved once an hour, on the hour, to self.pagename.
  36. In the live bot, this is "Template:AFC statistics".
  37. """
  38. name = "afc_statistics"
  39. number = 2
  40. # Chart status number constants:
  41. CHART_NONE = 0
  42. CHART_PEND = 1
  43. CHART_DRAFT = 2
  44. CHART_REVIEW = 3
  45. CHART_ACCEPT = 4
  46. CHART_DECLINE = 5
  47. CHART_MISPLACE = 6
  48. def setup(self):
  49. self.cfg = cfg = self.config.tasks.get(self.name, {})
  50. # Set some wiki-related attributes:
  51. self.pagename = cfg.get("page", "Template:AFC statistics")
  52. self.pending_cat = cfg.get("pending", "Pending AfC submissions")
  53. self.ignore_list = cfg.get("ignoreList", [])
  54. default_summary = "Updating statistics for [[WP:WPAFC|WikiProject Articles for creation]]."
  55. self.summary = self.make_summary(cfg.get("summary", default_summary))
  56. # Templates used in chart generation:
  57. templates = cfg.get("templates", {})
  58. self.tl_header = templates.get("header", "AFC statistics/header")
  59. self.tl_row = templates.get("row", "#invoke:AfC|row")
  60. self.tl_footer = templates.get("footer", "AFC statistics/footer")
  61. # Connection data for our SQL database:
  62. kwargs = cfg.get("sql", {})
  63. kwargs["read_default_file"] = expanduser("~/.my.cnf")
  64. self.conn_data = kwargs
  65. self.db_access_lock = Lock()
  66. def run(self, **kwargs):
  67. """Entry point for a task event.
  68. Depending on the kwargs passed, we will either synchronize our local
  69. statistics database with the site (self.sync()) or save it to the wiki
  70. (self.save()). We will additionally create an SQL connection with our
  71. local database.
  72. """
  73. action = kwargs.get("action")
  74. if not self.db_access_lock.acquire(False): # Non-blocking
  75. if action == "sync":
  76. self.logger.info("A sync is already ongoing; aborting")
  77. return
  78. self.logger.info("Waiting for database access lock")
  79. self.db_access_lock.acquire()
  80. try:
  81. self.site = self.bot.wiki.get_site()
  82. self.conn = oursql.connect(**self.conn_data)
  83. try:
  84. if action == "save":
  85. self.save(kwargs)
  86. elif action == "sync":
  87. self.sync(kwargs)
  88. elif action == "update":
  89. self.update(kwargs)
  90. finally:
  91. self.conn.close()
  92. finally:
  93. self.db_access_lock.release()
  94. def save(self, kwargs):
  95. """Save our local statistics to the wiki.
  96. After checking for emergency shutoff, the statistics chart is compiled,
  97. and then saved to self.pagename using self.summary iff it has changed
  98. since last save.
  99. """
  100. self.logger.info("Saving chart")
  101. if kwargs.get("fromIRC"):
  102. summary = self.summary + " (!earwigbot)"
  103. else:
  104. if self.shutoff_enabled():
  105. return
  106. summary = self.summary
  107. statistics = self.compile_charts()
  108. page = self.site.get_page(self.pagename)
  109. text = page.get()
  110. newtext = re.sub(u"<!-- stat begin -->(.*?)<!-- stat end -->",
  111. "<!-- stat begin -->\n" + statistics + "\n<!-- stat end -->",
  112. text, flags=re.DOTALL)
  113. if newtext == text:
  114. self.logger.info("Chart unchanged; not saving")
  115. return # Don't edit the page if we're not adding anything
  116. newtext = re.sub("<!-- sig begin -->(.*?)<!-- sig end -->",
  117. "<!-- sig begin -->~~~ at ~~~~~<!-- sig end -->",
  118. newtext)
  119. page.edit(newtext, summary, minor=True, bot=True)
  120. self.logger.info(u"Chart saved to [[{0}]]".format(page.title))
  121. def compile_charts(self):
  122. """Compile and return all statistics information from our local db."""
  123. stats = ""
  124. with self.conn.cursor() as cursor:
  125. cursor.execute("SELECT * FROM chart")
  126. for chart in cursor:
  127. stats += self.compile_chart(chart) + "\n"
  128. return stats[:-1] # Drop the last newline
  129. def compile_chart(self, chart_info):
  130. """Compile and return a single statistics chart."""
  131. chart_id, chart_title, special_title = chart_info
  132. chart = self.tl_header + "|" + chart_title
  133. if special_title:
  134. chart += "|" + special_title
  135. chart = "{{" + chart + "}}"
  136. query = "SELECT * FROM page JOIN row ON page_id = row_id WHERE row_chart = ?"
  137. with self.conn.cursor(oursql.DictCursor) as cursor:
  138. cursor.execute(query, (chart_id,))
  139. for page in cursor:
  140. chart += "\n" + self.compile_chart_row(page)
  141. chart += "\n{{" + self.tl_footer + "}}"
  142. return chart
  143. def compile_chart_row(self, page):
  144. """Compile and return a single chart row.
  145. 'page' is a dict of page information, taken as a row from the page
  146. table, where keys are column names and values are their cell contents.
  147. """
  148. row = u"{0}|s={page_status}|t={page_title}|z={page_size}|"
  149. if page["page_special_oldid"]:
  150. row += "sr={page_special_user}|sd={page_special_time}|si={page_special_oldid}|"
  151. row += "mr={page_modify_user}|md={page_modify_time}|mi={page_modify_oldid}"
  152. page["page_special_time"] = self.format_time(page["page_special_time"])
  153. page["page_modify_time"] = self.format_time(page["page_modify_time"])
  154. if page["page_notes"]:
  155. row += "|n=1{page_notes}"
  156. return "{{" + row.format(self.tl_row, **page) + "}}"
  157. def format_time(self, dt):
  158. """Format a datetime into the standard MediaWiki timestamp format."""
  159. return dt.strftime("%H:%M, %d %b %Y")
  160. def sync(self, kwargs):
  161. """Synchronize our local statistics database with the site.
  162. Syncing involves, in order, updating tracked submissions that have
  163. been changed since last sync (self.update_tracked()), adding pending
  164. submissions that are not tracked (self.add_untracked()), and removing
  165. old submissions from the database (self.delete_old()).
  166. The sync will be canceled if SQL replication lag is greater than 600
  167. seconds, because this will lead to potential problems and outdated
  168. data, not to mention putting demand on an already overloaded server.
  169. Giving sync the kwarg "ignore_replag" will go around this restriction.
  170. """
  171. self.logger.info("Starting sync")
  172. replag = self.site.get_replag()
  173. self.logger.debug("Server replag is {0}".format(replag))
  174. if replag > 600 and not kwargs.get("ignore_replag"):
  175. msg = "Sync canceled as replag ({0} secs) is greater than ten minutes"
  176. self.logger.warn(msg.format(replag))
  177. return
  178. with self.conn.cursor() as cursor:
  179. self.update_tracked(cursor)
  180. self.add_untracked(cursor)
  181. self.delete_old(cursor)
  182. self.logger.info("Sync completed")
  183. def update_tracked(self, cursor):
  184. """Update tracked submissions that have been changed since last sync.
  185. This is done by iterating through every page in our database and
  186. comparing our stored latest revision ID with the actual latest revision
  187. ID from an SQL query. If they differ, we will update our information
  188. about the page (self.update_page()).
  189. If the page does not exist, we will remove it from our database with
  190. self.untrack_page().
  191. """
  192. self.logger.debug("Updating tracked submissions")
  193. query = """SELECT s.page_id, s.page_title, s.page_modify_oldid,
  194. r.page_latest, r.page_title, r.page_namespace FROM page AS s
  195. LEFT JOIN {0}_p.page AS r ON s.page_id = r.page_id"""
  196. cursor.execute(query.format(self.site.name))
  197. for pageid, title, oldid, real_oldid, real_title, real_ns in cursor:
  198. if not real_oldid:
  199. self.untrack_page(cursor, pageid)
  200. continue
  201. if oldid != real_oldid:
  202. msg = u"Updating page [[{0}]] (id: {1}) @ {2}"
  203. self.logger.debug(msg.format(title, pageid, oldid))
  204. self.logger.debug(" {0} -> {1}".format(oldid, real_oldid))
  205. real_title = real_title.decode("utf8").replace("_", " ")
  206. ns = self.site.namespace_id_to_name(real_ns)
  207. if ns:
  208. real_title = u":".join((ns, real_title))
  209. try:
  210. self.update_page(cursor, pageid, real_title)
  211. except Exception:
  212. e = u"Error updating page [[{0}]] (id: {1})"
  213. self.logger.exception(e.format(real_title, pageid))
  214. def add_untracked(self, cursor):
  215. """Add pending submissions that are not yet tracked.
  216. This is done by compiling a list of all currently tracked submissions
  217. and iterating through all members of self.pending_cat via SQL. If a
  218. page in the pending category is not tracked and is not in
  219. self.ignore_list, we will track it with self.track_page().
  220. """
  221. self.logger.debug("Adding untracked pending submissions")
  222. cursor.execute("SELECT page_id FROM page")
  223. tracked = [i[0] for i in cursor.fetchall()]
  224. category = self.site.get_category(self.pending_cat)
  225. for page in category.get_members():
  226. title, pageid = page.title, page.pageid
  227. if title in self.ignore_list:
  228. continue
  229. if pageid not in tracked:
  230. msg = u"Tracking page [[{0}]] (id: {1})".format(title, pageid)
  231. self.logger.debug(msg)
  232. try:
  233. self.track_page(cursor, pageid, title)
  234. except Exception:
  235. e = u"Error tracking page [[{0}]] (id: {1})"
  236. self.logger.exception(e.format(title, pageid))
  237. def delete_old(self, cursor):
  238. """Remove old submissions from the database.
  239. "Old" is defined as a submission that has been declined or accepted
  240. more than 36 hours ago. Pending submissions cannot be "old".
  241. """
  242. self.logger.debug("Removing old submissions from chart")
  243. query = """DELETE FROM page, row USING page JOIN row
  244. ON page_id = row_id WHERE row_chart IN (?, ?)
  245. AND ADDTIME(page_special_time, '36:00:00') < NOW()"""
  246. cursor.execute(query, (self.CHART_ACCEPT, self.CHART_DECLINE))
  247. def update(self, kwargs):
  248. """Update a page by name, regardless of whether anything has changed.
  249. Mainly intended as a command to be used via IRC, e.g.:
  250. !tasks start afc_statistics action=update page=Foobar
  251. """
  252. title = kwargs.get("page")
  253. if not title:
  254. return
  255. title = title.replace("_", " ").decode("utf8")
  256. query = "SELECT page_id, page_modify_oldid FROM page WHERE page_title = ?"
  257. with self.conn.cursor() as cursor:
  258. cursor.execute(query, (title,))
  259. try:
  260. pageid, oldid = cursor.fetchall()[0]
  261. except IndexError:
  262. msg = u"Page [[{0}]] not found in database".format(title)
  263. self.logger.error(msg)
  264. msg = u"Updating page [[{0}]] (id: {1}) @ {2}"
  265. self.logger.info(msg.format(title, pageid, oldid))
  266. self.update_page(cursor, pageid, title)
  267. def untrack_page(self, cursor, pageid):
  268. """Remove a page, given by ID, from our database."""
  269. self.logger.debug("Untracking page (id: {0})".format(pageid))
  270. query = """DELETE FROM page, row USING page JOIN row
  271. ON page_id = row_id WHERE page_id = ?"""
  272. cursor.execute(query, (pageid,))
  273. def track_page(self, cursor, pageid, title):
  274. """Update hook for when page is not in our database.
  275. A variety of SQL queries are used to gather information about the page,
  276. which is then saved to our database.
  277. """
  278. content = self.get_content(title)
  279. if content is None:
  280. msg = u"Could not get page content for [[{0}]]".format(title)
  281. self.logger.error(msg)
  282. return
  283. namespace = self.site.get_page(title).namespace
  284. status, chart = self.get_status_and_chart(content, namespace)
  285. if chart == self.CHART_NONE:
  286. msg = u"Could not find a status for [[{0}]]".format(title)
  287. self.logger.warn(msg)
  288. return
  289. size = self.get_size(content)
  290. m_user, m_time, m_id = self.get_modify(pageid)
  291. s_user, s_time, s_id = self.get_special(pageid, chart)
  292. notes = self.get_notes(chart, content, m_time, s_user)
  293. query1 = "INSERT INTO row VALUES (?, ?)"
  294. query2 = "INSERT INTO page VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
  295. cursor.execute(query1, (pageid, chart))
  296. cursor.execute(query2, (pageid, status, title, size, notes, m_user,
  297. m_time, m_id, s_user, s_time, s_id))
    def update_page(self, cursor, pageid, title):
        """Update hook for when page is already in our database.

        A variety of SQL queries are used to gather information about the
        page, which is compared against our stored information. Differing
        information is then updated.
        """
        content = self.get_content(title)
        if content is None:
            msg = u"Could not get page content for [[{0}]]".format(title)
            self.logger.error(msg)
            return

        namespace = self.site.get_page(title).namespace
        status, chart = self.get_status_and_chart(content, namespace)
        if chart == self.CHART_NONE:
            # No recognizable AfC status anymore; drop it from the DB.
            self.untrack_page(cursor, pageid)
            return

        # Fetch our stored information for comparison below:
        query = "SELECT * FROM page JOIN row ON page_id = row_id WHERE page_id = ?"
        with self.conn.cursor(oursql.DictCursor) as dict_cursor:
            dict_cursor.execute(query, (pageid,))
            result = dict_cursor.fetchall()[0]

        size = self.get_size(content)
        m_user, m_time, m_id = self.get_modify(pageid)

        if title != result["page_title"]:
            self.update_page_title(cursor, result, pageid, title)
        if m_id != result["page_modify_oldid"]:
            self.update_page_modify(cursor, result, pageid, size, m_user,
                                    m_time, m_id)
        if status != result["page_status"]:
            # update_page_status() also refreshes the "special" edit info
            # and returns it; keep the (possibly new) special user for
            # the notes check below.
            special = self.update_page_status(cursor, result, pageid, status,
                                              chart)
            s_user = special[0]
        else:
            s_user = result["page_special_user"]

        notes = self.get_notes(chart, content, m_time, s_user)
        if notes != result["page_notes"]:
            self.update_page_notes(cursor, result, pageid, notes)
  334. def update_page_title(self, cursor, result, pageid, title):
  335. """Update the title of a page in our database."""
  336. query = "UPDATE page SET page_title = ? WHERE page_id = ?"
  337. cursor.execute(query, (title, pageid))
  338. msg = u" {0}: title: {1} -> {2}"
  339. self.logger.debug(msg.format(pageid, result["page_title"], title))
  340. def update_page_modify(self, cursor, result, pageid, size, m_user, m_time, m_id):
  341. """Update the last modified information of a page in our database."""
  342. query = """UPDATE page SET page_size = ?, page_modify_user = ?,
  343. page_modify_time = ?, page_modify_oldid = ?
  344. WHERE page_id = ?"""
  345. cursor.execute(query, (size, m_user, m_time, m_id, pageid))
  346. msg = u" {0}: modify: {1} / {2} / {3} -> {4} / {5} / {6}"
  347. msg = msg.format(pageid, result["page_modify_user"],
  348. result["page_modify_time"],
  349. result["page_modify_oldid"], m_user, m_time, m_id)
  350. self.logger.debug(msg)
  351. def update_page_status(self, cursor, result, pageid, status, chart):
  352. """Update the status and "specialed" information of a page."""
  353. query1 = """UPDATE page JOIN row ON page_id = row_id
  354. SET page_status = ?, row_chart = ? WHERE page_id = ?"""
  355. query2 = """UPDATE page SET page_special_user = ?,
  356. page_special_time = ?, page_special_oldid = ?
  357. WHERE page_id = ?"""
  358. cursor.execute(query1, (status, chart, pageid))
  359. msg = " {0}: status: {1} ({2}) -> {3} ({4})"
  360. self.logger.debug(msg.format(pageid, result["page_status"],
  361. result["row_chart"], status, chart))
  362. s_user, s_time, s_id = self.get_special(pageid, chart)
  363. if s_id != result["page_special_oldid"]:
  364. cursor.execute(query2, (s_user, s_time, s_id, pageid))
  365. msg = u"{0}: special: {1} / {2} / {3} -> {4} / {5} / {6}"
  366. msg = msg.format(pageid, result["page_special_user"],
  367. result["page_special_time"],
  368. result["page_special_oldid"], s_user, s_time, s_id)
  369. self.logger.debug(msg)
  370. return s_user, s_time, s_id
  371. def update_page_notes(self, cursor, result, pageid, notes):
  372. """Update the notes (or warnings) of a page in our database."""
  373. query = "UPDATE page SET page_notes = ? WHERE page_id = ?"
  374. cursor.execute(query, (notes, pageid))
  375. msg = " {0}: notes: {1} -> {2}"
  376. self.logger.debug(msg.format(pageid, result["page_notes"], notes))
  377. def get_content(self, title):
  378. """Get the current content of a page by title from the API.
  379. The page's current revision ID is retrieved from SQL, and then
  380. an API query is made to get its content. This is the only API query
  381. used in the task's code.
  382. """
  383. query = "SELECT page_latest FROM page WHERE page_title = ? AND page_namespace = ?"
  384. try:
  385. namespace, base = title.split(":", 1)
  386. except ValueError:
  387. base = title
  388. ns = wiki.NS_MAIN
  389. else:
  390. try:
  391. ns = self.site.namespace_name_to_id(namespace)
  392. except exceptions.NamespaceNotFoundError:
  393. base = title
  394. ns = wiki.NS_MAIN
  395. result = self.site.sql_query(query, (base.replace(" ", "_"), ns))
  396. try:
  397. revid = int(list(result)[0][0])
  398. except IndexError:
  399. return None
  400. return self.get_revision_content(revid)
  401. def get_revision_content(self, revid, tries=1):
  402. """Get the content of a revision by ID from the API."""
  403. res = self.site.api_query(action="query", prop="revisions",
  404. revids=revid, rvprop="content")
  405. try:
  406. return res["query"]["pages"].values()[0]["revisions"][0]["*"]
  407. except KeyError:
  408. if tries > 0:
  409. sleep(5)
  410. return self.get_revision_content(revid, tries=tries - 1)
  411. def get_status_and_chart(self, content, namespace):
  412. """Determine the status and chart number of an AFC submission.
  413. The methodology used here is the same one I've been using for years
  414. (see also commands.afc_report), but with the new draft system taken
  415. into account. The order here is important: if there is more than one
  416. {{AFC submission}} template on a page, we need to know which one to
  417. use (revision history search to find the most recent isn't a viable
  418. idea :P).
  419. """
  420. statuses = self.get_statuses(content)
  421. if "R" in statuses:
  422. status, chart = "r", self.CHART_REVIEW
  423. elif "H" in statuses:
  424. status, chart = "p", self.CHART_DRAFT
  425. elif "P" in statuses:
  426. status, chart = "p", self.CHART_PEND
  427. elif "T" in statuses:
  428. status, chart = None, self.CHART_NONE
  429. elif "D" in statuses:
  430. status, chart = "d", self.CHART_DECLINE
  431. else:
  432. status, chart = None, self.CHART_NONE
  433. if namespace == wiki.NS_MAIN:
  434. if not statuses:
  435. status, chart = "a", self.CHART_ACCEPT
  436. else:
  437. status, chart = None, self.CHART_MISPLACE
  438. return status, chart
  439. def get_statuses(self, content):
  440. """Return a list of all AFC submission statuses in a page's text."""
  441. re_has_templates = "\{\{[aA][fF][cC] submission\s*(\}\}|\||/)"
  442. re_template = "\{\{[aA][fF][cC] submission\s*(.*?)\}\}"
  443. re_remove_embed = "(\{\{[aA][fF][cC] submission\s*(.*?))\{\{(.*?)\}\}(.*?)\}\}"
  444. valid = ["R", "H", "P", "T", "D"]
  445. subtemps = {
  446. "/reviewing": "R",
  447. "/onhold": "H",
  448. "/pending": "P",
  449. "/draft": "T",
  450. "/declined": "D"
  451. }
  452. statuses = []
  453. while re.search(re_has_templates, content):
  454. status = "P"
  455. match = re.search(re_template, content, re.S)
  456. if not match:
  457. return statuses
  458. temp = match.group(1)
  459. limit = 0
  460. while "{{" in temp and limit < 50:
  461. content = re.sub(re_remove_embed, "\\1\\4}}", content, 1, re.S)
  462. match = re.search(re_template, content, re.S)
  463. temp = match.group(1)
  464. limit += 1
  465. params = temp.split("|")
  466. try:
  467. subtemp, params = params[0].strip(), params[1:]
  468. except IndexError:
  469. status = "P"
  470. params = []
  471. else:
  472. if subtemp:
  473. status = subtemps.get(subtemp)
  474. params = []
  475. for param in params:
  476. param = param.strip().upper()
  477. if "=" in param:
  478. key, value = param.split("=", 1)
  479. if key.strip() == "1":
  480. status = value if value in valid else "P"
  481. break
  482. else:
  483. status = param if param in valid else "P"
  484. break
  485. statuses.append(status)
  486. content = re.sub(re_template, "", content, 1, re.S)
  487. return statuses
  488. def get_size(self, content):
  489. """Return a page's size in a short, pretty format."""
  490. return "{0} kB".format(round(len(content) / 1000.0, 1))
  491. def get_modify(self, pageid):
  492. """Return information about a page's last edit ("modification").
  493. This consists of the most recent editor, modification time, and the
  494. lastest revision ID.
  495. """
  496. query = """SELECT rev_user_text, rev_timestamp, rev_id FROM revision
  497. JOIN page ON rev_id = page_latest WHERE page_id = ?"""
  498. result = self.site.sql_query(query, (pageid,))
  499. m_user, m_time, m_id = list(result)[0]
  500. timestamp = datetime.strptime(m_time, "%Y%m%d%H%M%S")
  501. return m_user.decode("utf8"), timestamp, m_id
    def get_special(self, pageid, chart):
        """Return information about a page's "special" edit.

        I tend to use the term "special" as a verb a lot, which is bound
        to cause confusion. It is merely a short way of saying "the edit
        in which a declined submission was declined, an accepted
        submission was accepted, a submission in review was set as such,
        a pending submission was submitted, and a "misplaced" submission
        was created."

        This "information" consists of the special edit's editor, its
        time, and its revision ID. If the page's status is not something
        that involves "special"-ing, we will return None for all three.
        The same will be returned if we cannot determine when the page
        was "special"-ed, or if it happened more than 50 content lookups
        back in the history (see the cap below).
        """
        # Map the chart to the status the special edit must have set
        # (search_for) and the statuses that must NOT have been present
        # beforehand (search_not). NOTE(review): an unrecognized chart
        # value would leave search_for/search_not unbound below; callers
        # only pass the CHART_* constants, so this is assumed unreachable.
        if chart == self.CHART_NONE:
            return None, None, None
        elif chart == self.CHART_MISPLACE:
            return self.get_create(pageid)
        elif chart == self.CHART_ACCEPT:
            search_for = None
            search_not = ["R", "H", "P", "T", "D"]
        elif chart == self.CHART_DRAFT:
            search_for = "H"
            search_not = []
        elif chart == self.CHART_PEND:
            search_for = "P"
            search_not = []
        elif chart == self.CHART_REVIEW:
            search_for = "R"
            search_not = []
        elif chart == self.CHART_DECLINE:
            search_for = "D"
            search_not = ["R", "H", "P", "T"]

        # Walk the revision history newest-to-oldest; the last revision
        # whose statuses still match the current state is the special
        # edit, so return it as soon as an older revision stops matching.
        query = """SELECT rev_user_text, rev_timestamp, rev_id
                   FROM revision WHERE rev_page = ? ORDER BY rev_id DESC"""
        result = self.site.sql_query(query, (pageid,))
        counter = 0
        last = (None, None, None)
        for user, ts, revid in result:
            counter += 1
            if counter > 50:
                # Cap content lookups to avoid hammering the API on
                # pages with very long histories.
                msg = "Exceeded 50 content lookups while determining special for page (id: {0}, chart: {1})"
                self.logger.warn(msg.format(pageid, chart))
                return None, None, None
            try:
                content = self.get_revision_content(revid)
            except exceptions.APIError:
                msg = "API error interrupted SQL query in get_special() for page (id: {0}, chart: {1})"
                self.logger.exception(msg.format(pageid, chart))
                return None, None, None
            statuses = self.get_statuses(content)

            matches = [s in statuses for s in search_not]
            if search_for:
                # Stop once the wanted status disappears, or a forbidden
                # one appears, in an older revision:
                if search_for not in statuses or any(matches):
                    return last
            else:
                if any(matches):
                    return last
            timestamp = datetime.strptime(ts, "%Y%m%d%H%M%S")
            last = (user.decode("utf8"), timestamp, revid)
        return last
  562. def get_create(self, pageid):
  563. """Return information about a page's first edit ("creation").
  564. This consists of the page creator, creation time, and the earliest
  565. revision ID.
  566. """
  567. query = """SELECT rev_user_text, rev_timestamp, rev_id
  568. FROM revision WHERE rev_id =
  569. (SELECT MIN(rev_id) FROM revision WHERE rev_page = ?)"""
  570. result = self.site.sql_query(query, (pageid,))
  571. c_user, c_time, c_id = list(result)[0]
  572. timestamp = datetime.strptime(c_time, "%Y%m%d%H%M%S")
  573. return c_user.decode("utf8"), timestamp, c_id
  574. def get_notes(self, chart, content, m_time, s_user):
  575. """Return any special notes or warnings about this page.
  576. copyvio: submission is a suspected copyright violation
  577. unsourced: submission lacks references completely
  578. no-inline: submission has no inline citations
  579. short: submission is less than a kilobyte in length
  580. resubmit: submission was resubmitted after a previous decline
  581. old: submission has not been touched in > 4 days
  582. blocked: submitter is currently blocked
  583. """
  584. notes = ""
  585. ignored_charts = [self.CHART_NONE, self.CHART_ACCEPT, self.CHART_DECLINE]
  586. if chart in ignored_charts:
  587. return notes
  588. copyvios = self.config.tasks.get("afc_copyvios", {})
  589. regex = "\{\{\s*" + copyvios.get("template", "AfC suspected copyvio")
  590. if re.search(regex, content):
  591. notes += "|nc=1" # Submission is a suspected copyvio
  592. if not re.search("\<ref\s*(.*?)\>(.*?)\</ref\>", content, re.I | re.S):
  593. regex = "(https?:)|\[//(?!{0})([^ \]\\t\\n\\r\\f\\v]+?)"
  594. sitedomain = re.escape(self.site.domain)
  595. if re.search(regex.format(sitedomain), content, re.I | re.S):
  596. notes += "|ni=1" # Submission has no inline citations
  597. else:
  598. notes += "|nu=1" # Submission is completely unsourced
  599. if len(content) < 1000:
  600. notes += "|ns=1" # Submission is short
  601. statuses = self.get_statuses(content)
  602. if "D" in statuses and chart != self.CHART_MISPLACE:
  603. notes += "|nr=1" # Submission was resubmitted
  604. time_since_modify = (datetime.utcnow() - m_time).total_seconds()
  605. max_time = 4 * 24 * 60 * 60
  606. if time_since_modify > max_time:
  607. notes += "|no=1" # Submission hasn't been touched in over 4 days
  608. if chart in [self.CHART_PEND, self.CHART_DRAFT] and s_user:
  609. submitter = self.site.get_user(s_user)
  610. try:
  611. if submitter.blockinfo:
  612. notes += "|nb=1" # Submitter is blocked
  613. except exceptions.UserNotFoundError: # Likely an IP
  614. pass
  615. return notes