A Python robot that edits Wikipedia and interacts with people over IRC https://en.wikipedia.org/wiki/User:EarwigBot
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

afc_statistics.py 23 KiB

пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
пре 13 година
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535
  1. # -*- coding: utf-8 -*-
  2. from datetime import datetime
  3. import logging
  4. import re
  5. from os.path import expanduser
  6. from threading import Lock
  7. import oursql
  8. from classes import BaseTask
  9. import config
  10. import wiki
# Chart status number constants:
# These are the values stored in the row_chart column of the local database
# and returned by get_status_and_chart(); delete_old() and get_special()
# dispatch on them.
CHART_NONE = 0  # no recognized AfC status on the page
CHART_PEND = 1  # pending submission ({{AFC submission|}})
CHART_DRAFT = 2  # held draft ({{AFC submission|h}})
CHART_REVIEW = 3  # under review ({{AFC submission|r}})
CHART_ACCEPT = 4  # accepted submission
CHART_DECLINE = 5  # declined submission ({{AFC submission|d}})
class Task(BaseTask):
    """A task to generate statistics for WikiProject Articles for Creation.

    Statistics are stored in a MySQL database ("u_earwig_afc_statistics")
    accessed with oursql. Statistics are synchronized with the live database
    every four minutes and saved once an hour, on the hour, to self.pagename.
    In the live bot, this is "Template:AFC statistics".
    """
    name = "afc_statistics"  # key used to look up this task in config.tasks
    number = 2  # numeric task ID — presumably assigned by the bot's task manager; see BaseTask
  27. def __init__(self):
  28. self.cfg = cfg = config.tasks.get(self.name, {})
  29. # Set some wiki-related attributes:
  30. self.pagename = cfg.get("page", "Template:AFC statistics")
  31. self.pending_cat = cfg.get("pending", "Pending AfC submissions")
  32. self.ignore_list = cfg.get("ignore_list", [])
  33. default_summary = "Updating statistics for [[WP:WPAFC|WikiProject Articles for creation]]."
  34. self.summary = self.make_summary(cfg.get("summary", default_summary))
  35. # Templates used in chart generation:
  36. templates = cfg.get("templates", {})
  37. self.tl_header = templates.get("header", "AFC statistics/header")
  38. self.tl_row = templates.get("row", "AFC statistics/row")
  39. self.tl_footer = templates.get("footer", "AFC statistics/footer")
  40. # Connection data for our SQL database:
  41. kwargs = cfg.get("sql", {})
  42. kwargs["read_default_file"] = expanduser("~/.my.cnf")
  43. self.conn_data = kwargs
  44. self.db_access_lock = Lock()
  45. def run(self, **kwargs):
  46. """Entry point for a task event.
  47. Depending on the kwargs passed, we will either synchronize our local
  48. statistics database with the site (self.sync()) or save it to the wiki
  49. (self.save()). We will additionally create an SQL connection with our
  50. local database.
  51. """
  52. self.site = wiki.get_site()
  53. self.conn = oursql.connect(**self.conn_data)
  54. action = kwargs.get("action")
  55. try:
  56. if action == "save":
  57. self.save(**kwargs)
  58. elif action == "sync":
  59. self.sync(**kwargs)
  60. finally:
  61. self.conn.close()
  62. def save(self, **kwargs):
  63. """Save our local statistics to the wiki.
  64. After checking for emergency shutoff, the statistics chart is compiled,
  65. and then saved to self.pagename using self.summary iff it has changed
  66. since last save.
  67. """
  68. self.logger.info("Saving chart")
  69. if kwargs.get("fromIRC"):
  70. summary = " ".join((self.summary, "(!earwigbot)"))
  71. else:
  72. if self.shutoff_enabled():
  73. return
  74. summary = self.summary
  75. statistics = self.compile_charts()
  76. page = self.site.get_page(self.pagename)
  77. text = page.get()
  78. newtext = re.sub("(<!-- stat begin -->)(.*?)(<!-- stat end -->)",
  79. statistics.join(("\\1\n", "\n\\3")), text,
  80. flags=re.DOTALL)
  81. if newtext == text:
  82. self.logger.info("Chart unchanged; not saving")
  83. return # Don't edit the page if we're not adding anything
  84. newtext = re.sub("(<!-- sig begin -->)(.*?)(<!-- sig end -->)",
  85. "\\1~~~ at ~~~~~\\3", newtext)
  86. page.edit(newtext, summary, minor=True, bot=True)
  87. self.logger.info("Chart saved to [[{0}]]".format(page.title()))
  88. def compile_charts(self):
  89. """Compile and return all statistics information from our local db."""
  90. stats = ""
  91. with self.conn.cursor() as cursor, self.db_access_lock:
  92. cursor.execute("SELECT * FROM chart")
  93. for chart in cursor:
  94. stats += self.compile_chart(chart) + "\n"
  95. return stats[:-1] # Drop the last newline
  96. def compile_chart(self, chart_info):
  97. """Compile and return a single statistics chart."""
  98. chart_id, chart_title, special_title = chart_info
  99. chart = "|".join((self.tl_header, chart_title))
  100. if special_title:
  101. chart += "".join(("|", special_title))
  102. chart = "".join(("{{", chart, "}}"))
  103. query = "SELECT * FROM page JOIN row ON page_id = row_id WHERE row_chart = ?"
  104. with self.conn.cursor(oursql.DictCursor) as cursor:
  105. cursor.execute(query, (chart_id,))
  106. for page in cursor:
  107. chart += "\n" + self.compile_chart_row(page)
  108. chart += "".join(("\n{{", self.tl_footer, "}}"))
  109. return chart
  110. def compile_chart_row(self, page):
  111. """Compile and return a single chart row.
  112. 'page' is a dict of page information, taken as a row from the page
  113. table, where keys are column names and values are their cell contents.
  114. """
  115. row = "{0}|s={page_status}|t={page_title}|h={page_short}|z={page_size}|"
  116. row += "cr={page_create_user}|cd={page_create_time}|ci={page_create_oldid}|"
  117. row += "mr={page_modify_user}|md={page_modify_time}|mi={page_modify_oldid}|"
  118. page["page_create_time"] = self.format_time(page["page_create_time"])
  119. page["page_modify_time"] = self.format_time(page["page_modify_time"])
  120. if page["page_special_user"]:
  121. row += "sr={page_special_user}|sd={page_special_time}|si={page_special_oldid}|"
  122. page["page_special_time"] = self.format_time(page["page_special_time"])
  123. if page["page_notes"]:
  124. row += "n=1{page_notes}"
  125. return "".join(("{{", row.format(self.tl_row, **page), "}}"))
  126. def format_time(self, timestamp):
  127. """Format a datetime into the standard MediaWiki timestamp format."""
  128. return timestamp.strftime("%H:%M, %d %B %Y")
  129. def sync(self, **kwargs):
  130. """Synchronize our local statistics database with the site.
  131. Syncing involves, in order, updating tracked submissions that have
  132. been changed since last sync (self.update_tracked()), adding pending
  133. submissions that are not tracked (self.add_untracked()), and removing
  134. old submissions from the database (self.delete_old()).
  135. The sync will be canceled if SQL replication lag is greater than 600
  136. seconds, because this will lead to potential problems and outdated
  137. data, not to mention putting demand on an already overloaded server.
  138. Giving sync the kwarg "ignore_replag" will go around this restriction.
  139. """
  140. self.logger.info("Starting sync")
  141. replag = self.site.get_replag()
  142. self.logger.debug("Server replag is {0}".format(replag))
  143. if replag > 600 and not kwargs.get("ignore_replag"):
  144. msg = "Sync canceled as replag ({0} secs) is greater than ten minutes."
  145. self.logger.warn(msg.format(replag))
  146. with self.conn.cursor() as cursor, self.db_access_lock:
  147. self.update_tracked(cursor)
  148. self.add_untracked(cursor)
  149. self.delete_old(cursor)
  150. self.logger.info("Sync completed")
  151. def update_tracked(self, cursor):
  152. """Update tracked submissions that have been changed since last sync.
  153. This is done by iterating through every page in our database and
  154. comparing our stored latest revision ID with the actual latest revision
  155. ID from an SQL query. If they differ, we will update our information
  156. about the page (self.update_page()).
  157. If the page does not exist, we will remove it from our database with
  158. self.untrack_page().
  159. """
  160. self.logger.debug("Updating tracked submissions")
  161. query1 = "SELECT page_id, page_title, page_modify_oldid FROM page"
  162. query2 = """SELECT page_latest, page_title, page_namespace FROM page
  163. WHERE page_id = ?"""
  164. cursor.execute(query1)
  165. for pageid, title, oldid in cursor:
  166. msg = "Updating page [[{0}]] (id: {1}) @ {2}"
  167. self.logger.debug(msg.format(pageid, title, oldid))
  168. result = list(self.site.sql_query(query2, (pageid,)))
  169. if not result:
  170. self.untrack_page(cursor, pageid)
  171. continue
  172. real_oldid = result[0][0]
  173. if oldid != real_oldid:
  174. self.logger.debug(" {0} -> {1}".format(oldid, real_oldid))
  175. body = result[0][1].replace("_", " ")
  176. ns = self.site.namespace_id_to_name(result[0][2])
  177. real_title = ":".join(ns, body)
  178. self.update_page(cursor, pageid, real_title)
  179. def add_untracked(self, cursor):
  180. """Add pending submissions that are not yet tracked.
  181. This is done by compiling a list of all currently tracked submissions
  182. and iterating through all members of self.pending_cat via SQL. If a
  183. page in the pending category is not tracked and is not in
  184. self.ignore_list, we will track it with self.track_page().
  185. """
  186. self.logger.debug("Adding untracked pending submissions")
  187. cursor.execute("SELECT page_id FROM page")
  188. tracked = [i[0] for i in cursor.fetchall()]
  189. category = self.site.get_category(self.pending_cat)
  190. pending = category.members(use_sql=True)
  191. for title, pageid in pending:
  192. if title in self.ignore_list:
  193. continue
  194. if pageid not in tracked:
  195. msg = "Tracking page [[{0}]] (id: {1})".format(title, pageid)
  196. self.logger.debug(msg)
  197. self.track_page(cursor, pageid, title)
    def delete_old(self, cursor):
        """Remove old submissions from the database.

        "Old" is defined as a submission that has been declined or accepted
        more than 36 hours ago. Pending submissions cannot be "old".
        """
        self.logger.debug("Removing old submissions from chart")
        # Deletes matching rows from both tables in one statement; only rows
        # in the accepted/declined charts whose special (accept/decline)
        # time is more than 36 hours old are affected.
        # NOTE(review): "IN ?" with a tuple parameter relies on oursql
        # expanding the sequence during parametrization — confirm this works,
        # as standard DB-API paramstyles do not expand tuples in IN clauses.
        query = """DELETE FROM page, row USING page JOIN row
                   ON page_id = row_id WHERE row_chart IN ?
                   AND ADDTIME(page_special_time, '36:00:00') < NOW()"""
        old_charts = (CHART_ACCEPT, CHART_DECLINE)
        cursor.execute(query, (old_charts,))
  209. def untrack_page(self, cursor, pageid):
  210. """Remove a page, given by ID, from our database."""
  211. self.logger.debug("Untracking page (id: {0})".format(pageid))
  212. query = """DELETE FROM page, row USING page JOIN row
  213. ON page_id = row_id WHERE page_id = ?"""
  214. cursor.execute(query, (pageid,))
  215. def track_page(self, cursor, pageid, title):
  216. """Update hook for when page is not in our database.
  217. A variety of SQL queries are used to gather information about the page,
  218. which are then saved to our database.
  219. """
  220. content = self.get_content(title)
  221. status, chart = self.get_status_and_chart(content)
  222. if not status:
  223. msg = "Could not find a status for [[{0}]]".format(title)
  224. self.logger.error(msg)
  225. return
  226. short = self.get_short_title(title)
  227. size = len(content)
  228. notes = self.get_notes(pageid)
  229. c_user, c_time, c_id = self.get_create(pageid)
  230. m_user, m_time, m_id = self.get_modify(pageid)
  231. s_user, s_time, s_id = self.get_special(pageid, chart)
  232. query1 = "INSERT INTO row VALUES ?"
  233. query2 = "INSERT INTO page VALUES ?"
  234. cursor.execute(query1, ((pageid, chart),))
  235. cursor.execute(query2, ((pageid, status, title, short, size, notes,
  236. c_user, c_time, c_id, m_user, m_time, m_id,
  237. s_user, s_time, s_id),))
  238. def update_page(self, cursor, pageid, title):
  239. """Update hook for when page is already in our database.
  240. A variety of SQL queries are used to gather information about the page,
  241. which is compared against our stored information. Differing information
  242. is then updated.
  243. If our page is now a redirect, we will determine the namespace it was
  244. moved to. If it was moved to the mainspace or template space, we will
  245. set the sub's status as accepted. If it was to the Project: or Project
  246. talk: namespaces, we'll merely update our stored title (this is likely
  247. to occur if a submission was moved from the userspace to the project
  248. space). If it was moved to another namespace, something unusual has
  249. happened, and we'll untrack the submission.
  250. """
  251. content = self.get_content(title)
  252. try:
  253. redirect_regex = wiki.Page.re_redirect
  254. target_title = re.findall(redirect_regex, content, flags=re.I)[0]
  255. except IndexError:
  256. status, chart = self.get_status_and_chart(content)
  257. if not status:
  258. self.untrack_page(cursor, pageid)
  259. return
  260. else:
  261. target_ns = self.site.get_page(target_title).namespace()
  262. if target_ns in [wiki.NS_MAIN, wiki.NS_TEMPLATE]:
  263. status, chart = "accept", CHART_ACCEPT
  264. elif target_ns in [wiki.NS_PROJECT, wiki.NS_PROJECT_TALK]:
  265. title = target_title
  266. content = self.get_content(title)
  267. status, chart = self.get_status_and_chart(content)
  268. if not status:
  269. self.untrack_page(cursor, pageid)
  270. return
  271. else:
  272. msg = " Page has moved to namespace {0}".format(target_ns)
  273. self.logger.debug(msg)
  274. self.untrack_page(cursor, pageid)
  275. return
  276. size = len(content)
  277. notes = self.get_notes(pageid)
  278. m_user, m_time, m_id = self.get_modify(pageid)
  279. query = "SELECT * FROM page JOIN row ON page_id = row_id WHERE page_id = ?"
  280. with self.conn.cursor(oursql.DictCursor) as dict_cursor:
  281. dict_cursor.execute(query, (pageid,))
  282. result = dict_cursor.fetchall()[0]
  283. if title != result["page_title"]:
  284. self.update_page_title(cursor, result, pageid, title)
  285. if m_id != result["page_modify_oldid"]:
  286. self.update_page_modify(cursor, result, pageid, size, m_user, m_time, m_id)
  287. if status != result["page_status"]:
  288. self.update_page_status(cursor, result, pageid, status, chart, page)
  289. if notes != result["page_notes"]:
  290. self.update_page_notes(cursor, result, pageid, notes)
  291. def update_page_title(self, cursor, result, pageid, title):
  292. """Update the title and short_title of a page in our database."""
  293. query = "UPDATE page SET page_title = ?, page_short = ? WHERE page_id = ?"
  294. short = self.get_short_title(title)
  295. cursor.execute(query, (title, short, pageid))
  296. msg = " {0}: title: {1} -> {2}"
  297. self.logger.debug(msg.format(pageid, result["page_title"], title))
  298. def update_page_modify(self, cursor, result, pageid, size, m_user, m_time, m_id):
  299. """Update the last modified information of a page in our database."""
  300. query = """UPDATE page SET page_size = ?, page_modify_user = ?,
  301. page_modify_time = ?, page_modify_oldid = ?
  302. WHERE page_id = ?"""
  303. cursor.execute(query, (size, m_user, m_time, m_id, pageid))
  304. msg = " {0}: modify: {1} / {2} / {3} -> {4} / {5} / {6}"
  305. msg = msg.format(pageid, result["page_modify_user"],
  306. result["page_modify_time"],
  307. result["page_modify_oldid"], m_user, m_time, m_id)
  308. self.logger.debug(msg)
  309. def update_page_status(self, cursor, result, pageid, status, chart, page):
  310. """Update the status and "specialed" information of a page."""
  311. query1 = """UPDATE page JOIN row ON page_id = row_id
  312. SET page_status = ?, row_chart = ? WHERE page_id = ?"""
  313. query2 = """UPDATE page SET page_special_user = ?,
  314. page_special_time = ?, page_special_oldid = ?
  315. WHERE page_id = ?"""
  316. cursor.execute(query1, (status, chart, pageid))
  317. msg = " {0}: status: {1} ({2}) -> {3} ({4})"
  318. self.logger.debug(msg.format(pageid, result["page_status"],
  319. result["row_chart"], status, chart))
  320. s_user, s_time, s_id = self.get_special(pageid, chart)
  321. if s_id != result["page_special_oldid"]:
  322. cursor.execute(query2, (s_user, s_time, s_id, pageid))
  323. msg = "{0}: special: {1} / {2} / {3} -> {4} / {5} / {6}"
  324. msg = msg.format(pageid, result["page_special_user"],
  325. result["page_special_time"],
  326. result["page_special_oldid"], m_user, m_time, m_id)
  327. self.logger.debug(msg)
  328. def update_page_notes(self, cursor, result, pageid, notes):
  329. """Update the notes (or warnings) of a page in our database."""
  330. query = "UPDATE page SET page_notes = ? WHERE page_id = ?"
  331. cursor.execute(query, (notes, pageid))
  332. msg = " {0}: notes: {1} -> {2}"
  333. self.logger.debug(msg.format(pageid, result["page_notes"], notes))
  334. def get_content(self, title):
  335. """Get the current content of a page by title from SQL.
  336. The page's current revision ID is retrieved from SQL, and then
  337. site.get_revid_content() is called.
  338. The reason a more conventional method (i.e. site.get_page.get()) is
  339. avoided is that due to replication lag, a discrepancy between the live
  340. database (which the API uses) and the replicated database (which SQL
  341. uses) can lead to incorrect and very confusing data, such as missing
  342. pages that are supposed to exist, if both are used interchangeably.
  343. """
  344. query = "SELECT page_latest FROM page WHERE page_title = ? AND page_namespace = ?"
  345. namespace, base = title.split(":", 1)
  346. try:
  347. ns = self.site.namespace_name_to_id(namespace)
  348. except wiki.NamespaceNotFoundError:
  349. base = title
  350. ns = wiki.NS_MAIN
  351. result = self.site.sql_query(query, (base, ns))
  352. revid = list(result)[0]
  353. return self.site.get_revid_content(revid)
  354. def get_status_and_chart(self, content):
  355. """Determine the status and chart number of an AFC submission.
  356. The methodology used here is the same one I've been using for years
  357. (see also commands.afc_report), but with the new draft system taken
  358. into account. The order here is important: if there is more than one
  359. {{AFC submission}} template on a page, we need to know which one to
  360. use (revision history search to find the most recent isn't a viable
  361. idea :P).
  362. """
  363. if re.search("\{\{afc submission\|r\|(.*?)\}\}", content, re.I):
  364. return "review", CHART_REVIEW
  365. elif re.search("\{\{afc submission\|h\|(.*?)\}\}", content, re.I):
  366. return "pend", CHART_DRAFT
  367. elif re.search("\{\{afc submission\|\|(.*?)\}\}", content, re.I):
  368. return "pend", CHART_PEND
  369. elif re.search("\{\{afc submission\|t\|(.*?)\}\}", content, re.I):
  370. return None, CHART_NONE
  371. elif re.search("\{\{afc submission\|d\|(.*?)\}\}", content, re.I):
  372. return "decline", CHART_DECLINE
  373. return None, CHART_NONE
  374. def get_short_title(self, title):
  375. """Shorten a title so we can display it in a chart using less space.
  376. Basically, this just means removing the "Wikipedia talk:Articles for
  377. creation" part from the beginning. If it is longer than 50 characters,
  378. we'll shorten it down to 47 and add an poor-man's ellipsis at the end.
  379. """
  380. short = re.sub("Wikipedia(\s*talk)?\:Articles\sfor\screation\/", "", title)
  381. if len(short) > 50:
  382. short = "".join((short[:47], "..."))
  383. return short
  384. def get_create(self, pageid):
  385. """Return information about a page's first edit ("creation").
  386. This consists of the page creator, creation time, and the earliest
  387. revision ID.
  388. """
  389. query = """SELECT rev_user_text, rev_timestamp, rev_id
  390. FROM revision WHERE rev_id =
  391. (SELECT MIN(rev_id) FROM revision WHERE rev_page = ?)"""
  392. result = self.site.sql_query(query, (pageid,))
  393. c_user, c_time, c_id = list(result)[0]
  394. return c_user, datetime.strptime(c_time, "%Y%m%d%H%M%S"), c_id
  395. def get_modify(self, pageid):
  396. """Return information about a page's last edit ("modification").
  397. This consists of the most recent editor, modification time, and the
  398. lastest revision ID.
  399. """
  400. query = """SELECT rev_user_text, rev_timestamp, rev_id FROM revision
  401. JOIN page ON rev_id = page_latest WHERE page_id = ?"""
  402. result = self.site.sql_query(query, (pageid,))
  403. m_user, m_time, m_id = list(result)[0]
  404. return m_user, datetime.strptime(m_time, "%Y%m%d%H%M%S"), m_id
    def get_special(self, pageid, chart):
        """Return information about a page's "special" edit.

        I tend to use the term "special" as a verb a lot, which is bound to
        cause confusion. It is merely a short way of saying "the edit in
        which a declined submission was declined, an accepted submission was
        accepted, a submission in review was set as such, and a pending draft
        was submitted."

        This "information" consists of the special edit's editor, its time,
        and its revision ID. If the page's status is not something that
        involves "special"-ing, we will return None for all three. The same
        will be returned if we cannot determine when the page was
        "special"-ed, or if it was "special"-ed more than 250 edits ago.
        """
        if chart in [CHART_NONE, CHART_PEND]:
            # Untracked or merely pending pages have no special edit.
            return None, None, None
        elif chart == CHART_ACCEPT:
            # An accepted submission's special edit is its creation.
            return self.get_create(pageid)
        elif chart == CHART_DRAFT:
            search = "(?!\{\{afc submission\|h\|(.*?)\}\})"
        elif chart == CHART_REVIEW:
            search = "(?!\{\{afc submission\|r\|(.*?)\}\})"
        elif chart == CHART_DECLINE:
            search = "(?!\{\{afc submission\|d\|(.*?)\}\})"
        # NOTE(review): a bare negative lookahead "(?!...)" matches the empty
        # string at any position where the template does not *start*, so
        # re.search() below succeeds for virtually any content and the loop
        # will return the newest revision. The intent appears to be "the
        # first revision, scanning backwards, that lacks the status
        # template" — confirm, and anchor or invert the pattern if so.
        query = """SELECT rev_user_text, rev_timestamp, rev_id
                   FROM revision WHERE rev_page = ? ORDER BY rev_id DESC"""
        result = self.site.sql_query(query, (pageid,))
        counter = 0
        for user, ts, revid in result:
            counter += 1
            if counter > 250:
                # Bound the number of full-content lookups per page.
                msg = "Exceeded 250 content lookups while determining special for page (id: {0}, chart: {1})"
                self.logger.warn(msg.format(pageid, chart))
                break
            content = self.site.get_revid_content(revid)
            if re.search(search, content, re.I):
                # rev_timestamp is MediaWiki's 14-digit timestamp string.
                return user, datetime.strptime(ts, "%Y%m%d%H%M%S"), revid
        return None, None, None
  442. def get_notes(self, pageid):
  443. """Return any special notes or warnings about this page.
  444. Currently unimplemented, so always returns None.
  445. """
  446. return None