A Python robot that edits Wikipedia and interacts with people over IRC https://en.wikipedia.org/wiki/User:EarwigBot
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

122 lines
5.2 KiB

  1. # -*- coding: utf-8 -*-
  2. # Report the status of AFC submissions, either as an automatic message on join or a request via !status.
  3. import json
  4. import re
  5. import urllib
  6. from config.irc_config import *
  7. from irc.base_command import BaseCommand
  8. class AFCStatus(BaseCommand):
  9. def get_hooks(self):
  10. return ["join", "msg"]
  11. def get_help(self, command):
  12. return "Get the number of pending AfC submissions, open redirect requests, and open file upload requests."
  13. def check(self, data):
  14. if data.is_command and (data.command == "status" or
  15. data.command == "count" or data.command == "num" or
  16. data.command == "number" or data.command == "afc_status"):
  17. return True
  18. if data.line[1] == "JOIN" and data.chan in AFC_CHANS:
  19. return True
  20. return False
  21. def process(self, data):
  22. if data.line[1] == "JOIN":
  23. subs = self.count_submissions()
  24. redirs = self.count_redirects()
  25. files = self.count_files()
  26. agg_num = self.get_aggregate_number((subs, redirs, files))
  27. aggregate = self.get_aggregate(agg_num)
  28. self.connection.notice(data.nick, "\x02Current status:\x0F Articles for Creation %s (\x0302AFC\x0301: \x0305%s\x0301; \x0302AFC/R\x0301: \x0305%s\x0301; \x0302FFU\x0301: \x0305%s\x0301)"
  29. % (aggregate, subs, redirs, files))
  30. return
  31. if data.args:
  32. if data.args[0].startswith("sub") or data.args[0] == "s":
  33. subs = self.count_submissions()
  34. self.connection.reply(data, "there are currently %s pending AfC submissions." % subs)
  35. elif data.args[0].startswith("redir") or data.args[0] == "r":
  36. redirs = self.count_redirects()
  37. self.connection.reply(data, "there are currently %s open redirect requests." % redirs)
  38. elif data.args[0].startswith("file") or data.args[0] == "f":
  39. files = self.count_redirects()
  40. self.connection.reply(data, "there are currently %s open file upload requests." % files)
  41. elif data.args[0].startswith("agg") or data.args[0] == "a":
  42. agg_data = (self.count_submissions(), self.count_redirects(), self.count_files())
  43. agg_num = self.get_aggregate_number(agg_data)
  44. aggregate = self.get_aggregate(agg_num)
  45. self.connection.reply(data, "aggregate is currently %s (AfC %s)." % (agg_num, aggregate))
  46. else:
  47. self.connection.reply(data, "unknown argument: \x0303%s\x0301. Valid args are 'subs', 'redirs', and 'files'." % data.args[0])
  48. else:
  49. subs = self.count_submissions()
  50. redirs = self.count_redirects()
  51. files = self.count_files()
  52. self.connection.reply(data, "there are currently %s pending submissions, %s open redirect requests, and %s open file upload requests."
  53. % (subs, redirs, files))
  54. def count_submissions(self):
  55. params = {'action': 'query', 'list': 'categorymembers', 'cmlimit':'500', 'format': 'json'}
  56. params['cmtitle'] = "Category:Pending_AfC_submissions"
  57. data = urllib.urlencode(params)
  58. raw = urllib.urlopen("http://en.wikipedia.org/w/api.php", data).read()
  59. res = json.loads(raw)
  60. subs = len(res['query']['categorymembers'])
  61. subs -= 2 # remove [[Wikipedia:Articles for creation/Redirects]] and [[Wikipedia:Files for upload]], which aren't real submissions
  62. return subs
  63. def count_redirects(self):
  64. content = self.get_page("Wikipedia:Articles_for_creation/Redirects")
  65. total = len(re.findall("==\s*(Redirect|Category) request: \[\[(.*?)\]\]\s*==", content))
  66. closed = content.lower().count("{{afc-c|b}}")
  67. redirs = total - closed
  68. return redirs
  69. def count_files(self):
  70. content = self.get_page("Wikipedia:Files_for_upload")
  71. total = len(re.findall("^\s*==(.*?)==\s*$", content, re.MULTILINE))
  72. closed = content.lower().count("{{ifu-c|b}}")
  73. files = total - closed
  74. return files
  75. def get_page(self, pagename):
  76. params = {'action': 'query', 'prop': 'revisions', 'rvprop':'content', 'rvlimit':'1', 'format': 'json'}
  77. params['titles'] = pagename
  78. data = urllib.urlencode(params)
  79. raw = urllib.urlopen("http://en.wikipedia.org/w/api.php", data).read()
  80. res = json.loads(raw)
  81. pageid = res['query']['pages'].keys()[0]
  82. content = res['query']['pages'][pageid]['revisions'][0]['*']
  83. return content
  84. def get_aggregate(self, num):
  85. if num == 0:
  86. agg = "is \x02\x0303clear\x0301\x0F"
  87. elif num < 60:
  88. agg = "is \x0303almost clear\x0301"
  89. elif num < 125:
  90. agg = "has a \x0312small backlog\x0301"
  91. elif num < 175:
  92. agg = "has an \x0307average backlog\x0301"
  93. elif num < 250:
  94. agg = "is \x0304backlogged\x0301"
  95. elif num < 300:
  96. agg = "is \x02\x0304heavily backlogged\x0301\x0F"
  97. else:
  98. agg = "is \x02\x1F\x0304severely backlogged\x0301\x0F"
  99. return agg
  100. def get_aggregate_number(self, (subs, redirs, files)):
  101. num = (subs * 5) + (redirs * 2) + (files * 2)
  102. return num