A tool that evaluates high-risk Wikipedia templates: https://tools.wmflabs.org/earwig-dev/tif
Non puoi selezionare più di 25 argomenti Gli argomenti devono iniziare con una lettera o un numero, possono includere trattini ('-') e possono essere lunghi fino a 35 caratteri.

8 anni fa
8 anni fa
8 anni fa
8 anni fa
8 anni fa
8 anni fa
8 anni fa
8 anni fa
8 anni fa
8 anni fa
8 anni fa
8 anni fa
8 anni fa
8 anni fa
8 anni fa
8 anni fa
8 anni fa
8 anni fa
8 anni fa
8 anni fa
8 anni fa
8 anni fa
8 anni fa
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138
  1. # -*- coding: utf-8 -*-
  2. from datetime import datetime, timedelta
  3. from gzip import GzipFile
  4. from json import loads
  5. from os.path import expanduser
  6. from StringIO import StringIO
  7. from urllib import quote
  8. from urllib2 import URLError
  9. from earwigbot.bot import Bot
  10. from oursql import connect
  11. __all__ = ["calculate_tif"]
  12. SITE_DB = "enwiki_p"
  13. def _get_db(bot):
  14. args = bot.config.wiki["_tifSQL"]
  15. args["read_default_file"] = expanduser("~/.my.cnf")
  16. args["autoping"] = True
  17. args["autoreconnect"] = True
  18. return connect(**args)
  19. def _count_transclusions(cursor, title, ns):
  20. query = """SELECT COUNT(*)
  21. FROM {0}.templatelinks
  22. WHERE tl_title = ? AND tl_namespace = ? AND tl_from_namespace = 0"""
  23. cursor.execute(query.format(SITE_DB), (title, ns))
  24. return cursor.fetchall()[0][0]
  25. def _count_views(cursor, title, ns):
  26. query = """SELECT SUM(cache_views), MIN(cache_time)
  27. FROM {0}.templatelinks
  28. INNER JOIN cache ON tl_from = cache_id
  29. WHERE tl_title = ? AND tl_namespace = ? AND tl_from_namespace = 0"""
  30. cursor.execute(query.format(SITE_DB), (title, ns))
  31. return cursor.fetchall()[0]
  32. def _get_avg_views(site, article):
  33. url = ("https://wikimedia.org/api/rest_v1/metrics/pageviews/per-article/"
  34. "{0}.{1}/all-access/user/{2}/daily/{3}/{4}")
  35. days = 30
  36. slug = quote(article, safe="")
  37. start = (datetime.utcnow() - timedelta(days=days)).strftime("%Y%m%d")
  38. end = datetime.utcnow().strftime("%Y%m%d")
  39. query = url.format(site.lang, site.project, slug, start, end)
  40. try:
  41. response = site._opener.open(query) # We're terrible
  42. except URLError:
  43. return None
  44. result = response.read()
  45. if response.headers.get("Content-Encoding") == "gzip":
  46. stream = StringIO(result)
  47. gzipper = GzipFile(fileobj=stream)
  48. result = gzipper.read()
  49. try:
  50. res = loads(result)
  51. except ValueError:
  52. return None
  53. if "items" not in res:
  54. return None
  55. total_views = sum(item["views"] for item in res["items"])
  56. return total_views / (float(days) * 24 * 60)
  57. def _update_views(cursor, site, title, ns):
  58. cache_life = "7 DAY"
  59. query1 = """DELETE FROM cache
  60. WHERE cache_time < DATE_SUB(NOW(), INTERVAL {1})"""
  61. query2 = """SELECT tl_from, page_title
  62. FROM {0}.templatelinks
  63. LEFT JOIN {0}.page ON tl_from = page_id
  64. LEFT JOIN cache ON tl_from = cache_id
  65. WHERE tl_title = ? AND tl_namespace = ? AND tl_from_namespace = 0
  66. AND cache_id IS NULL"""
  67. query3 = """INSERT INTO cache (cache_id, cache_views, cache_time)
  68. VALUES (?, ?, NOW()) ON DUPLICATE KEY
  69. UPDATE cache_views = ?, cache_time = NOW()"""
  70. cursor.execute(query1.format(cache_life))
  71. cursor.execute(query2.format(SITE_DB), (title, ns))
  72. while True:
  73. titles = cursor.fetchmany(1024)
  74. if not titles:
  75. break
  76. viewcounts = [(pageid, _get_avg_views(site, name))
  77. for (pageid, name) in titles]
  78. parambatch = [(i, v, v) for (i, v) in viewcounts if v is not None]
  79. cursor.executemany(query3, parambatch)
  80. def _compute_stats(db, page):
  81. title = page.title.split(":", 1)[-1].replace(" ", "_")
  82. title = title[0].upper() + title[1:]
  83. with db.cursor() as cursor:
  84. transclusions = _count_transclusions(cursor, title, page.namespace)
  85. _update_views(cursor, page.site, title, page.namespace)
  86. tif, cache_time = _count_views(cursor, title, page.namespace)
  87. return tif, transclusions, cache_time
  88. def _format_time(cache_time):
  89. formatter = lambda n, w: "{0} {1}{2}".format(n, w, "" if n == 1 else "s")
  90. diff = datetime.utcnow() - cache_time
  91. if diff.seconds > 3600:
  92. return formatter(diff.seconds / 3600, "hour")
  93. if diff.seconds > 60:
  94. return formatter(diff.seconds / 60, "minute")
  95. return formatter(diff.seconds, "second")
  96. def _get_protection(page):
  97. edit = [prot for prot in page.protection if prot["type"] == "edit"]
  98. return edit[0] if edit else None
  99. def calculate_tif(title):
  100. bot = Bot(".earwigbot")
  101. db = _get_db(bot)
  102. site = bot.wiki.get_site()
  103. page = site.get_page(title)
  104. result = {"title": title, "page": page}
  105. if page.exists != page.PAGE_EXISTS:
  106. result["error"] = "no page"
  107. return result
  108. tif, transclusions, cache_time = _compute_stats(db, page)
  109. result["tif"] = tif
  110. result["transclusions"] = transclusions
  111. result["protection"] = _get_protection(page)
  112. if cache_time:
  113. result["cache_time"] = cache_time.strftime("%b %d, %Y %H:%M:%S UTC")
  114. result["cache_age"] = _format_time(cache_time)
  115. return result