diff --git a/build.py b/build.py
index bff81fd..cee1b2a 100755
--- a/build.py
+++ b/build.py
@@ -8,7 +8,10 @@ import shutil
 page_src = """#! /usr/bin/env python
 # -*- coding: utf-8 -*-
 
 import os
+import sys
+
 os.chdir("..")
+sys.path.insert(0, os.path.join(".", "{{pages_dir}}"))
 from mako.template import Template
 from mako.lookup import TemplateLookup
diff --git a/pages/copyvios.mako b/pages/copyvios.mako
index 5fb64c2..f7e207c 100644
--- a/pages/copyvios.mako
+++ b/pages/copyvios.mako
@@ -2,9 +2,9 @@
 from urlparse import parse_qs
 from earwigbot.bot import Bot
 %>\
-<%namespace file="/support/copyvios/__init__.py" import="get_results, highlight_delta"/>\
-<%namespace file="/support/sites.py" import="get_site, get_sites"/>\
-<%namespace file="/support/misc.py" import="urlstrip"/>\
+<%namespace module="support.copyvios" import="get_results, highlight_delta"/>\
+<%namespace module="support.sites" import="get_site, get_sites"/>\
+<%namespace module="support.misc" import="urlstrip"/>\
 <%
 lang = orig_lang = project = name = title = url = None
 query = parse_qs(environ["QUERY_STRING"])
diff --git a/pages/support/copyvios/checker.py b/pages/support/copyvios/checker.py
index 513a568..fbe6da1 100644
--- a/pages/support/copyvios/checker.py
+++ b/pages/support/copyvios/checker.py
@@ -6,7 +6,7 @@
 from time import time
 from earwigbot import exceptions
 
-def get_results(bot, lang, project, name, all_projects, title, url, query):
+def get_results(context, bot, lang, project, name, all_projects, title, url, query):
     site = get_site(bot, lang, project, name, all_projects)
     if not site:
         return None, None, None
diff --git a/pages/support/copyvios/highlighter.py b/pages/support/copyvios/highlighter.py
index 40be5b8..df9f9ba 100644
--- a/pages/support/copyvios/highlighter.py
+++ b/pages/support/copyvios/highlighter.py
@@ -2,7 +2,7 @@
 from re import sub, UNICODE
 
 
-def highlight_delta(chain, delta):
+def highlight_delta(context, chain, delta):
     processed = []
     prev_prev = prev = chain.START
     i = 0
@@ -11,7 +11,7 @@ def highlight_delta(chain, delta):
     for paragraph in paragraphs:
         processed_words = []
         words = paragraph.split(" ")
-        for i, word in enumerate(words, i)
+        for i, word in enumerate(words, i):
             try:
                 next = strip_word(all_words[i+1])
             except IndexError:
diff --git a/pages/support/sites.py b/pages/support/sites.py
index e6e455f..29f8ae1 100644
--- a/pages/support/sites.py
+++ b/pages/support/sites.py
@@ -5,7 +5,7 @@
 from urlparse import urlparse
 from earwigbot import exceptions
 
-def get_site(bot, lang, project, name, all_projects):
+def get_site(context, bot, lang, project, name, all_projects):
     if project not in [proj[0] for proj in all_projects]:
         return None
     if project == "wikimedia" and name: # Special sites:
@@ -24,7 +24,7 @@ def get_site(bot, lang, project, name, all_projects):
     except (exceptions.APIError, exceptions.LoginError):
         return None
 
-def get_sites(bot):
+def get_sites(context, bot):
     max_staleness = 60 * 60 * 24 * 7
     conn = open_sql_connection(bot, "globals")
     query1 = "SELECT update_time FROM updates WHERE update_service = ?"