From f644208f05f54cdd651fc6d2b84ef9870b83432a Mon Sep 17 00:00:00 2001
From: Ben Kurtovic
Date: Thu, 19 Jul 2012 23:53:03 -0400
Subject: [PATCH] Implement missing cases, plus closing a few Unicode
 loopholes.

---
 build.py            |  3 ++-
 pages/copyvios.mako | 36 ++++++++++++++++++++----------------
 2 files changed, 22 insertions(+), 17 deletions(-)

diff --git a/build.py b/build.py
index 9fa9607..bff81fd 100755
--- a/build.py
+++ b/build.py
@@ -15,7 +15,8 @@ from mako.lookup import TemplateLookup
 
 def myapp(environ, start_response):
     start_response("200 OK", [("Content-Type", "text/html")])
-    lookup = TemplateLookup(directories=["{{pages_dir}}"])
+    lookup = TemplateLookup(directories=["{{pages_dir}}"],
+                            input_encoding="utf8")
     template = Template(filename="{{src}}", module_directory="{{temp_dir}}",
                         lookup=lookup, format_exceptions=True)
     return [template.render(environ=environ).encode("utf8")]
diff --git a/pages/copyvios.mako b/pages/copyvios.mako
index c6cdabb..c6ce807 100644
--- a/pages/copyvios.mako
+++ b/pages/copyvios.mako
@@ -28,10 +28,11 @@
     # if query.get("nocache") or not result:
     #     result = get_fresh_results(page, conn)
     mc1 = __import__("earwigbot").wiki.copyvios.MarkovChain(page.get())
-    mc2 = __import__("earwigbot").wiki.copyvios.MarkovChain("This is some random textual content for a page.")
+    mc2 = __import__("earwigbot").wiki.copyvios.MarkovChain(u"This is some random textual content for a page.")
     mci = __import__("earwigbot").wiki.copyvios.MarkovChainIntersection(mc1, mc2)
     result = __import__("earwigbot").wiki.copyvios.CopyvioCheckResult(
         True, 0.67123, "http://example.com/", 7, mc1, (mc2, mci))
+    # END TEST BLOCK
     return page, result
 
 def get_site(bot, lang, project, all_projects):
@@ -129,18 +130,15 @@
     query1 = "SELECT update_time FROM updates WHERE update_service = ?"
     query2 = "SELECT lang_code, lang_name FROM languages"
     query3 = "SELECT project_code, project_name FROM projects"
-
     with conn.cursor() as cursor:
         cursor.execute(query1, ("sites",))
         time_since_update = int(time() - cursor.fetchall()[0][0])
         if time_since_update > max_staleness:
             update_sites(bot, cursor)
-
         cursor.execute(query2)
         langs = cursor.fetchall()
         cursor.execute(query3)
         projects = cursor.fetchall()
-
     return langs, projects
 
 def update_sites(site, cursor):
@@ -152,8 +150,8 @@
     projects.add(("wikimedia", "Wikimedia"))
     for special in site:
         if "closed" not in special and "private" not in special:
-            code = special["dbname"]
-            name = special["code"].capitalize()
+            code = special["dbname"].encode("utf8")
+            name = special["code"].encode("utf8").capitalize()
             languages.add((code, name))
     this = set()
     for web in site["site"]:
@@ -293,9 +291,9 @@
@@ -303,9 +301,9 @@
@@ -314,7 +312,7 @@
             Page title:
             % if page:
-
+
             % elif title:
             % else:
@@ -345,23 +343,29 @@
         % if project and lang and title and not page:
-            CASE WHEN GIVEN SITE DOESN'T EXIST
+
+
+
+
+                The given site, (project=${project.decode("utf8")}, language=${lang.decode("utf8")}) doesn't seem to exist. Check its URL?
+
+
         % elif project and lang and title and page and not result:
-            CASE WHEN GIVEN PAGE DOESN'T EXIST
+
+
+
+
+                The given page, ${page.title | h}, doesn't seem to exist.
+
+
         % elif page:
             % if result.violation:
-
-
-                ${page.title() | h} is a suspected violation of ${result.url | urlstrip}.
-
+
+
+                ${page.title | h} is a suspected violation of ${result.url | urlstrip}.
+
             % else:
-
-
-                No violations detected in ${page.title() | h}.
-
+
+
+                No violations detected in ${page.title | h}.
+
             % endif