
Connection pooling; cleanup.

pull/24/head
Ben Kurtovic, 10 years ago
commit 9a25f97831
10 files changed with 66 additions and 48 deletions
  1. +1 -0 README.md
  2. +16 -6 app.fcgi
  3. +6 -5 copyvios/background.py
  4. +5 -4 copyvios/checker.py
  5. +2 -0 copyvios/cookies.py
  6. +2 -0 copyvios/highlighter.py
  7. +20 -25 copyvios/misc.py
  8. +2 -0 copyvios/settings.py
  9. +10 -6 copyvios/sites.py
  10. +2 -2 templates/index.mako

+1 -0 README.md

@@ -14,6 +14,7 @@ Dependencies
* [mako](http://www.makotemplates.org/) >= 0.7.2
* [mwparserfromhell](https://github.com/earwig/mwparserfromhell) >= 0.3
* [oursql](http://packages.python.org/oursql/) >= 0.9.3.1
* [SQLAlchemy](http://sqlalchemy.org/) >= 0.9.6
* [uglifycss](https://github.com/fmarcia/UglifyCSS/)
* [uglifyjs](https://github.com/mishoo/UglifyJS/) >= 1.3.3



+16 -6 app.fcgi

@@ -7,13 +7,13 @@ from logging.handlers import TimedRotatingFileHandler
from time import asctime
from traceback import format_exc

from earwigbot.bot import Bot
from flask import Flask, g, request
from flask.ext.mako import MakoTemplates, render_template, TemplateError
from flup.server.fcgi import WSGIServer

from copyvios.checker import do_check
from copyvios.cookies import parse_cookies
from copyvios.misc import get_bot
from copyvios.settings import process_settings
from copyvios.sites import get_sites

@@ -25,6 +25,8 @@ app.logger.addHandler(TimedRotatingFileHandler(
"logs/app.log", when="D", interval=1, backupCount=7))
app.logger.info(u"Flask server started " + asctime())

bot = Bot(".earwigbot", 100)

def catch_errors(func):
    @wraps(func)
    def inner(*args, **kwargs):
@@ -37,9 +39,11 @@ def catch_errors(func):
    return inner

@app.before_request
def prepare_cookies():
    cookie_string = request.environ.get("HTTP_COOKIE")
    g.cookies = parse_cookies(request.script_root, cookie_string)
def prepare_request():
    g.bot = bot
    g.globals_db = g.cache_db = None
    g.cookies = parse_cookies(request.script_root,
                              request.environ.get("HTTP_COOKIE"))
    g.new_cookies = []

@app.after_request
@@ -54,6 +58,13 @@ def write_access_log(response):
    app.logger.debug(msg, asctime(), request.path, response.status_code)
    return response

@app.teardown_appcontext
def close_databases(error):
    if g.globals_db:
        g.globals_db.close()
    if g.cache_db:
        g.cache_db.close()

@app.route("/")
@catch_errors
def index():
@@ -64,8 +75,7 @@ def index():
@catch_errors
def settings():
    status = process_settings() if request.method == "POST" else None
    bot = get_bot()
    langs, projects = get_sites(bot)
    langs, projects = get_sites()
    default = bot.wiki.get_site()
    kwargs = {"status": status, "langs": langs, "projects": projects,
              "default_lang": default.lang, "default_project": default.project}


+6 -5 copyvios/background.py

@@ -9,10 +9,12 @@ from time import time
from earwigbot import exceptions
from flask import g

from .misc import get_bot, open_sql_connection
from .misc import get_globals_db

__all__ = ["set_background"]

def set_background(selected):
    conn = open_sql_connection(get_bot(), "globals")
    conn = get_globals_db()
    if "CopyviosScreenCache" in g.cookies:
        cache = g.cookies["CopyviosScreenCache"].value
        try:
@@ -94,11 +96,10 @@ def _load_file(site, filename):
    return filename.replace(" ", "_"), url, descurl, width, height

def _get_site():
    bot = get_bot()
    try:
        return bot.wiki.get_site("commonswiki")
        return g.bot.wiki.get_site("commonswiki")
    except exceptions.SiteNotFoundError:
        return bot.wiki.add_site(project="wikimedia", lang="commons")
        return g.bot.wiki.add_site(project="wikimedia", lang="commons")

def _build_url(screen, filename, url, imgwidth, imgheight):
    width = screen["width"]


+5 -4 copyvios/checker.py

@@ -6,9 +6,11 @@ from urlparse import urlparse

from earwigbot import exceptions

from .misc import get_bot, Query, open_sql_connection
from .misc import Query, get_cache_db
from .sites import get_site, get_sites

__all__ = ["do_check"]

def do_check():
    query = Query()
    if query.lang:
@@ -18,8 +20,7 @@ def do_check():
    if query.project:
        query.project = query.project.lower()

    query.bot = get_bot()
    query.all_langs, query.all_projects = get_sites(query.bot)
    query.all_langs, query.all_projects = get_sites()
    if query.project and query.lang and (query.title or query.oldid):
        query.site = get_site(query)
        if query.site:
@@ -43,7 +44,7 @@ def _get_results(query):
        query.result = page.copyvio_compare(query.url)
        query.result.cached = False
    else:
        conn = open_sql_connection(query.bot, "cache")
        conn = get_cache_db()
        if not query.nocache:
            query.result = _get_cached_results(page, conn)
        if not query.result:


+2 -0 copyvios/cookies.py

@@ -6,6 +6,8 @@ from datetime import datetime, timedelta

from flask import g

__all__ = ["parse_cookies", "set_cookie", "delete_cookie"]

class _CookieManager(SimpleCookie):
    MAGIC = "--cpv2"



+2 -0 copyvios/highlighter.py

@@ -4,6 +4,8 @@ from re import sub, UNICODE

from markupsafe import escape

__all__ = ["highlight_delta"]

def highlight_delta(context, chain, delta):
    degree = chain.degree - 1
    highlights = [False] * degree


+20 -25 copyvios/misc.py

@@ -3,12 +3,13 @@
from os.path import expanduser
from urlparse import parse_qs

from earwigbot.bot import Bot
from flask import request
from flask import g, request
import oursql
from sqlalchemy.pool import manage

_bot = None
_connections = {}
oursql = manage(oursql)

__all__ = ["Query", "get_globals_db", "get_cache_db", "httpsfix", "urlstrip"]

class Query(object):
    def __init__(self, method="GET"):
@@ -36,28 +37,22 @@ class Query(object):
self.query[key] = value


def get_bot():
    global _bot
    if not _bot:
        _bot = Bot(".earwigbot", 100) # Don't print any logs to the console
    return _bot
def _connect_db(name):
    args = g.bot.config.wiki["_copyviosSQL"][name]
    args["read_default_file"] = expanduser("~/.my.cnf")
    args["autoping"] = True
    args["autoreconnect"] = True
    return oursql.connect(**args)

def get_globals_db():
    if not g.globals_db:
        g.globals_db = _connect_db("globals")
    return g.globals_db

def open_sql_connection(bot, dbname):
    if dbname in _connections:
        return _connections[dbname]
    conn_args = bot.config.wiki["_copyviosSQL"][dbname]
    if "read_default_file" not in conn_args and "user" not in conn_args and "passwd" not in conn_args:
        conn_args["read_default_file"] = expanduser("~/.my.cnf")
    elif "read_default_file" in conn_args:
        default_file = expanduser(conn_args["read_default_file"])
        conn_args["read_default_file"] = default_file
    if "autoping" not in conn_args:
        conn_args["autoping"] = True
    if "autoreconnect" not in conn_args:
        conn_args["autoreconnect"] = True
    conn = oursql.connect(**conn_args)
    _connections[dbname] = conn
    return conn
def get_cache_db():
    if not g.cache_db:
        g.cache_db = _connect_db("cache")
    return g.cache_db

def httpsfix(context, url):
    if url.startswith("http://"):
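
The misc.py rewrite above leans on SQLAlchemy's DB-API-level pooling: pool.manage() wraps the oursql module so that every oursql.connect() call transparently checks a connection out of a pool instead of opening a fresh one, which is why the hand-rolled _connections cache and the defensive argument handling in open_sql_connection() could be deleted. A small sketch of that mechanism, using sqlite3 instead of oursql as an assumption for the demo (pool.manage() exists in the SQLAlchemy 0.9 line pinned by the README change and was removed in SQLAlchemy 1.4):

# Wrap a DB-API module so its connect() is backed by a connection pool.
import sqlite3

from sqlalchemy import pool

sqlite3 = pool.manage(sqlite3)

# Same call signature as the raw module; identical connect arguments share a pool.
conn = sqlite3.connect("example.db")
cur = conn.cursor()
cur.execute("SELECT 1")
print(cur.fetchall())
conn.close()  # returns the connection to the pool rather than closing it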


+2 -0 copyvios/settings.py

@@ -6,6 +6,8 @@ from markupsafe import escape
from .cookies import set_cookie, delete_cookie
from .misc import Query

__all__ = ["process_settings"]

def process_settings():
    query = Query(method="POST")
    if query.action == "set":


+10 -6 copyvios/sites.py

@@ -4,12 +4,15 @@ from time import time
from urlparse import urlparse

from earwigbot import exceptions
from flask import g

from .misc import open_sql_connection
from .misc import get_globals_db

__all__ = ["get_site", "get_sites"]

def get_site(query):
    lang, project, name = query.lang, query.project, query.name
    wiki = query.bot.wiki
    wiki = g.bot.wiki
    if project not in [proj[0] for proj in query.all_projects]:
        return None
    if project == "wikimedia" and name: # Special sites:
@@ -28,9 +31,9 @@ def get_site(query):
    except (exceptions.APIError, exceptions.LoginError):
        return None

def get_sites(bot):
def get_sites():
    max_staleness = 60 * 60 * 24 * 7
    conn = open_sql_connection(bot, "globals")
    conn = get_globals_db()
    query1 = "SELECT update_time FROM updates WHERE update_service = ?"
    query2 = "SELECT lang_code, lang_name FROM language"
    query3 = "SELECT project_code, project_name FROM project"
@@ -41,7 +44,7 @@ def get_sites(bot):
        except IndexError:
            time_since_update = time()
        if time_since_update > max_staleness:
            _update_sites(bot.wiki.get_site(), cursor)
            _update_sites(cursor)
        cursor.execute(query2)
        langs = []
        for code, name in cursor.fetchall():
@@ -52,7 +55,8 @@ def get_sites(bot):
        projects = cursor.fetchall()
    return langs, projects

def _update_sites(site, cursor):
def _update_sites(cursor):
    site = g.bot.wiki.get_site()
    matrix = site.api_query(action="sitematrix")["sitematrix"]
    del matrix["count"]
    languages, projects = set(), set()
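
The get_sites() changes above keep the same caching strategy: the language and project lists live in the globals database and are rebuilt from the wiki's sitematrix API only when the stored update timestamp is more than a week old (or missing). A schematic sketch of that staleness check, with hypothetical stand-in callables for the cursor work shown in the diff:

# Refresh cached data only when it is older than MAX_STALENESS.
from time import time

MAX_STALENESS = 60 * 60 * 24 * 7  # one week, as in the diff

def get_cached_sites(read_update_time, refresh_from_api, read_lists):
    try:
        time_since_update = int(time() - read_update_time())
    except IndexError:
        # No update row yet: treat the cache as maximally stale.
        time_since_update = time()
    if time_since_update > MAX_STALENESS:
        refresh_from_api()
    return read_lists()

def demo_refresh():
    # Hypothetical stand-in for _update_sites(cursor).
    print("refreshing language/project lists from the sitematrix API...")

print(get_cached_sites(
    read_update_time=lambda: 0,  # pretend the cache was written at the epoch
    refresh_from_api=demo_refresh,
    read_lists=lambda: ([("en", "English")], [("wikipedia", "Wikipedia")]),
))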


+2 -2 templates/index.mako

@@ -30,7 +30,7 @@
<td colspan="3">
<span class="mono">http://</span>
<select name="lang">
<% selected_lang = query.orig_lang if query.orig_lang else g.cookies["CopyviosDefaultLang"].value if "CopyviosDefaultLang" in g.cookies else query.bot.wiki.get_site().lang %>\
<% selected_lang = query.orig_lang if query.orig_lang else g.cookies["CopyviosDefaultLang"].value if "CopyviosDefaultLang" in g.cookies else g.bot.wiki.get_site().lang %>\
% for code, name in query.all_langs:
% if code == selected_lang:
<option value="${code | h}" selected="selected">${name}</option>
@@ -41,7 +41,7 @@
</select>
<span class="mono">.</span>
<select name="project">
<% selected_project = query.project if query.project else g.cookies["CopyviosDefaultProject"].value if "CopyviosDefaultProject" in g.cookies else query.bot.wiki.get_site().project %>\
<% selected_project = query.project if query.project else g.cookies["CopyviosDefaultProject"].value if "CopyviosDefaultProject" in g.cookies else g.bot.wiki.get_site().project %>\
% for code, name in query.all_projects:
% if code == selected_project:
<option value="${code | h}" selected="selected">${name}</option>

