
Add sqlite3 backend to replace mysql

Branch: master
Ben Kurtovic, 5 years ago
Parent commit: ca58aac413
3 changed files with 32 additions and 11 deletions
  1. README.md (+1, -0)
  2. copyvios/checker.py (+4, -6)
  3. copyvios/misc.py (+27, -5)
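In outline, the change routes all database access through an "engine" key in the bot's _copyviosSQL wiki configuration: get_db() pops that key (defaulting to "mysql") and hands the remaining options to either oursql or apsw. A minimal sketch of the two configurations, written as Python mappings; every key other than "engine" is hypothetical and deployment-specific:

# Hypothetical _copyviosSQL settings; only the "engine" key is read by this
# commit's code, everything else is passed straight to oursql.connect().
mysql_config = {"engine": "mysql", "db": "copyvios", "host": "localhost"}

# For sqlite no connection arguments are needed: the database file is always
# <config.root_dir>/copyvios.db.
sqlite_config = {"engine": "sqlite"}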

README.md (+1, -0)

@@ -16,6 +16,7 @@ Dependencies
 * [oursql](http://packages.python.org/oursql/) >= 0.9.3.1
 * [requests](http://python-requests.org/) >= 2.9.1
 * [SQLAlchemy](http://sqlalchemy.org/) >= 0.9.6
+* [apsw](https://github.com/rogerbinns/apsw) >= 3.26.0
 * [uglifycss](https://github.com/fmarcia/UglifyCSS/)
 * [uglifyjs](https://github.com/mishoo/UglifyJS/) >= 1.3.3
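The sqlite backend brings in apsw as a new dependency; the 3.26.0 floor matters because apsw's version numbers track the SQLite release it was built against. A quick sanity check, not part of the repo:

# Illustration only: confirm the installed apsw meets the >= 3.26.0 requirement.
import apsw

print(apsw.apswversion())       # apsw's own version, e.g. "3.26.0-r1"
print(apsw.sqlitelibversion())  # version of the SQLite library in use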



copyvios/checker.py (+4, -6)

@@ -9,7 +9,7 @@ from earwigbot.wiki.copyvios.markov import EMPTY, MarkovChain
 from earwigbot.wiki.copyvios.parsers import ArticleTextParser
 from earwigbot.wiki.copyvios.result import CopyvioSource, CopyvioCheckResult
 
-from .misc import Query, get_db
+from .misc import Query, get_db, get_cursor
 from .sites import get_site
 from .turnitin import search_turnitin

@@ -119,7 +119,7 @@ def _get_page_by_revid(site, revid):
         page_data["revisions"][0]["*"]  # Only need to check that these exist
         page_data["revisions"][0]["timestamp"]
     except KeyError:
-        return
+        return None
     page = site.get_page(title)
 
     # EarwigBot doesn't understand old revisions of pages, so we use a somewhat
@@ -140,7 +140,7 @@ def _get_cached_results(page, conn, mode, noskip):
                 WHERE cdata_cache_id = ?"""
     cache_id = buffer(sha256(mode + page.get().encode("utf8")).digest())
 
-    with conn.cursor() as cursor:
+    with get_cursor(conn) as cursor:
         cursor.execute(query1)
         cursor.execute(query2, (cache_id,))
         results = cursor.fetchall()
@@ -202,10 +202,8 @@ def _cache_result(page, result, conn, mode):
     data = [(cache_id, source.url[:1024], source.confidence, source.skipped,
              source.excluded)
             for source in result.sources]
-    with conn.cursor() as cursor:
-        cursor.execute("START TRANSACTION")
+    with get_cursor(conn) as cursor:
         cursor.execute(query1, (cache_id,))
         cursor.execute(query2, (cache_id, result.queries, result.time,
                                 result.possible_miss))
         cursor.executemany(query3, data)
-        cursor.execute("COMMIT")

copyvios/misc.py (+27, -5)

@@ -1,7 +1,8 @@
 # -*- coding: utf-8 -*-
 
+from contextlib import contextmanager
 import datetime
-from os.path import expanduser
+from os.path import expanduser, join
 
 from flask import g, request
 import oursql
@@ -41,15 +42,36 @@ class _AppCache(object):
 
 cache = _AppCache()
 
-def get_db():
-    if not g._db:
-        args = cache.bot.config.wiki["_copyviosSQL"]
+def _connect_to_db(engine, args):
+    if engine == "mysql":
         args["read_default_file"] = expanduser("~/.my.cnf")
         args["autoping"] = True
         args["autoreconnect"] = True
-        g._db = oursql.connect(**args)
+        return oursql.connect(**args)
+    if engine == "sqlite":
+        import apsw
+        dbpath = join(cache.bot.config.root_dir, "copyvios.db")
+        return apsw.Connection(dbpath)
+    raise ValueError("Unknown engine: %s" % engine)
+
+def get_db():
+    if not g._db:
+        args = cache.bot.config.wiki["_copyviosSQL"].copy()
+        g._engine = engine = args.pop("engine", "mysql").lower()
+        g._db = _connect_to_db(engine, args)
     return g._db
 
+@contextmanager
+def get_cursor(conn):
+    if g._engine == "mysql":
+        with conn.cursor() as cursor:
+            yield cursor
+    elif g._engine == "sqlite":
+        with conn:
+            yield conn.cursor()
+    else:
+        raise ValueError("Unknown engine: %s" % g._engine)
+
 def get_notice():
     try:
         with open(expanduser("~/copyvios_notice.html")) as fp:
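One detail that makes the swap painless: the cache queries in checker.py already use qmark-style "?" placeholders, the parameter style shared by oursql and apsw (SQLite's native style), so none of the SQL strings themselves had to change. Roughly, with a hypothetical table name standing in for the real schema:

# Works unchanged against either backend returned by get_db(); the table name
# is a stand-in, only the ? placeholder style is the point.
cache_id = "0123abcd"  # placeholder key
with get_cursor(get_db()) as cursor:
    cursor.execute("SELECT * FROM example_cache WHERE cdata_cache_id = ?",
                   (cache_id,))
    rows = cursor.fetchall()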

