add file cache
parent 2a7b70c576
commit 8fe8822e09

1 changed file with 193 additions and 162 deletions
ox/cache.py
@@ -1,6 +1,8 @@
 # -*- coding: utf-8 -*-
 # vi:si:et:sw=4:sts=4:ts=4
-# GPL 2008
+# GPL 2011
+from __future__ import with_statement
+
 import gzip
 import zlib
 import hashlib
@@ -12,12 +14,12 @@ import urllib2
 import sqlite3
 
 import chardet
-from ox.utils import json
+from utils import json
+from .file import makedirs
 
 import net
 from net import DEFAULT_HEADERS, getEncoding
 
 
 cache_timeout = 30*24*60*60 # default is 30 days
 
 COMPRESS_TYPES = (
@@ -54,12 +56,10 @@ def exists(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
     return False
 
 def getHeaders(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout):
-    url_headers = _readUrlCache(url, data, headers, timeout, "headers")
-    if url_headers:
-        url_headers = json.loads(url_headers)
-    else:
+    url_headers = store.get(url, data, headers, timeout, "headers")
+    if not url_headers:
         url_headers = net.getHeaders(url, data, headers)
-        _saveUrlCache(url, data, -1, url_headers)
+        store.set(url, data, -1, url_headers)
     return url_headers
 
 class InvalidResult(Exception):
@@ -80,7 +80,7 @@ def readUrl(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout, valid=None):
     #FIXME: send last-modified / etag from cache and only update if needed
     if isinstance(url, unicode):
         url = url.encode('utf-8')
-    result = _readUrlCache(url, data, headers, timeout)
+    result = store.get(url, data, headers, timeout)
     if not result:
         #print "get data", url
         try:
@@ -92,7 +92,7 @@ def readUrl(url, data=None, headers=DEFAULT_HEADERS, timeout=cache_timeout, valid=None):
             if url_headers.get('content-encoding', None) == 'gzip':
                 result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()
         if not valid or valid(result, url_headers):
-            _saveUrlCache(url, data, result, url_headers)
+            store.set(url, data, result, url_headers)
         else:
             raise InvalidResult(result, url_headers)
     return result
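The three hunks above replace direct calls to the old _readUrlCache()/_saveUrlCache() helpers with a module-level store object that exposes get() and set(). A minimal sketch of that interface as getHeaders() now uses it (url is a placeholder; store itself is bound at the bottom of the file, see the last hunk):

    # headers-only lookup: store.get() returns None on a cache miss
    url_headers = store.get(url, None, DEFAULT_HEADERS, cache_timeout, 'headers')
    if not url_headers:
        url_headers = net.getHeaders(url, None, DEFAULT_HEADERS)
        # data == -1 marks the entry as headers-only, so no body is stored
        store.set(url, None, -1, url_headers)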
@@ -114,172 +114,203 @@ def saveUrl(url, filename, overwrite=False):
         f.write(data)
         f.close()
 
-def _getCacheBase():
-    'cache base is eather ~/.ox/cache or can set via env variable oxCACHE'
+def cache_path():
     return os.environ.get('oxCACHE', os.path.expanduser('~/.ox/cache'))
 
-def _getCacheDB():
-    path = _getCacheBase()
-    if not os.path.exists(path):
-        os.makedirs(path)
-    return os.path.join(path, "cache.sqlite")
-
-def _connectDb():
-    conn = sqlite3.connect(_getCacheDB(), timeout=10)
-    conn.text_factory = str
-    return conn
-
-def _getSetting(c, key, default=None):
-    c.execute('SELECT value FROM setting WHERE key = ?', (key, ))
-    for row in c:
-        return row[0]
-    return default
-
-def _setSetting(c, key, value):
-    c.execute(u'INSERT OR REPLACE INTO setting values (?, ?)', (key, str(value)))
-
-def _createDb(conn, c):
-    # Create table and indexes
-    c.execute('''CREATE TABLE IF NOT EXISTS cache (url_hash varchar(42) unique, domain text, url text,
-                    post_data text, headers text, created int, data blob, only_headers int)''')
-    c.execute('''CREATE INDEX IF NOT EXISTS cache_domain ON cache (domain)''')
-    c.execute('''CREATE INDEX IF NOT EXISTS cache_url ON cache (url)''')
-    c.execute('''CREATE INDEX IF NOT EXISTS cache_url_hash ON cache (url_hash)''')
-
-    c.execute('''CREATE TABLE IF NOT EXISTS setting (key varchar(1024) unique, value text)''')
-    if int(_getSetting(c, 'version', 0)) < 1:
-        _setSetting(c, 'version', 1)
-        c.execute('''ALTER TABLE cache ADD compressed INT DEFAULT 0''')
-        conn.commit()
-
-def _readUrlCache(url, data, headers=DEFAULT_HEADERS, timeout=-1, value="data"):
-    r = None
-    if timeout == 0:
-        return r
-
-    if data:
-        url_hash = hashlib.sha1(url + '?' + data).hexdigest()
-    else:
-        url_hash = hashlib.sha1(url).hexdigest()
-
-    conn = _connectDb()
-    c = conn.cursor()
-    _createDb(conn, c)
-
-    sql = 'SELECT %s, compressed FROM cache WHERE url_hash=?' % value
-    if timeout > 0:
-        now = time.mktime(time.localtime())
-        t = (url_hash, now-timeout)
-        sql += ' AND created > ?'
-    else:
-        t = (url_hash, )
-    if value != "headers":
-        sql += ' AND only_headers != 1 '
-    c.execute(sql, t)
-    for row in c:
-        r = row[0]
-        if value == 'data':
-            if row[1] == 1:
-                r = zlib.decompress(r)
-            else:
-                r = str(r)
-        break
-
-    c.close()
-    conn.close()
-    return r
-
-def _saveUrlCache(url, post_data, data, headers):
-    if post_data:
-        url_hash = hashlib.sha1(url + '?' + post_data).hexdigest()
-    else:
-        url_hash = hashlib.sha1(url).hexdigest()
-
-    domain = ".".join(urlparse.urlparse(url)[1].split('.')[-2:])
-
-    conn = _connectDb()
-    c = conn.cursor()
-
-    # Create table if not exists
-    _createDb(conn, c)
-
-    # Insert a row of data
-    if not post_data: post_data=""
-    only_headers = 0
-    if data == -1:
-        only_headers = 1
-        data = ""
-    created = time.mktime(time.localtime())
-    content_type = headers.get('content-type', '').split(';')[0].strip()
-    if content_type in COMPRESS_TYPES:
-        compressed = 1
-        data = zlib.compress(data)
-    else:
-        compressed = 0
-    data = sqlite3.Binary(data)
-    t = (url_hash, domain, url, post_data, json.dumps(headers), created, data, only_headers, compressed)
-    c.execute(u"""INSERT OR REPLACE INTO cache values (?, ?, ?, ?, ?, ?, ?, ?, ?)""", t)
-
-    # Save (commit) the changes and clean up
-    conn.commit()
-    c.close()
-    conn.close()
-
-def migrate_to_db():
-    import re
-    import os
-    import sqlite3
-    import glob
-
-    conn = _connectDb()
-    c = conn.cursor()
-    _createDb(conn, c)
-
-    files = glob.glob(_getCacheBase() + "/*/*/*/*/*")
-    _files = filter(lambda x: not x.endswith(".headers"), files)
-
-    for f in _files:
-        info = re.compile("%s/(.*?)/../../../(.*)" % _getCacheBase()).findall(f)
-        domain = url = info[0][0]
-        url_hash = info[0][1]
-        post_data = ""
-        created = os.stat(f).st_ctime
-        fd = open(f, "r")
-        data = fd.read()
-        fd.close()
-        fd = open(f + ".headers", "r")
-        headers = fd.read()
-        fd.close()
-        t = (url_hash, domain, url, post_data, headers, created, sqlite3.Binary(data), 0)
-        c.execute(u"""INSERT OR REPLACE INTO cache values (?, ?, ?, ?, ?, ?, ?, ?)""", t)
-
-    conn.commit()
-    c.close()
-    conn.close()
-
-def compress_db():
-    conn = _connectDb()
-    c = conn.cursor()
-    _createDb(conn, c)
-    c.execute(u"""SELECT url_hash FROM cache WHERE compressed = 0""")
-    ids = [row[0] for row in c]
-    for url_hash in ids:
-        c.execute(u"""SELECT headers, data FROM cache WHERE url_hash = ?""", (url_hash, ))
-        headers = {}
-        for row in c:
-            headers = json.loads(row[0])
-            data = row[1]
-
-        content_type = headers.get('content-type', '').split(';')[0].strip()
-        if content_type in COMPRESS_TYPES:
-            data = zlib.compress(data)
-            t = (sqlite3.Binary(data), url_hash)
-            print url_hash, 'update'
-            c.execute('UPDATE cache SET compressed = 1, data = ? WHERE url_hash = ?', t)
-
-    conn.commit()
-    print "optimizing database"
-    c.execute('VACUUM')
-    conn.commit()
-    c.close()
-    conn.close()
+class Cache:
+    def __init__(self):
+        pass
+
+    def get(self, url, data, headers=DEFAULT_HEADERS, timeout=-1, value="data"):
+        '''
+        if value == 'data' return data of url if its in the cache else None
+        if value == 'headers' return headers for url
+        '''
+        pass
+
+    def set(self, url, post_data, data, headers):
+        pass
+
+class SQLiteCache(Cache):
+    def __init__(self):
+        path = cache_path()
+        if not os.path.exists(path):
+            os.makedirs(path)
+        self.db = os.path.join(path, "cache.sqlite")
+
+    def connect(self):
+        self.conn = sqlite3.connect(self.db, timeout=10)
+        self.conn.text_factory = str
+        self.create()
+
+    def create(self):
+        c = self.conn.cursor()
+        # Create table and indexes
+        c.execute('''CREATE TABLE IF NOT EXISTS cache (url_hash varchar(42) unique, domain text, url text,
+                        post_data text, headers text, created int, data blob, only_headers int)''')
+        c.execute('''CREATE INDEX IF NOT EXISTS cache_domain ON cache (domain)''')
+        c.execute('''CREATE INDEX IF NOT EXISTS cache_url ON cache (url)''')
+        c.execute('''CREATE INDEX IF NOT EXISTS cache_url_hash ON cache (url_hash)''')
+
+        c.execute('''CREATE TABLE IF NOT EXISTS setting (key varchar(1024) unique, value text)''')
+        if int(self.get_setting(c, 'version', 0)) < 1:
+            self.set_setting(c, 'version', 1)
+            c.execute('''ALTER TABLE cache ADD compressed INT DEFAULT 0''')
+            self.conn.commit()
+
+    def get_setting(self, c, key, default=None):
+        c.execute('SELECT value FROM setting WHERE key = ?', (key, ))
+        for row in c:
+            return row[0]
+        return default
+
+    def set_setting(self, c, key, value):
+        c.execute(u'INSERT OR REPLACE INTO setting values (?, ?)', (key, str(value)))
+
+    def get(self, url, data={}, headers=DEFAULT_HEADERS, timeout=-1, value="data"):
+        r = None
+        if timeout == 0:
+            return r
+
+        if data:
+            url_hash = hashlib.sha1(url + '?' + data).hexdigest()
+        else:
+            url_hash = hashlib.sha1(url).hexdigest()
+
+        self.connect()
+        c = self.conn.cursor()
+        sql = 'SELECT %s, compressed FROM cache WHERE url_hash=?' % value
+        if timeout > 0:
+            now = time.mktime(time.localtime())
+            t = (url_hash, now-timeout)
+            sql += ' AND created > ?'
+        else:
+            t = (url_hash, )
+        if value != "headers":
+            sql += ' AND only_headers != 1 '
+        c.execute(sql, t)
+        for row in c:
+            r = row[0]
+            if value == 'headers':
+                r = json.loads(r)
+            elif value == 'data':
+                if row[1] == 1:
+                    r = zlib.decompress(r)
+                else:
+                    r = str(r)
+            break
+
+        c.close()
+        self.conn.close()
+        return r
+
+    def set(self, url, post_data, data, headers):
+        if post_data:
+            url_hash = hashlib.sha1(url + '?' + post_data).hexdigest()
+        else:
+            url_hash = hashlib.sha1(url).hexdigest()
+
+        domain = ".".join(urlparse.urlparse(url)[1].split('.')[-2:])
+
+        self.connect()
+        c = self.conn.cursor()
+
+        # Insert a row of data
+        if not post_data: post_data=""
+        only_headers = 0
+        if data == -1:
+            only_headers = 1
+            data = ""
+        created = time.mktime(time.localtime())
+        content_type = headers.get('content-type', '').split(';')[0].strip()
+        if content_type in COMPRESS_TYPES:
+            compressed = 1
+            data = zlib.compress(data)
+        else:
+            compressed = 0
+            data = sqlite3.Binary(data)
+        t = (url_hash, domain, url, post_data, json.dumps(headers), created,
+             data, only_headers, compressed)
+        c.execute(u"""INSERT OR REPLACE INTO cache values (?, ?, ?, ?, ?, ?, ?, ?, ?)""", t)
+
+        # Save (commit) the changes and clean up
+        self.conn.commit()
+        c.close()
+        self.conn.close()
+
+class FileCache(Cache):
+    def __init__(self):
+        f, self.root = cache_path().split(':')
+
+    def files(self, domain, h):
+        prefix = os.path.join(self.root, domain, h[:2], h[2:4], h[4:6], h[6:8])
+        i = os.path.join(prefix, '%s.json'%h)
+        f = os.path.join(prefix, '%s.dat'%h)
+        return prefix, i, f
+
+    def get(self, url, data={}, headers=DEFAULT_HEADERS, timeout=-1, value="data"):
+        r = None
+        if timeout == 0:
+            return r
+
+        if data:
+            url_hash = hashlib.sha1(url + '?' + data).hexdigest()
+        else:
+            url_hash = hashlib.sha1(url).hexdigest()
+
+        domain = ".".join(urlparse.urlparse(url)[1].split('.')[-2:])
+        prefix, i, f = self.files(domain, url_hash)
+
+        if os.path.exists(i):
+            with open(i) as _i:
+                info = json.load(_i)
+
+            now = time.mktime(time.localtime())
+            expired = now-timeout
+
+            if value != 'headers' and info['only_headers']:
+                return None
+            if timeout < 0 or info['created'] > expired:
+                if value == 'headers':
+                    r = info['headers']
+                else:
+                    with open(f) as data:
+                        r = data.read()
+                    if info['compressed']:
+                        r = zlib.decompress(r)
+        return r
+
+    def set(self, url, post_data, data, headers):
+        if post_data:
+            url_hash = hashlib.sha1(url + '?' + post_data).hexdigest()
+        else:
+            url_hash = hashlib.sha1(url).hexdigest()
+
+        domain = ".".join(urlparse.urlparse(url)[1].split('.')[-2:])
+        prefix, i, f = self.files(domain, url_hash)
+        makedirs(prefix)
+
+        created = time.mktime(time.localtime())
+        content_type = headers.get('content-type', '').split(';')[0].strip()
+
+        info = {
+            'compressed': content_type in COMPRESS_TYPES,
+            'only_headers': data == -1,
+            'created': created,
+            'headers': headers,
+        }
+        if post_data:
+            info['post_data'] = post_data
+        if not info['only_headers']:
+            if info['compressed']:
+                data = zlib.compress(data)
+            with open(f, 'w') as _f:
+                _f.write(data)
+        with open(i, 'w') as _i:
+            json.dump(info, _i)
+
+if cache_path().startswith('fs:'):
+    store = FileCache()
+else:
+    store = SQLiteCache()
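Which backend ends up as store is decided once, at import time, from the oxCACHE environment variable: a value starting with fs: selects the new FileCache (entries sharded into per-domain directories by sha1 prefix), anything else selects SQLiteCache. A minimal usage sketch, assuming a hypothetical cache root /tmp/oxcache; the variable has to be set before ox.cache is first imported:

    import os
    os.environ['oxCACHE'] = 'fs:/tmp/oxcache'  # hypothetical root; 'fs:' prefix selects FileCache

    import ox.cache                            # store = FileCache() is bound here
    data = ox.cache.readUrl('http://example.com/')
    # the body is stored as /tmp/oxcache/example.com/xx/xx/xx/xx/<sha1>.dat,
    # with headers and metadata in the neighbouring <sha1>.json;
    # with oxCACHE unset, SQLiteCache uses ~/.ox/cache/cache.sqlite instead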