cleanup cache, fix delete

Author: j
Date:   2015-12-11 20:00:05 +01:00
Commit: 85c1e789ba
Parent: d938091b26


@@ -144,6 +144,17 @@ class Cache:
     def set(self, url, post_data, data, headers):
         pass
 
+    def get_domain(self, url):
+        return ".".join(urllib.parse.urlparse(url)[1].split('.')[-2:])
+
+    def get_url_hash(self, url, data=None):
+        if data:
+            url_hash = hashlib.sha1((url + '?' + data).encode('utf-8')).hexdigest()
+        else:
+            url_hash = hashlib.sha1(url.encode('utf-8')).hexdigest()
+        return url_hash
+
+
 class SQLiteCache(Cache):
     def __init__(self):
         path = cache_path()
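
The two helpers added above pull the cache-key logic out of the four call
sites changed below. As a standalone sketch of what they compute (module-level
functions here purely for illustration; in the commit they are methods on
Cache):

    import hashlib
    import urllib.parse

    def get_domain(url):
        # keep the last two host labels: 'www.example.com' -> 'example.com'
        return ".".join(urllib.parse.urlparse(url)[1].split('.')[-2:])

    def get_url_hash(url, data=None):
        # POST data is folded into the key, so the same URL with different
        # payloads gets distinct cache entries
        if data:
            return hashlib.sha1((url + '?' + data).encode('utf-8')).hexdigest()
        return hashlib.sha1(url.encode('utf-8')).hexdigest()

    print(get_domain('http://www.example.com/path'))      # example.com
    print(get_url_hash('http://example.com/api', 'q=1'))  # 40-char sha1 hex digest
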
@@ -185,10 +196,7 @@ class SQLiteCache(Cache):
         r = None
         if timeout == 0:
             return r
-        if data:
-            url_hash = hashlib.sha1((url + '?' + data).encode('utf-8')).hexdigest()
-        else:
-            url_hash = hashlib.sha1(url.encode('utf-8')).hexdigest()
+        url_hash = self.get_url_hash(url, data)
 
         conn = self.connect()
         c = conn.cursor()
@@ -218,23 +226,19 @@ class SQLiteCache(Cache):
         return r
 
     def delete(self, url, data=None, headers=DEFAULT_HEADERS):
-        if data:
-            url_hash = hashlib.sha1((url + '?' + data).encode('utf-8')).hexdigest()
-        else:
-            url_hash = hashlib.sha1(url.encode('utf-8')).hexdigest()
-
+        url_hash = self.get_url_hash(url, data)
         conn = self.connect()
         c = conn.cursor()
         sql = 'DELETE FROM cache WHERE url_hash=?'
         t = (url_hash, )
         c.execute(sql, t)
+        conn.commit()
+        c.close()
+        conn.close()
 
     def set(self, url, post_data, data, headers):
-        if post_data:
-            url_hash = hashlib.sha1((url + '?' + post_data).encode('utf-8')).hexdigest()
-        else:
-            url_hash = hashlib.sha1(url.encode('utf-8')).hexdigest()
-        domain = ".".join(urllib.parse.urlparse(url)[1].split('.')[-2:])
+        url_hash = self.get_url_hash(url, post_data)
+        domain = self.get_domain(url)
 
         conn = self.connect()
         c = conn.cursor()
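
The delete() change above is the "fix delete" part of this commit: the old
code executed the DELETE but never committed, and Python's sqlite3 module
rolls back any pending transaction when the connection is closed, so the row
silently survived. A minimal demonstration of the pitfall (file path
hypothetical):

    import sqlite3

    path = '/tmp/demo-cache.sqlite'  # hypothetical throwaway database
    conn = sqlite3.connect(path)
    conn.execute('CREATE TABLE IF NOT EXISTS cache (url_hash TEXT, data BLOB)')
    conn.execute("INSERT INTO cache VALUES ('abc', 'x')")
    conn.commit()

    # old behaviour: DELETE without commit() is rolled back on close()
    c = conn.cursor()
    c.execute("DELETE FROM cache WHERE url_hash='abc'")
    c.close()
    conn.close()

    conn = sqlite3.connect(path)
    print(conn.execute('SELECT count(*) FROM cache').fetchone()[0])  # still 1
    conn.close()
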
@@ -285,12 +289,9 @@ class FileCache(Cache):
 
         if timeout == 0:
             return r
 
-        if data:
-            url_hash = hashlib.sha1((url + '?' + data).encode('utf-8')).hexdigest()
-        else:
-            url_hash = hashlib.sha1(url.encode('utf-8')).hexdigest()
-        domain = ".".join(urllib.parse.urlparse(url)[1].split('.')[-2:])
+        url_hash = self.get_url_hash(url, data)
+        domain = self.get_domain(url)
         prefix, i, f = self.files(domain, url_hash)
         if os.path.exists(i):
             with open(i) as _i:
@@ -314,22 +315,17 @@ class FileCache(Cache):
         return r
 
     def delete(self, url, data=None, headers=DEFAULT_HEADERS):
-        if data:
-            url_hash = hashlib.sha1((url + '?' + data).encode('utf-8')).hexdigest()
-        else:
-            url_hash = hashlib.sha1(url.encode('utf-8')).hexdigest()
-        domain = ".".join(urllib.parse.urlparse(url)[1].split('.')[-2:])
+        url_hash = self.get_url_hash(url, data)
+        domain = self.get_domain(url)
         prefix, i, f = self.files(domain, url_hash)
         if os.path.exists(i):
             os.unlink(i)
 
     def set(self, url, post_data, data, headers):
-        if post_data:
-            url_hash = hashlib.sha1((url + '?' + post_data).encode('utf-8')).hexdigest()
-        else:
-            url_hash = hashlib.sha1(url.encode('utf-8')).hexdigest()
-        domain = ".".join(urllib.parse.urlparse(url)[1].split('.')[-2:])
+        url_hash = self.get_url_hash(url, post_data)
+        domain = self.get_domain(url)
         prefix, i, f = self.files(domain, url_hash)
         makedirs(prefix)
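
The FileCache hunks above now derive the same (domain, url_hash) pair in
get(), delete() and set() and pass it to self.files(), whose implementation
is outside this diff. A purely hypothetical sketch of such a layout, only to
illustrate why both values are needed:

    import os

    def files(base, domain, url_hash):
        # hypothetical layout, not the actual files() from this codebase:
        # one directory per domain, sharded by the first hash byte, holding
        # an info file and a data file per cached URL
        prefix = os.path.join(base, domain, url_hash[:2])
        i = os.path.join(prefix, url_hash + '.json')  # metadata ('i' in the diff)
        f = os.path.join(prefix, url_hash + '.dat')   # payload ('f' in the diff)
        return prefix, i, f
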