cleanup cache, fix delete

This commit is contained in:
j 2015-12-11 20:00:05 +01:00
parent d938091b26
commit 85c1e789ba

View file

@ -144,6 +144,17 @@ class Cache:
def set(self, url, post_data, data, headers):
    # Intentional no-op: the base Cache class only defines the interface;
    # concrete subclasses (SQLiteCache, FileCache) override set() with
    # real storage logic.
    pass
def get_domain(self, url):
    """Return the registrable domain of *url* as the last two host labels.

    E.g. 'http://www.example.com/x' -> 'example.com'.
    NOTE(review): this is a naive split — multi-part public suffixes such
    as 'co.uk' would come back as just 'co.uk' without the site name.
    """
    host = urllib.parse.urlparse(url)[1]
    labels = host.split('.')
    return '.'.join(labels[-2:])
def get_url_hash(self, url, data=None):
    """SHA-1 hex digest identifying a cached request.

    When *data* is truthy (a POST/query payload), the digest covers
    'url?data'; otherwise it covers the url alone.
    """
    key = url + '?' + data if data else url
    return hashlib.sha1(key.encode('utf-8')).hexdigest()
class SQLiteCache(Cache):
def __init__(self):
path = cache_path()
@ -171,7 +182,7 @@ class SQLiteCache(Cache):
self.set_setting(c, 'version', 1)
c.execute('''ALTER TABLE cache ADD compressed INT DEFAULT 0''')
conn.commit()
def get_setting(self, c, key, default=None):
c.execute('SELECT value FROM setting WHERE key = ?', (key, ))
for row in c:
@ -185,10 +196,7 @@ class SQLiteCache(Cache):
r = None
if timeout == 0:
return r
if data:
url_hash = hashlib.sha1((url + '?' + data).encode('utf-8')).hexdigest()
else:
url_hash = hashlib.sha1(url.encode('utf-8')).hexdigest()
url_hash = self.get_url_hash(url, data)
conn = self.connect()
c = conn.cursor()
@ -218,23 +226,19 @@ class SQLiteCache(Cache):
return r
def delete(self, url, data=None, headers=DEFAULT_HEADERS):
if data:
url_hash = hashlib.sha1((url + '?' + data).encode('utf-8')).hexdigest()
else:
url_hash = hashlib.sha1(url.encode('utf-8')).hexdigest()
url_hash = self.get_url_hash(url, data)
conn = self.connect()
c = conn.cursor()
sql = 'DELETE FROM cache WHERE url_hash=?'
t = (url_hash, )
c.execute(sql, t)
conn.commit()
c.close()
conn.close()
def set(self, url, post_data, data, headers):
if post_data:
url_hash = hashlib.sha1((url + '?' + post_data).encode('utf-8')).hexdigest()
else:
url_hash = hashlib.sha1(url.encode('utf-8')).hexdigest()
domain = ".".join(urllib.parse.urlparse(url)[1].split('.')[-2:])
url_hash = self.get_url_hash(url, post_data)
domain = self.get_domain(url)
conn = self.connect()
c = conn.cursor()
@ -279,18 +283,15 @@ class FileCache(Cache):
i = os.path.join(prefix, '%s.json'%h)
f = os.path.join(prefix, '%s.dat'%h)
return prefix, i, f
def get(self, url, data={}, headers=DEFAULT_HEADERS, timeout=-1, value="data"):
r = None
if timeout == 0:
return r
if data:
url_hash = hashlib.sha1((url + '?' + data).encode('utf-8')).hexdigest()
else:
url_hash = hashlib.sha1(url.encode('utf-8')).hexdigest()
url_hash = self.get_url_hash(url, data)
domain = self.get_domain(url)
domain = ".".join(urllib.parse.urlparse(url)[1].split('.')[-2:])
prefix, i, f = self.files(domain, url_hash)
if os.path.exists(i):
with open(i) as _i:
@ -314,22 +315,17 @@ class FileCache(Cache):
return r
def delete(self, url, data=None, headers=DEFAULT_HEADERS):
if data:
url_hash = hashlib.sha1((url + '?' + data).encode('utf-8')).hexdigest()
else:
url_hash = hashlib.sha1(url.encode('utf-8')).hexdigest()
domain = ".".join(urllib.parse.urlparse(url)[1].split('.')[-2:])
url_hash = self.get_url_hash(url, data)
domain = self.get_domain(url)
prefix, i, f = self.files(domain, url_hash)
if os.path.exists(i):
os.unlink(i)
def set(self, url, post_data, data, headers):
if post_data:
url_hash = hashlib.sha1((url + '?' + post_data).encode('utf-8')).hexdigest()
else:
url_hash = hashlib.sha1(url.encode('utf-8')).hexdigest()
url_hash = self.get_url_hash(url, post_data)
domain = self.get_domain(url)
domain = ".".join(urllib.parse.urlparse(url)[1].split('.')[-2:])
prefix, i, f = self.files(domain, url_hash)
makedirs(prefix)