210 lines
7.0 KiB
Python
210 lines
7.0 KiB
Python
# encoding: utf-8
|
|
# vi:si:et:sw=4:sts=4:ts=4
|
|
import json
|
|
import sqlite3
|
|
import os
|
|
|
|
import ox
|
|
|
|
import utils
|
|
|
|
|
|
class Backend:
    """sqlite-backed store for file metadata, encode/upload state, site
    volumes and cached media parts. The schema is created and migrated
    incrementally in __init__, driven by a 'version' key in the setting
    table."""

    def __init__(self, db):
        # db: filesystem path of the sqlite database file; the parent
        # directory is created on demand by _conn().
        self.db = db
        conn, c = self._conn()
        # The setting table must exist before anything else: it stores the
        # schema 'version' that the migrations below consult via self.get().
        c.execute('''CREATE TABLE IF NOT EXISTS setting (key varchar(1024) unique, value text)''')
        # --- migration to version 1: file table plus lookup indexes ---
        if int(self.get('version', 0)) < 1:
            self.set('version', 1)
            # NOTE: 'db' is reused here as a list of DDL statements,
            # shadowing the path parameter above.
            db = [
                '''CREATE TABLE IF NOT EXISTS file (
                path varchar(1024) unique,
                oshash varchar(16),
                atime FLOAT,
                ctime FLOAT,
                mtime FLOAT,
                size INT,
                info TEXT,
                created INT,
                modified INT,
                deleted INT)''',
                '''CREATE INDEX IF NOT EXISTS path_idx ON file (path)''',
                '''CREATE INDEX IF NOT EXISTS oshash_idx ON file (oshash)''',
            ]
            for i in db:
                c.execute(i)
            conn.commit()
        # --- migration to version 2: per-site encode/upload tracking ---
        if int(self.get('version', 0)) < 2:
            self.set('version', 2)
            db = [
                '''CREATE TABLE IF NOT EXISTS encode (
                oshash varchar(16),
                site varchar(255))''',
                '''CREATE INDEX IF NOT EXISTS upload_site_idx ON encode (site)''',
            ]
            for i in db:
                c.execute(i)
            conn.commit()
        # --- migration to version 3: site volumes and cached media parts ---
        if int(self.get('version', 0)) < 3:
            self.set('version', 3)
            db = [
                '''CREATE TABLE IF NOT EXISTS volume (
                name varchar(1024) unique,
                path text,
                site varchar(255))''',
                '''CREATE TABLE IF NOT EXISTS part (
                id varchar(1024),
                part int,
                oshash varchar(16),
                site varchar(255))''',
            ]
            for i in db:
                c.execute(i)
            conn.commit()

        # Media cache directory: persisted in the setting table so it stays
        # stable across runs; defaults to a 'media' directory next to the
        # database file.
        self.media_cache = self.get('media_cache')
        if not self.media_cache:
            self.media_cache = os.path.join(os.path.dirname(self.db), 'media')
            self.set('media_cache', self.media_cache)
|
|
|
|
def _conn(self):
|
|
if not os.path.exists(os.path.dirname(self.db)):
|
|
os.makedirs(os.path.dirname(self.db))
|
|
conn = sqlite3.connect(self.db, timeout=10)
|
|
conn.text_factory = sqlite3.OptimizedUnicode
|
|
return conn, conn.cursor()
|
|
|
|
def get(self, key, default=None):
    """Look up `key` in the setting table; return `default` when absent."""
    conn, cursor = self._conn()
    cursor.execute('SELECT value FROM setting WHERE key = ?', (key, ))
    row = cursor.fetchone()
    if row is None:
        return default
    return row[0]
|
|
|
|
def set(self, key, value):
    """Store `value` (coerced to str) under `key`, replacing any old row."""
    conn, cursor = self._conn()
    sql = u'INSERT OR REPLACE INTO setting VALUES (?, ?)'
    cursor.execute(sql, (key, str(value)))
    conn.commit()
|
|
|
|
def info(self, oshash):
    """Return the decoded JSON info blob stored for `oshash`, else None."""
    conn, cursor = self._conn()
    cursor.execute('SELECT info FROM file WHERE oshash = ?', (oshash, ))
    row = cursor.fetchone()
    if row:
        return json.loads(row[0])
    return None
|
|
|
|
def path(self, oshash):
    """Return every filesystem path recorded for the given `oshash`."""
    conn, cursor = self._conn()
    cursor.execute('SELECT path FROM file WHERE oshash = ?', (oshash, ))
    return [row[0] for row in cursor]
|
|
|
|
def cache_path(self, oshash, profile):
    """Build the media-cache path for (oshash, profile), sharded into
    subdirectories by utils.hash_prefix."""
    prefix = os.path.join(*utils.hash_prefix(oshash))
    return os.path.join(self.media_cache, prefix, profile)
|
|
|
|
def volumes(self, site):
    """List the volumes registered for `site` as {'name', 'path'} dicts."""
    conn, cursor = self._conn()
    cursor.execute('SELECT name, path FROM volume WHERE site= ?', (site, ))
    return [{'name': name, 'path': path} for name, path in cursor]
|
|
|
|
def add_volume(self, site, name, path):
    """Register volume `path` for `site` under a unique name.

    If a volume with the same path is already registered, its existing
    record is returned unchanged. Otherwise the requested name is
    de-duplicated against existing volume names ("name 2", "name 3", ...)
    before inserting.

    Returns:
        dict with 'name' and 'path' of the new (or existing) volume.
    """
    volumes = self.volumes(site)
    # Same path already registered: return the existing record as-is.
    existing = [v for v in volumes if v['path'] == path]
    if existing:
        return existing[0]
    # Find a free name. The original loop never incremented the counter,
    # so it spun forever once "<name> 2" was also taken; it also relied on
    # filter() being truthy, which breaks on Python 3.
    taken = set(v['name'] for v in volumes)
    _name = name
    n = 2
    while _name in taken:
        _name = "%s %d" % (name, n)
        n += 1
    name = _name
    conn, c = self._conn()
    c.execute('INSERT INTO volume (site, name, path) VALUES (?, ?, ?)', (site, name, path))
    conn.commit()
    return {
        'name': name, 'path': path
    }
|
|
|
|
def rename_volume(self, site, name, new_name):
    """Rename volume `name` of `site`, de-duplicating the new name first.

    The requested new name is adjusted to "<new_name> 2", "<new_name> 3",
    ... while it collides with an existing volume name.

    Returns:
        dict with the final 'name' that was stored.
    """
    volumes = self.volumes(site)
    taken = set(v['name'] for v in volumes)
    # Fix: the original loop never incremented the counter, so it spun
    # forever once "<new_name> 2" was also taken (and relied on filter()
    # truthiness, which breaks on Python 3).
    _name = new_name
    n = 2
    while _name in taken:
        _name = "%s %d" % (new_name, n)
        n += 1
    new_name = _name
    conn, c = self._conn()
    c.execute('UPDATE volume SET name = ? WHERE name = ? AND site = ?', (new_name, name, site))
    conn.commit()
    return {
        'name': new_name,
    }
|
|
|
|
def files(self, site, keys=None, order='path', limit=None):
    """Fetch rows from the file table as dicts.

    Args:
        site: unused here; kept for interface compatibility with callers.
        keys: column names to SELECT. These are spliced into the SQL text
            (identifiers cannot be bound as ? parameters), so only trusted
            values must be passed. The old mutable default `keys=[]` is
            replaced by None to avoid shared-state surprises.
        order: ORDER BY expression (also spliced; trusted input only).
        limit: optional (start, stop) pair, mapped to "LIMIT start, stop-start".

    Returns:
        list of {key: value} dicts, one per row, in `order`.
    """
    conn, c = self._conn()
    keys = list(keys or [])
    sql = 'SELECT %s FROM file ORDER BY %s ' % (','.join(keys), order)
    if limit:
        sql += ' LIMIT %d, %d' % (limit[0], limit[1] - limit[0])
    # Removed leftover Python-2 debug `print sql` statement.
    c.execute(sql)
    return [dict(zip(keys, row)) for row in c]
|
|
|
|
def add_file(self, site, filename):
    """Probe `filename` with utils.avinfo and return its oshash."""
    media_info = utils.avinfo(filename)
    return media_info['oshash']
|
|
|
|
def cache_file(self, site, url, itemId, filename):
    """Ensure the media file for (site, itemId, part) is in the local cache.

    `filename` looks like "<resolution>p<part>.<ext>", e.g. "480p1.webm".
    If the part is already recorded and its cached file exists, that path
    is returned; otherwise the file is downloaded from `url` into the cache
    and the part row is (re-)recorded.

    Returns:
        local cache path of the file.

    Raises:
        LookupError: when no oshash is known for the requested part (the
            original code hit an UnboundLocalError here).
    """
    conn, c = self._conn()
    # rsplit/maxsplit keep extra '.' or 'p' characters in the name from
    # raising ValueError (the original bare split() required exactly one
    # of each).
    stem, ext = filename.rsplit('.', 1)
    resolution, part = stem.split('p', 1)
    c.execute('SELECT oshash FROM part WHERE site = ? AND id = ? AND part =?',
        (site, itemId, part))
    oshash = None
    path = ''
    row = c.fetchone()
    if row:
        oshash = row[0]
        path = self.cache_path(oshash, '480p.webm')
    # Recorded but missing on disk: treat as uncached and re-download.
    if path and not os.path.exists(path):
        path = ''
    if not path:
        if oshash is None:
            #FIXME: derive the oshash for an unknown part instead of failing
            raise LookupError('no oshash known for %s part %s of %s' % (site, part, itemId))
        path = self.cache_path(oshash, '480p.webm')
        #FIXME: need to add cookies
        ox.net.saveUrl(url, path)
        t = (oshash, part, itemId, site)
        c.execute('INSERT INTO part (oshash, part, id, site) VALUES (?, ?, ?, ?)', t)
        # Fix: the INSERT above was never committed in the original code.
        conn.commit()
    return path
|
|
|
|
def get_file(self, site, itemId, filename):
    """Resolve the local cache path for a request like "<res>p<part>.<ext>".

    `itemId` may be either a 16-character lowercase alphanumeric oshash
    (used directly) or a site item id that is looked up in the part table.

    Returns:
        the cached file's path, or '' when it is unknown or missing on disk.
    """
    conn, c = self._conn()
    # rsplit/maxsplit keep extra '.' or 'p' characters in the name from
    # raising ValueError (the original bare split() required exactly one
    # of each). Also removed the leftover Python-2 debug print.
    stem, ext = filename.rsplit('.', 1)
    resolution, part = stem.split('p', 1)
    if len(itemId) == 16 and itemId.islower() and itemId.isalnum():
        # itemId already looks like an oshash — use it directly.
        path = self.cache_path(itemId, '480p.webm')
    else:
        c.execute('SELECT oshash FROM part WHERE site = ? AND id = ? AND part =?',
            (site, itemId, part))
        path = ''
        row = c.fetchone()
        if row:
            path = self.cache_path(row[0], '480p.webm')

    # A recorded path only counts when the file actually exists on disk.
    if path and not os.path.exists(path):
        path = ''
    return path
|