# -*- coding: utf-8 -*-
|
|
# vi:si:et:sw=4:sts=4:ts=4
|
|
|
|
|
|
from contextlib import closing
|
|
import base64
|
|
import json
|
|
import os
|
|
import tarfile
|
|
from threading import Thread
|
|
import urllib.request, urllib.error, urllib.parse
|
|
import shutil
|
|
import subprocess
|
|
import sys
|
|
import time
|
|
|
|
import OpenSSL.crypto
|
|
import ox
|
|
from oxtornado import actions
|
|
|
|
import settings
|
|
import utils
|
|
import db
|
|
|
|
import logging
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
ENCODING='base64'
|
|
|
|
def verify(release):
    """Verify the cryptographic signature of a release manifest.

    Builds a canonical payload of '<version>/<sha1>' lines, one per
    module in sorted module-name order, and checks it against either an
    X.509/TLS signature ('signature_sha512' / 'signature_sha256' /
    'signature_sha1' keys) or, as a fallback, a legacy ed25519
    'signature' key.  Returns True only if a signature verifies.
    """
    verified = False
    value = []
    for module in sorted(release['modules']):
        value += [str('%s/%s' % (release['modules'][module]['version'], release['modules'][module]['sha1']))]
    value = '\n'.join(value)
    value = value.encode()
    # Try digests strongest-first.  Note the break: only the FIRST
    # digest present in the manifest is attempted — a failed sha512
    # check is not retried with sha256.
    for digest in ('sha512', 'sha256', 'sha1'):
        if 'signature_%s'%digest in release:
            tls_sig = base64.b64decode(release['signature_%s'%digest].encode())
            cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, settings.OML_UPDATE_CERT)
            try:
                OpenSSL.crypto.verify(cert, tls_sig, value, digest)
                verified = True
            except OpenSSL.crypto.Error:
                logger.debug('invalid tls signature')
                verified = False
            break
    # Legacy ed25519 signature, only consulted when no TLS signature
    # verified above.
    if 'signature' in release and not verified:
        import ed25519
        vk = ed25519.VerifyingKey(settings.OML_UPDATE_KEY, encoding=ENCODING)
        sig = release['signature'].encode()
        try:
            vk.verify(sig, value, encoding=ENCODING)
            verified = True
        except ed25519.BadSignatureError:
            verified = False
    return verified
|
|
|
|
def get(url, filename=None):
    """Fetch *url* with the configured User-Agent.

    When *filename* is None the whole response body is returned as
    bytes.  Otherwise the body is streamed to *filename* (creating any
    missing parent directories) and None is returned.
    """
    request = urllib.request.Request(url, headers={
        'User-Agent': settings.USER_AGENT
    })
    with closing(urllib.request.urlopen(request)) as u:
        if not filename:
            return u.read()
        dirname = os.path.dirname(filename)
        if dirname and not os.path.exists(dirname):
            os.makedirs(dirname)
        with open(filename, 'wb') as fd:
            # shutil.copyfileobj replaces the original manual
            # read(4096)/write loop: same chunked streaming, less code.
            shutil.copyfileobj(u, fd, 4096)
|
|
|
def check():
    """Return True when a verified, newer openmedialibrary release is
    available from the configured release URL."""
    if not settings.release:
        return False
    url = settings.server.get('release_url',
        'http://downloads.openmedialibrary.com/release.json')
    release = json.loads(get(url).decode('utf-8'))
    installed = current_version('openmedialibrary')
    available = release['modules']['openmedialibrary']['version']
    return verify(release) and installed < available
|
|
|
|
def current_version(module):
    """Return the installed version string for *module*, or '' when the
    local release manifest has no version recorded for it."""
    release = settings.release
    has_version = ('modules' in release
                   and module in release['modules']
                   and 'version' in release['modules'][module])
    return release['modules'][module]['version'] if has_version else ''
|
|
|
|
def get_latest_release():
    """Download, verify and cache the release manifest.

    On success the raw JSON is written to <updates_path>/release.json
    and the parsed dict is returned.  Any failure is logged and None is
    returned (best-effort semantics, callers treat None as "no update").
    """
    try:
        release_data = get(settings.server.get('release_url'))
        release = json.loads(release_data.decode('utf-8'))
        if verify(release):
            ox.makedirs(settings.updates_path)
            with open(os.path.join(settings.updates_path, 'release.json'), 'wb') as fd:
                fd.write(release_data)
            return release
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt.  Keep the best-effort behavior for
        # ordinary errors but record the traceback for debugging.
        logger.debug('failed get latest release', exc_info=True)
|
|
|
|
def get_platform():
    """Map sys.platform onto the platform names used by release
    manifests ('darwin64', 'linux64', 'linux32', or sys.platform
    unchanged for anything else, e.g. 'win32')."""
    platform_name = sys.platform
    if platform_name.startswith('darwin'):
        return 'darwin64'
    if platform_name.startswith('linux'):
        import platform
        return 'linux64' if platform.architecture()[0] == '64bit' else 'linux32'
    return platform_name
|
|
|
|
def download():
    """Download all module tarballs required for the pending update.

    Returns True when nothing needs doing or everything downloaded and
    checksummed correctly, False when a tarball failed its sha1 check
    (the bad file is removed so a retry re-downloads it).
    """
    # No local release manifest yet: nothing to compare against.
    if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
        return True
    release = get_latest_release()
    platform = get_platform()
    if release:
        ox.makedirs(settings.updates_path)
        os.chdir(os.path.dirname(settings.base_dir))
        # Track every file that belongs to this release so stale
        # tarballs in updates_path can be pruned afterwards.
        current_files = {'release.json'}
        for module in release['modules']:
            module_tar = os.path.join(settings.updates_path, release['modules'][module]['name'])
            # Only fetch modules for this platform that are newer than
            # what is currently installed.
            if release['modules'][module].get('platform', platform) == platform and \
                release['modules'][module]['version'] > current_version(module):
                base_url = settings.server.get('release_url').rsplit('/', 1)[0]
                url = '/'.join([base_url, release['modules'][module]['name']])
                if not os.path.exists(module_tar):
                    logger.debug('download %s', os.path.basename(module_tar))
                    get(url, module_tar)
                # Verify even pre-existing tarballs; a corrupt file is
                # removed and the whole call reports failure so the
                # caller retries.
                if ox.sha1sum(module_tar) != release['modules'][module]['sha1']:
                    logger.debug('invalid checksum %s', os.path.basename(module_tar))
                    os.unlink(module_tar)
                    return False
                current_files.add(os.path.basename(module_tar))
        # Prune files in updates_path not belonging to this release.
        for f in set(next(os.walk(settings.updates_path))[2])-current_files:
            os.unlink(os.path.join(settings.updates_path, f))
        return True
    return True
|
|
|
|
def install():
    """Install previously downloaded module tarballs.

    Verifies the cached manifest, extracts each applicable module into
    '<module>_new', swaps it into place (keeping '<module>_old' as a
    fallback on win32 where in-use files cannot be deleted), then
    promotes the manifest to data_path.  Returns False when a tarball
    is missing or corrupt, True otherwise.
    """
    if not os.path.exists(os.path.join(settings.updates_path, 'release.json')):
        return True
    if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
        return True
    with open(os.path.join(settings.updates_path, 'release.json')) as fd:
        release = json.load(fd)
    old_version = current_version('openmedialibrary')
    new_version = release['modules']['openmedialibrary']['version']
    if verify(release) and old_version < new_version:
        base = os.path.dirname(settings.base_dir)
        os.chdir(base)
        platform = get_platform()
        for module in release['modules']:
            if release['modules'][module].get('platform', platform) == platform and \
                release['modules'][module]['version'] > current_version(module):
                module_tar = os.path.join(settings.updates_path, release['modules'][module]['name'])
                if os.path.exists(module_tar) and ox.sha1sum(module_tar) == release['modules'][module]['sha1']:
                    # Extract into <module>_new first: tar would fail if
                    # the old module dir were moved away before extract.
                    new = '%s_new' % module
                    ox.makedirs(new)
                    os.chdir(new)
                    tar = tarfile.open(module_tar)
                    tar.extractall()
                    tar.close()
                    os.chdir(base)
                    module_old = '%s_old' % module
                    if os.path.exists(module_old):
                        rmtree(module_old)
                    if os.path.exists(module):
                        move(module, module_old)
                    move(os.path.join(new, module), module)
                    # On win32 the running code keeps files locked, so
                    # the _old copy is left behind for later cleanup.
                    if platform != 'win32' and os.path.exists(module_old):
                        rmtree(module_old)
                    rmtree(new)
                else:
                    # Missing/corrupt tarball: drop it and signal the
                    # caller to re-download.
                    if os.path.exists(module_tar):
                        os.unlink(module_tar)
                    return False
        # All modules swapped: promote the manifest so the new versions
        # count as installed.
        shutil.copy(os.path.join(settings.updates_path, 'release.json'), os.path.join(settings.data_path, 'release.json'))
        upgrade_app()
        # FIXME: still needed?
        if old_version < '20160112-651-de984a3':
            subprocess.call(['./ctl', 'postupdate', '-o', old_version, '-n', new_version])
        return True
    return True
|
|
|
|
def move(src, dst):
    """shutil.move wrapper that logs a failure before re-raising.

    The handler exists only to leave a trace in the debug log; the
    exception always propagates to the caller.
    """
    try:
        shutil.move(src, dst)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # pass through untouched.
        logger.debug('failed to move %s to %s', src, dst)
        raise
|
|
|
|
def rmtree(path):
    """shutil.rmtree wrapper that logs a failure before re-raising.

    The handler exists only to leave a trace in the debug log; the
    exception always propagates to the caller.
    """
    try:
        shutil.rmtree(path)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # pass through untouched.
        logger.debug('failed to remove %s', path)
        raise
|
|
|
|
def update_available():
    """Return True when either a database migration or a verified,
    newer release is pending installation."""
    if settings.server.get('db_version', 0) < settings.DB_VERSION:
        return True
    pending_manifest = os.path.join(settings.updates_path, 'release.json')
    installed_manifest = os.path.join(settings.data_path, 'release.json')
    if not os.path.exists(pending_manifest):
        return False
    if not os.path.exists(installed_manifest):
        return False
    with open(pending_manifest) as fd:
        release = json.load(fd)
    installed = current_version('openmedialibrary')
    available = release['modules']['openmedialibrary']['version']
    return verify(release) and installed < available
|
|
|
|
def restart_oml(update=False):
    """Restart the OML service via ctl.

    When *update* is truthy the latest release manifest is fetched
    first, so the restarted process can pick it up.
    """
    if update:
        get_latest_release()
    utils.ctl('restart')
|
|
|
|
def get_app_version(app):
    """Return CFBundleShortVersionString of an .app bundle.

    Reads the value with `defaults read` and returns it as stripped
    bytes; returns None when the bundle has no Info.plist (e.g. on
    non-macOS systems).
    """
    plist = app + '/Contents/Info.plist'
    if not os.path.exists(plist):
        return None
    cmd = ['defaults', 'read', plist, 'CFBundleShortVersionString']
    return subprocess.check_output(cmd).strip()
|
|
|
|
def upgrade_app():
    """On macOS, replace /Applications/Open Media Library.app with the
    copy bundled in the platform_darwin64 module when their versions
    differ.  Best effort: failures are logged, never raised.
    """
    if sys.platform == 'darwin':
        base = os.path.dirname(settings.base_dir)
        bundled_app = os.path.join(base, 'platform_darwin64/Applications/Open Media Library.app')
        app = '/Applications/Open Media Library.app'
        installed_version = get_app_version(app)
        # Renamed from `current_version`, which shadowed the
        # module-level current_version() helper.
        bundled_version = get_app_version(bundled_app)
        if installed_version and bundled_version and installed_version != bundled_version:
            try:
                shutil.rmtree(app)
                shutil.copytree(bundled_app, app)
            except Exception:
                # Narrowed from a bare `except:`; traceback is logged.
                logger.debug('Failed to update Application', exc_info=True)
|
|
|
|
def getVersion(data):
    '''
    check if new version is available
    '''
    # NOTE(review): the response is initialized with an 'upgrade' key,
    # but the code below only ever sets 'update'; 'upgrade' stays False
    # forever.  Looks inconsistent — confirm which key clients read.
    response = {
        'current': settings.MINOR_VERSION,
        'version': settings.MINOR_VERSION,
        'upgrade': False,
    }
    if settings.MINOR_VERSION == 'git':
        # Development checkouts never auto-update; the git-based check
        # below was disabled and kept for reference.
        '''
        cmd = ['git', 'rev-parse', '@']
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True)
        stdout, stderr = p.communicate()
        current = stdout.strip()
        cmd = ['git', 'ls-remote', 'origin', '-h', 'refs/heads/master']
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True)
        stdout, stderr = p.communicate()
        new = stdout.strip()[:40]
        response['update'] = len(new) == 40 and current != new
        '''
        response['update'] = False
    else:
        # Refresh the cached manifest, then compare installed vs.
        # available versions; bail out early when either manifest is
        # missing.
        get_latest_release()
        if not os.path.exists(os.path.join(settings.updates_path, 'release.json')):
            return response
        if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
            return response
        with open(os.path.join(settings.updates_path, 'release.json')) as fd:
            release = json.load(fd)
        current = current_version('openmedialibrary')
        response['current'] = current
        new = release['modules']['openmedialibrary']['version']
        response['version'] = new
        response['update'] = current < new
    return response
actions.register(getVersion, cache=False)
|
|
|
|
def restart(data):
    '''
    restart (and upgrade if upgrades are available)
    '''
    # 'update' is optional in the request payload; a missing key means
    # a plain restart without refreshing the release manifest.
    wants_update = data.get('update')
    restart_oml(wants_update)
    return {}
actions.register(restart, cache=False)
|
|
|
|
|
|
class Update(Thread):
    """Self-starting daemon thread that migrates the database, then
    downloads and installs pending releases, reporting progress over
    the websocket and restarting OML when done."""

    # Class-level default; status() replaces it with an instance
    # attribute of the same shape.
    _status = {
        'reload': False,
        'status': 'Updating Open Media Library...'
    }

    def __init__(self):
        # The thread launches itself; callers just construct Update().
        Thread.__init__(self)
        self.daemon = True
        self.start()

    def status(self, status, reload=False):
        """Record the current status and broadcast it to clients."""
        # Imported lazily to avoid a circular import at module load.
        from websocket import trigger_event
        self._status = {
            'reload': reload,
            'status': status,
        }
        trigger_event('updatestatus', self._status)

    def install(self):
        """Download and install until no update remains.

        Returns True when installation FAILED (caller restarts without
        reload), False when everything is up to date.
        """
        while update_available():
            self.status('Downloading...')
            # download() returns False on checksum failure; retry with
            # a short backoff until it succeeds.
            while not download():
                self.status('Download failed, retrying...')
                time.sleep(5)
                self.status('Downloading...')
            self.status('Installing...')
            if not install():
                self.status('Installation failed.')
                return True
        return False

    def update_database(self):
        """Run pending schema/data migrations in order, then record the
        new db_version in settings."""
        db_version = settings.server.get('db_version', 0)
        if db_version < settings.DB_VERSION:
            self.status('Updating...')
            if db_version < 3:
                db_version = migrate_3()
            if db_version < 4:
                db_version = migrate_4()
            if db_version < 5:
                db_version = migrate_5()
            if db_version < 6:
                db_version = migrate_6()
            if db_version < 7:
                db_version = migrate_7()
            # NOTE(review): there is no migrate_9; versions below 9 run
            # migrate_8 (which returns 8) and fall through to
            # migrate_10 — presumably intentional, confirm.
            if db_version < 9:
                db_version = migrate_8()
            if db_version < 10:
                db_version = migrate_10()
            if db_version < 11:
                db_version = migrate_11()
            if db_version < 12:
                db_version = migrate_12()
            settings.server['db_version'] = settings.DB_VERSION

    def run(self):
        self.status('Checking for updates...')
        self.update_database()
        # install() returning True signals failure: restart without
        # telling clients to reload.
        if self.install():
            restart_oml()
            return
        self.status('Relaunching...', True)
        restart_oml()
|
|
|
|
|
|
def migrate_3():
    """Migration 3: reconcile items marked 'available' with their
    actual files — drop the media state when no file exists, and copy
    the page count from the file record when the item lacks one."""
    with db.session():
        import item.models
        for i in item.models.Item.find({
            'query': {
                'conditions': [{
                    'key':'mediastate',
                    'value':'available',
                    'operator': '=='
                }]
            }
        }):
            if not i.files.all():
                # Marked available but no file on disk any more.
                i.remove_file()
            else:
                f = i.files.all()[0]
                if not 'pages' in i.info and 'pages' in f.info:
                    i.info['pages'] = f.info['pages']
                    i.save()
    return 3
|
|
|
|
def migrate_4():
    """Migration 4: collapse list-valued 'isbn' metadata to a single
    ISBN-13 string (first convertible value), dropping the key when no
    value converts or the remaining value is empty."""
    with db.session() as session:
        import item.models
        from meta.utils import to_isbn13
        for i in item.models.Item.query:
            update = False
            if 'isbn' in i.meta and isinstance(i.meta['isbn'], list):
                isbns = [to_isbn13(isbn) for isbn in i.meta['isbn']]
                isbns = [isbn for isbn in isbns if isbn]
                if isbns:
                    # Keep only the first valid ISBN-13; mirror into info.
                    i.meta['isbn'] = isbns[0]
                    if 'isbn' in i.info:
                        i.info['isbn'] = i.meta['isbn']
                else:
                    # No convertible ISBN left: remove the key entirely.
                    del i.meta['isbn']
                    if 'isbn' in i.info:
                        del i.info['isbn']
                update = True
            if 'isbn' in i.meta and not i.meta['isbn']:
                del i.meta['isbn']
                update = True
            if update:
                session.add(i)
        session.commit()
    return 4
|
|
|
|
def migrate_5():
    """Migration 5: rebuild the unique user_metadata index, reset the
    sharemetadata flags, refresh stored metadata hashes and drop the
    legacy '_from' key before re-syncing item metadata."""
    # Fixed: the original had a stray trailing comma after this call
    # (`]),`), turning the statement into a pointless 1-tuple.
    db.run_sql([
        'DROP INDEX IF EXISTS user_metadata_index',
        'CREATE UNIQUE INDEX user_metadata_index ON user_metadata(item_id, user_id)',
        'UPDATE sort SET sharemetadata = 0',
    ])
    with db.session() as session:
        import user.models
        # Recompute each Metadata row's content hash and persist it if
        # it drifted from the stored value.
        for m in user.models.Metadata.query:
            data_hash = m.get_hash()
            if m.data_hash != data_hash:
                m.data_hash = data_hash
                session.add(m)
        session.commit()
        import item.models
        for i in item.models.Item.query:
            update = False
            if '_from' in i.info:
                del i.info['_from']
                update = True
            if update:
                session.add(i)
        session.commit()
        for i in item.models.Item.query:
            i.sync_metadata()
    return 5
|
|
|
|
def migrate_6():
    """Migration 6: recompute sort rows that were stored with an empty
    value in any sortable column."""
    with db.session() as session:
        import item.models
        # Same six columns, same order, as the original copy-pasted
        # per-column loops.
        for column in ('author', 'publisher', 'language', 'place', 'isbn', 'date'):
            for entry in item.models.Sort.query.filter_by(**{column: ''}):
                entry.item.update_sort()
        session.commit()
    return 6
|
|
|
|
def migrate_7():
    """Migration 7: purge legacy editmeta/resetmeta changelog entries,
    drop the obsolete metadata/scrape tables and compact the database."""
    with db.session() as session:
        import changelog
        for c in changelog.Changelog.query:
            if 'editmeta' in c.data or 'resetmeta' in c.data:
                session.delete(c)
        session.commit()
    # DDL and VACUUM run after the ORM session is closed.
    db.run_sql('DROP TABLE IF EXISTS metadata')
    db.run_sql('DROP TABLE IF EXISTS scrape')
    db.run_sql('VACUUM')
    return 7
|
|
|
|
def migrate_8():
    """Migration 8: remove obsolete server settings and caches, strip
    unknown metadata keys from items, rebuild the local changelog and
    delete peers with malformed (non-16-char) ids."""
    for key in ('directory_service', 'meta_service', 'local_lookup', 'cert'):
        if key in settings.server:
            del settings.server[key]
    list_cache = os.path.join(settings.data_path, 'list_cache.json')
    if os.path.exists(list_cache):
        os.unlink(list_cache)
    with db.session() as session:
        import item.models
        for i in item.models.Item.query:
            # Drop meta keys that are no longer part of the schema.
            delta = set(i.meta)-set(i.meta_keys)
            if delta:
                for key in delta:
                    del i.meta[key]
                session.add(i)
        session.commit()
        import changelog
        import user.models
        # Wipe and regenerate the local user's changelog from scratch.
        changelog.Changelog.query.delete()
        u = user.models.User.get(settings.USER_ID)
        u.rebuild_changelog()
        for peer in user.models.User.query:
            if peer.id != u.id:
                if len(peer.id) != 16:
                    session.delete(peer)
        session.commit()
    return 8
|
|
|
|
def migrate_10():
    """Migration 10: populate Find.sortvalue for all filterable keys —
    person keys use get_sort_name (memoized per value), everything else
    sorts on a normalized, lowercased form of the value itself."""
    with db.session() as session:
        from item.models import Item, Find
        from utils import get_by_id
        from item.person import get_sort_name
        import unicodedata
        # Cache of name -> sort name; get_sort_name may be expensive.
        sort_names = {}
        # key -> {value -> sortvalue}, applied in bulk below.
        updates = {}
        for f in Find.query.filter(Find.key.in_(Item.filter_keys)):
            sort_type = get_by_id(settings.config['itemKeys'], f.key).get('sortType')
            if sort_type == 'person':
                if f.value in sort_names:
                    sortvalue = sort_names[f.value]
                else:
                    sortvalue = sort_names[f.value] = get_sort_name(f.value)
            else:
                sortvalue = f.value
            if sortvalue:
                # NFKD-normalize and lowercase for stable sorting.
                sortvalue = ox.sort_string(unicodedata.normalize('NFKD', sortvalue)).lower()
            if not f.key in updates:
                updates[f.key] = {}
            updates[f.key][f.value] = sortvalue
        # One bulk UPDATE per distinct (key, value) pair.
        for key in updates:
            for value in updates[key]:
                Find.query.filter_by(key=key, value=value).update({'sortvalue': updates[key][value]})
        session.commit()
    return 10
|
|
|
|
def migrate_11():
    """Migration 11: move per-peer state out of the relational tables —
    changelog revision, list contents/order and metadata go into the
    peer object; pending transfers move out of the 'transfer' table."""
    with db.session() as session:
        from user.models import User, Metadata, List
        from changelog import Changelog
        import utils
        for u in User.query.filter_by(peered=True):
            peer = utils.get_peer(u.id)
            # Carry over the highest changelog revision seen.
            last = Changelog.query.filter_by(user_id=u.id).order_by('-revision').first()
            if last:
                peer.info['revision'] = last.revision
            listorder = []
            for l in List.query.filter_by(user_id=u.id).order_by('index_'):
                if l.name:
                    peer.info['lists'][l.name] = [i.id for i in l.get_items()]
                    listorder.append(l.name)
            if 'listorder' not in peer.info:
                peer.info['listorder'] = listorder
            # Move metadata rows into the peer's library store.
            for m in Metadata.query.filter_by(user_id=u.id):
                peer.library[m.item_id] = {
                    'meta': dict(m.data),
                    'meta_hash': m.data_hash,
                    'modified': m.modified,
                }
            peer.library.commit()
            peer.sync_info()
            peer.sync_db()
            # Old relational copies are no longer needed.
            Changelog.query.filter_by(user_id=u.id).delete()
            Metadata.query.filter_by(user_id=u.id).delete()
        session.commit()
    if db.table_exists('transfer'):
        import state
        import downloads
        # Instantiate Downloads offline so nothing starts transferring
        # while rows are copied over.
        state.online = False
        state.downloads = downloads.Downloads()
        r = state.db.session.execute('SELECT item_id, added, progress FROM transfer')
        for t in r:
            item_id, added, progress = t
            if added:
                state.downloads.transfers[item_id] = {
                    'added': added,
                    'progress': progress
                }
        state.db.session.commit()
        state.downloads.transfers.commit()
        state.downloads = None
    return 11
|
|
|
|
def migrate_12():
    """Migration 12: drop the obsolete 'transfer' table (its rows were
    moved into the downloads store by migrate_11)."""
    db.run_sql(['DROP TABLE IF EXISTS transfer'])
    return 12
|