# openmedialibrary/oml/update.py
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
from contextlib import closing
import base64
import hashlib
import json
import os
import tarfile
from threading import Thread
import urllib.request, urllib.error, urllib.parse
import shutil
import subprocess
import sys
import time
import OpenSSL.crypto
import ox
from oxtornado import actions
import settings
import db
import logging
logger = logging.getLogger(__name__)
# Encoding used for the ed25519 update key and signature values below.
ENCODING='base64'
def verify(release):
    """Verify the signature of a release manifest.

    The signed payload is the newline-joined, module-name-sorted list of
    '<version>/<sha1>' strings built from release['modules'].  First the
    strongest available TLS signature ('signature_sha512' / ..._sha256 /
    ..._sha1) is checked against settings.OML_UPDATE_CERT; if no TLS
    signature verified, an ed25519 'signature' checked against
    settings.OML_UPDATE_KEY is accepted as a fallback.

    Returns True only if one of the signatures verifies.
    """
    verified = False
    value = []
    for module in sorted(release['modules']):
        value += [str('%s/%s' % (release['modules'][module]['version'], release['modules'][module]['sha1']))]
    value = '\n'.join(value)
    value = value.encode()
    # Try digests strongest-first; only the first signature present is
    # checked (note the break below), so a weaker digest cannot override
    # a failed stronger one.
    for digest in ('sha512', 'sha256', 'sha1'):
        if 'signature_%s'%digest in release:
            tls_sig = base64.b64decode(release['signature_%s'%digest].encode())
            cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, settings.OML_UPDATE_CERT)
            try:
                OpenSSL.crypto.verify(cert, tls_sig, value, digest)
                verified = True
            except OpenSSL.crypto.Error:
                print('invalid tls signature')
                verified = False
            break
    # Fallback: standalone ed25519 signature (only consulted if the TLS
    # path did not already verify).
    if 'signature' in release and not verified:
        import ed25519
        vk = ed25519.VerifyingKey(settings.OML_UPDATE_KEY, encoding=ENCODING)
        sig = release['signature'].encode()
        try:
            vk.verify(sig, value, encoding=ENCODING)
            verified = True
        except ed25519.BadSignatureError:
            verified = False
    return verified
def get(url, filename=None):
    """Fetch *url* with the configured User-Agent.

    Without *filename* the whole response body is returned as bytes.
    With *filename* the body is streamed to that file in 4 KiB chunks
    (parent directories are created as needed) and nothing is returned.
    """
    request = urllib.request.Request(url, headers={
        'User-Agent': settings.USER_AGENT
    })
    with closing(urllib.request.urlopen(request)) as response:
        if not filename:
            return response.read()
        # Streaming download: make sure the target directory exists first.
        target_dir = os.path.dirname(filename)
        if target_dir and not os.path.exists(target_dir):
            os.makedirs(target_dir)
        with open(filename, 'wb') as out:
            chunk = response.read(4096)
            while chunk:
                out.write(chunk)
                chunk = response.read(4096)
def check():
    """Return True if a verified release newer than the installed one exists."""
    if not settings.release:
        return False
    url = settings.server.get('release_url',
        'http://downloads.openmedialibrary.com/release.json')
    release = json.loads(get(url).decode('utf-8'))
    installed = current_version('openmedialibrary')
    available = release['modules']['openmedialibrary']['version']
    return verify(release) and installed < available
def current_version(module):
    """Return the installed version string for *module* from settings.release,
    or '' when the module (or its version) is not recorded."""
    modules = settings.release['modules'] if 'modules' in settings.release else {}
    info = modules.get(module, {})
    return info.get('version', '')
def get_latest_release():
    """Download the latest release manifest, verify its signature, and cache
    it under settings.updates_path.  Returns the parsed release dict, or
    None when verification fails."""
    release_data = get(settings.server.get('release_url'))
    release = json.loads(release_data.decode('utf-8'))
    if not verify(release):
        return None
    ox.makedirs(settings.updates_path)
    with open(os.path.join(settings.updates_path, 'release.json'), 'wb') as fd:
        fd.write(release_data)
    return release
def download():
    """Download updated module tarballs into settings.updates_path.

    Returns True when nothing needs downloading or everything downloaded
    cleanly; False when a downloaded tarball fails its sha1 check (the bad
    file is deleted so it can be retried).  Stale files in updates_path
    that are no longer referenced by the release are removed.
    """
    # No installed release manifest yet -> nothing to compare against.
    if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
        return True
    release = get_latest_release()
    if release:
        ox.makedirs(settings.updates_path)
        os.chdir(os.path.dirname(settings.base_dir))
        # Track which files in updates_path are still wanted.
        current_files = {'release.json'}
        for module in release['modules']:
            if release['modules'][module]['version'] > current_version(module):
                module_tar = os.path.join(settings.updates_path, release['modules'][module]['name'])
                # Tarballs live next to release.json on the server.
                base_url = settings.server.get('release_url').rsplit('/', 1)[0]
                url = '/'.join([base_url, release['modules'][module]['name']])
                if not os.path.exists(module_tar):
                    logger.debug('download %s', os.path.basename(module_tar))
                    get(url, module_tar)
                    # Reject and delete a corrupt download so the next run retries.
                    if ox.sha1sum(module_tar) != release['modules'][module]['sha1']:
                        logger.debug('invalid checksum %s', os.path.basename(module_tar))
                        os.unlink(module_tar)
                        return False
                current_files.add(os.path.basename(module_tar))
        # Garbage-collect files from previous releases.
        for f in set(next(os.walk(settings.updates_path))[2])-current_files:
            os.unlink(os.path.join(settings.updates_path, f))
        return True
    return True
def install(stop=True):
    """Install previously downloaded module updates.

    Verifies the cached release manifest and, for each module whose
    version is newer than the installed one, extracts its tarball into
    '<module>_new', swaps it in place of the current module directory
    and removes the old copy.  On success the manifest is copied into
    settings.data_path, the ctl stop/postupdate hooks run, and the macOS
    app bundle is refreshed.  Returns False when a tarball is missing or
    fails its sha1 check (the bad file is deleted); True otherwise.
    """
    if not os.path.exists(os.path.join(settings.updates_path, 'release.json')):
        return True
    if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
        return True
    with open(os.path.join(settings.updates_path, 'release.json')) as fd:
        release = json.load(fd)
    old_version = current_version('openmedialibrary')
    new_version = release['modules']['openmedialibrary']['version']
    if verify(release) and old_version < new_version:
        os.chdir(os.path.dirname(settings.base_dir))
        for module in release['modules']:
            if release['modules'][module]['version'] > current_version(module):
                module_tar = os.path.join(settings.updates_path, release['modules'][module]['name'])
                if os.path.exists(module_tar) and ox.sha1sum(module_tar) == release['modules'][module]['sha1']:
                    #tar fails if old platform is moved before extract
                    # Extract into a staging dir first, then swap directories.
                    new = '%s_new' % module
                    ox.makedirs(new)
                    os.chdir(new)
                    tar = tarfile.open(module_tar)
                    tar.extractall()
                    tar.close()
                    os.chdir(os.path.dirname(settings.base_dir))
                    module_old = '%s_old' % module
                    if os.path.exists(module):
                        shutil.move(module, module_old)
                    shutil.move(os.path.join(new, module), module)
                    if os.path.exists(module_old):
                        shutil.rmtree(module_old)
                    shutil.rmtree(new)
                else:
                    # Missing or corrupt tarball: drop it and abort the install.
                    if os.path.exists(module_tar):
                        os.unlink(module_tar)
                    return False
        # Mark the new release as installed.
        shutil.copy(os.path.join(settings.updates_path, 'release.json'), os.path.join(settings.data_path, 'release.json'))
        if stop:
            subprocess.call(['./ctl', 'stop'])
        subprocess.call(['./ctl', 'postupdate', '-o', old_version, '-n', new_version])
        upgrade_app()
        return True
    return True
def update_available():
    """Return True when a database migration or a verified newer release
    is pending installation."""
    if settings.server.get('db_version', 0) < settings.DB_VERSION:
        return True
    cached_manifest = os.path.join(settings.updates_path, 'release.json')
    installed_manifest = os.path.join(settings.data_path, 'release.json')
    if not os.path.exists(cached_manifest):
        return False
    if not os.path.exists(installed_manifest):
        return False
    with open(cached_manifest) as fd:
        release = json.load(fd)
    installed = current_version('openmedialibrary')
    candidate = release['modules']['openmedialibrary']['version']
    return verify(release) and installed < candidate
def restart_oml(update=False):
    """Restart the service via the ctl script, optionally refreshing the
    cached release manifest first."""
    if update:
        get_latest_release()
    ctl = os.path.join(settings.base_dir, 'ctl')
    # Detach so the restart survives this process exiting.
    subprocess.Popen([ctl, 'restart'], close_fds=True, start_new_session=True)
def get_app_version(app):
    """Read CFBundleShortVersionString from a macOS app bundle via the
    `defaults` tool.  Returns None when the Info.plist does not exist."""
    plist = app + '/Contents/Info.plist'
    if not os.path.exists(plist):
        return None
    return subprocess.check_output(
        ['defaults', 'read', plist, 'CFBundleShortVersionString']
    ).strip()
def upgrade_app():
    """On macOS, replace the installed /Applications bundle with the
    bundled one when their versions differ.  No-op on other platforms.

    Fixes: the original used a bare `except:` (which would also swallow
    SystemExit/KeyboardInterrupt) and a local variable named
    `current_version` that shadowed the module-level current_version()
    function.
    """
    if sys.platform != 'darwin':
        return
    base = os.path.dirname(settings.base_dir)
    bundled_app = os.path.join(base, 'platform/Darwin/Applications/Open Media Library.app')
    app = '/Applications/Open Media Library.app'
    installed_version = get_app_version(app)
    bundled_version = get_app_version(bundled_app)
    if installed_version and bundled_version and installed_version != bundled_version:
        try:
            # Replace the whole bundle; copytree requires the target gone.
            shutil.rmtree(app)
            shutil.copytree(bundled_app, app)
        except Exception:
            # Best-effort: a failed app swap must not abort the update.
            logger.debug('Failed to update Application', exc_info=1)
def getVersion(data):
    '''
    check if new version is available
    '''
    # NOTE(review): the initial dict carries an 'upgrade' key that is never
    # modified, while the code below sets 'update' — looks like a legacy
    # key kept for client compatibility; confirm before removing either.
    response = {
        'current': settings.MINOR_VERSION,
        'version': settings.MINOR_VERSION,
        'upgrade': False,
    }
    if settings.MINOR_VERSION == 'git':
        # Git checkouts are updated manually; the remote-HEAD comparison
        # below was disabled deliberately.
        '''
        cmd = ['git', 'rev-parse', '@']
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True)
        stdout, stderr = p.communicate()
        current = stdout.strip()
        cmd = ['git', 'ls-remote', 'origin', '-h', 'refs/heads/master']
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True)
        stdout, stderr = p.communicate()
        new = stdout.strip()[:40]
        response['update'] = len(new) == 40 and current != new
        '''
        response['update'] = False
    else:
        # Refresh the cached manifest, then compare installed vs available.
        get_latest_release()
        if not os.path.exists(os.path.join(settings.updates_path, 'release.json')):
            return response
        if not os.path.exists(os.path.join(settings.data_path, 'release.json')):
            return response
        with open(os.path.join(settings.updates_path, 'release.json')) as fd:
            release = json.load(fd)
        current = current_version('openmedialibrary')
        response['current'] = current
        new = release['modules']['openmedialibrary']['version']
        response['version'] = new
        response['update'] = current < new
    return response
actions.register(getVersion, cache=False)
def restart(data):
    '''
    restart (and upgrade if upgrades are available)
    '''
    wants_update = data.get('update')
    restart_oml(wants_update)
    return {}
actions.register(restart, cache=False)
class Update(Thread):
    """Background worker that applies pending database migrations and
    software updates, reporting progress over the websocket, then
    restarts the service."""

    # Default status payload; status() replaces it with an instance
    # attribute of the same shape.
    _status = {
        'reload': False,
        'status': 'Updating Open Media Library...'
    }

    def __init__(self):
        Thread.__init__(self)
        self.daemon = True
        # The thread starts itself on construction.
        self.start()

    def status(self, status, reload=False):
        """Record the current status and broadcast an 'updatestatus' event."""
        from websocket import trigger_event
        self._status = {
            'reload': reload,
            'status': status,
        }
        trigger_event('updatestatus', self._status)

    def install(self):
        """Download and install updates until none remain.

        Returns True if an installation failed (caller should restart),
        False once no update is available.
        """
        while update_available():
            self.status('Downloading...')
            # download() returns False on checksum failure; retry forever.
            while not download():
                self.status('Download failed, retrying...')
                time.sleep(5)
                self.status('Downloading...')
            self.status('Installing...')
            # install(False): skip the 'ctl stop' step while running inline.
            if not install(False):
                self.status('Installation failed.')
                return True
        return False

    def update_database(self):
        """Run any pending schema/data migrations up to settings.DB_VERSION."""
        db_version = settings.server.get('db_version', 0)
        if db_version < settings.DB_VERSION:
            self.status('Updating...')
            if db_version < 3:
                db_version = migrate_3()
            if db_version < 4:
                db_version = migrate_4()
            if db_version < 5:
                db_version = migrate_5()
            if db_version < 6:
                db_version = migrate_6()
            settings.server['db_version'] = settings.DB_VERSION

    def run(self):
        self.status('Checking for updates...')
        self.update_database()
        # install() returning True signals a failed install -> restart
        # without announcing a relaunch.
        if self.install():
            restart_oml()
            return
        self.status('Relaunching...', True)
        restart_oml()
def migrate_3():
    """Migration 3: for items marked 'available', drop the file reference
    when no file rows remain, otherwise backfill 'pages' metadata from the
    first file.  Returns the new db_version (3)."""
    with db.session():
        import item.models
        available_query = {
            'query': {
                'conditions': [{
                    'key': 'mediastate',
                    'value': 'available',
                    'operator': '=='
                }]
            }
        }
        for i in item.models.Item.find(available_query):
            if not i.files.all():
                i.remove_file()
            else:
                f = i.files.all()[0]
                if 'pages' not in i.info and 'pages' in f.info:
                    i.info['pages'] = f.info['pages']
                    i.save()
    return 3
def migrate_4():
    """Migration 4: normalize list-valued 'isbn' metadata to a single
    ISBN-13 string (or remove it entirely when no valid ISBN remains).
    Returns the new db_version (4)."""
    with db.session() as session:
        import item.models
        from meta.utils import to_isbn13
        for i in item.models.Item.query:
            changed = False
            if 'isbn' in i.meta and isinstance(i.meta['isbn'], list):
                valid = [code for code in (to_isbn13(x) for x in i.meta['isbn']) if code]
                if valid:
                    # Keep only the first valid ISBN-13, mirrored into info.
                    i.meta['isbn'] = valid[0]
                    if 'isbn' in i.info:
                        i.info['isbn'] = i.meta['isbn']
                else:
                    del i.meta['isbn']
                    if 'isbn' in i.info:
                        del i.info['isbn']
                changed = True
            # Drop any falsy leftover ('' / None) isbn value.
            if 'isbn' in i.meta and not i.meta['isbn']:
                del i.meta['isbn']
                changed = True
            if changed:
                session.add(i)
        session.commit()
    return 4
def migrate_5():
    """Migration 5: rebuild the unique user_metadata index, reset the
    sharemetadata flags, refresh Metadata data hashes, strip the legacy
    '_from' key from item info, and re-sync item metadata.
    Returns the new db_version (5).

    Fixes: removed a stray trailing comma after the db.run_sql(...) call,
    which turned the statement into a pointless 1-tuple expression.
    """
    db.run_sql([
        'DROP INDEX IF EXISTS user_metadata_index',
        'CREATE UNIQUE INDEX user_metadata_index ON user_metadata(item_id, user_id)',
        'UPDATE sort SET sharemetadata = 0',
    ])
    with db.session() as session:
        import user.models
        # Recompute and persist stale metadata hashes.
        for m in user.models.Metadata.query:
            data_hash = m.get_hash()
            if m.data_hash != data_hash:
                m.data_hash = data_hash
                session.add(m)
        session.commit()
        import item.models
        # Remove the obsolete '_from' info key.
        for i in item.models.Item.query:
            update = False
            if '_from' in i.info:
                del i.info['_from']
                update = True
            if update:
                session.add(i)
        session.commit()
        for i in item.models.Item.query:
            i.sync_metadata()
    return 5
def migrate_6():
    """Migration 6: recompute sort values for items whose sort row has an
    empty author/publisher/language/place/isbn/date column.
    Returns the new db_version (6)."""
    with db.session() as session:
        import item.models
        # One pass per column, in the same order as the original code.
        for column in ('author', 'publisher', 'language', 'place', 'isbn', 'date'):
            for s in item.models.Sort.query.filter_by(**{column: ''}):
                s.item.update_sort()
        session.commit()
    return 6